Mirror of https://github.com/denoland/deno.git (synced 2025-01-21 04:52:26 -05:00)

Merge branch 'main' into DENO_ROOT_INSTALL_Fix
Commit: c8a7aa0300
2452 changed files with 58837 additions and 17317 deletions
.github/workflows/cargo_publish.yml (vendored): 2 changed lines

@@ -35,7 +35,7 @@ jobs:
       - name: Install deno
         uses: denoland/setup-deno@v2
         with:
-          deno-version: v1.x
+          deno-version: v2.x

       - name: Publish
         env:

.github/workflows/ci.generate.ts (vendored): 69 changed lines

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 27;
+const cacheVersion = 32;

 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
@@ -59,6 +59,15 @@ const Runners = {

 const prCacheKeyPrefix =
   `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;
+const prCacheKey = `${prCacheKeyPrefix}\${{ github.sha }}`;
+const prCachePath = [
+  // this must match for save and restore (https://github.com/actions/cache/issues/1444)
+  "./target",
+  "!./target/*/gn_out",
+  "!./target/*/gn_root",
+  "!./target/*/*.zip",
+  "!./target/*/*.tar.gz",
+].join("\n");

 // Note that you may need to add more version to the `apt-get remove` line below if you change this
 const llvmVersion = 19;
@@ -196,7 +205,7 @@ const installNodeStep = {
 const installDenoStep = {
   name: "Install Deno",
   uses: "denoland/setup-deno@v2",
-  with: { "deno-version": "v1.x" },
+  with: { "deno-version": "v2.x" },
 };

 const authenticateWithGoogleCloud = {
@@ -475,6 +484,27 @@ const ci = {
             " -czvf target/release/deno_src.tar.gz -C .. deno",
           ].join("\n"),
         },
+        {
+          name: "Cache Cargo home",
+          uses: "actions/cache@v4",
+          with: {
+            // See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
+            // Note that with the new sparse registry format, we no longer have to cache a `.git` dir
+            path: [
+              "~/.cargo/.crates.toml",
+              "~/.cargo/.crates2.json",
+              "~/.cargo/bin",
+              "~/.cargo/registry/index",
+              "~/.cargo/registry/cache",
+              "~/.cargo/git/db",
+            ].join("\n"),
+            key:
+              `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ hashFiles('Cargo.lock') }}`,
+            // We will try to restore from the closest cargo-home we can find
+            "restore-keys":
+              `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-`,
+          },
+        },
         installRustStep,
         {
           if:
@@ -598,23 +628,6 @@ const ci = {
             installBenchTools,
           ].join("\n"),
         },
-        {
-          name: "Cache Cargo home",
-          uses: "actions/cache@v4",
-          with: {
-            // See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
-            // Note that with the new sparse registry format, we no longer have to cache a `.git` dir
-            path: [
-              "~/.cargo/registry/index",
-              "~/.cargo/registry/cache",
-            ].join("\n"),
-            key:
-              `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ hashFiles('Cargo.lock') }}`,
-            // We will try to restore from the closest cargo-home we can find
-            "restore-keys":
-              `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}`,
-          },
-        },
         {
           // Restore cache from the latest 'main' branch build.
           name: "Restore cache build output (PR)",
@@ -622,13 +635,7 @@ const ci = {
           if:
             "github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/')",
           with: {
-            path: [
-              "./target",
-              "!./target/*/gn_out",
-              "!./target/*/gn_root",
-              "!./target/*/*.zip",
-              "!./target/*/*.tar.gz",
-            ].join("\n"),
+            path: prCachePath,
             key: "never_saved",
             "restore-keys": prCacheKeyPrefix,
           },
@@ -1080,14 +1087,8 @@ const ci = {
           if:
             "(matrix.job == 'test' || matrix.job == 'lint') && github.ref == 'refs/heads/main'",
           with: {
-            path: [
-              "./target",
-              "!./target/*/gn_out",
-              "!./target/*/*.zip",
-              "!./target/*/*.sha256sum",
-              "!./target/*/*.tar.gz",
-            ].join("\n"),
-            key: prCacheKeyPrefix + "${{ github.sha }}",
+            path: prCachePath,
+            key: prCacheKey,
           },
         },
       ]),

.github/workflows/ci.yml (vendored): 30 changed lines

@@ -174,13 +174,26 @@ jobs:
             mkdir -p target/release
             tar --exclude=".git*" --exclude=target --exclude=third_party/prebuilt \
                 -czvf target/release/deno_src.tar.gz -C .. deno
+      - name: Cache Cargo home
+        uses: actions/cache@v4
+        with:
+          path: |-
+            ~/.cargo/.crates.toml
+            ~/.cargo/.crates2.json
+            ~/.cargo/bin
+            ~/.cargo/registry/index
+            ~/.cargo/registry/cache
+            ~/.cargo/git/db
+          key: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
+        if: '!(matrix.skip)'
       - uses: dsherret/rust-toolchain-file@v1
         if: '!(matrix.skip)'
       - if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
         name: Install Deno
         uses: denoland/setup-deno@v2
         with:
-          deno-version: v1.x
+          deno-version: v2.x
       - name: Install Python
         uses: actions/setup-python@v5
         with:
@@ -355,15 +368,6 @@ jobs:
       - name: Install benchmark tools
         if: '!(matrix.skip) && (matrix.job == ''bench'')'
         run: ./tools/install_prebuilt.js wrk hyperfine
-      - name: Cache Cargo home
-        uses: actions/cache@v4
-        with:
-          path: |-
-            ~/.cargo/registry/index
-            ~/.cargo/registry/cache
-          key: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
-        if: '!(matrix.skip)'
       - name: Restore cache build output (PR)
         uses: actions/cache/restore@v4
         if: '!(matrix.skip) && (github.ref != ''refs/heads/main'' && !startsWith(github.ref, ''refs/tags/''))'
@@ -375,7 +379,7 @@ jobs:
            !./target/*/*.zip
            !./target/*/*.tar.gz
          key: never_saved
-         restore-keys: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+         restore-keys: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
       - name: Apply and update mtime cache
         if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
         uses: ./.github/mtime_cache
@@ -682,10 +686,10 @@ jobs:
          path: |-
            ./target
            !./target/*/gn_out
+           !./target/*/gn_root
            !./target/*/*.zip
-           !./target/*/*.sha256sum
            !./target/*/*.tar.gz
-         key: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+         key: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
   publish-canary:
     name: publish canary
     runs-on: ubuntu-24.04

.github/workflows/npm_publish.yml (vendored, new file): 45 added lines

@@ -0,0 +1,45 @@
+name: npm_publish
+
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'Version'
+        type: string
+  release:
+    types: [published]
+
+permissions:
+  id-token: write
+
+jobs:
+  build:
+    name: npm publish
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+
+    steps:
+      - name: Configure git
+        run: |
+          git config --global core.symlinks true
+          git config --global fetch.parallel 32
+
+      - name: Clone repository
+        uses: actions/checkout@v4
+        with:
+          submodules: recursive
+
+      - name: Install Deno
+        uses: denoland/setup-deno@v2
+        with:
+          deno-version: v2.x
+      - name: Install Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22.x'
+          registry-url: 'https://registry.npmjs.org'
+
+      - name: Publish
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+        run: ./tools/release/npm/build.ts ${{ github.event.inputs.version }} --publish

.github/workflows/promote_to_release.yml (vendored): 2 changed lines

@@ -42,7 +42,7 @@ jobs:
       - name: Install deno
         uses: denoland/setup-deno@v2
         with:
-          deno-version: v1.x
+          deno-version: v2.x

       - name: Install rust-codesign
         run: |-

.github/workflows/start_release.yml (vendored): 2 changed lines

@@ -36,7 +36,7 @@ jobs:
      - name: Install deno
        uses: denoland/setup-deno@v2
        with:
-          deno-version: v1.x
+          deno-version: v2.x

      - name: Create Gist URL
        env:

.github/workflows/version_bump.yml (vendored): 2 changed lines

@@ -41,7 +41,7 @@ jobs:
      - name: Install deno
        uses: denoland/setup-deno@v2
        with:
-          deno-version: v1.x
+          deno-version: v2.x

      - name: Run version bump
        run: |

Cargo.lock (generated): 965 changed lines. File diff suppressed because it is too large.

Cargo.toml: 97 changed lines

@@ -21,6 +21,7 @@ members = [
   "ext/napi/sym",
   "ext/net",
   "ext/node",
+  "ext/telemetry",
   "ext/url",
   "ext/web",
   "ext/webgpu",
@@ -29,6 +30,7 @@ members = [
   "ext/webstorage",
   "resolvers/deno",
   "resolvers/node",
+  "resolvers/npm_cache",
   "runtime",
   "runtime/permissions",
   "tests",
@@ -45,20 +47,20 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"

 [workspace.dependencies]
-deno_ast = { version = "=0.43.3", features = ["transpiling"] }
-deno_core = { version = "0.322.0" }
+deno_ast = { version = "=0.44.0", features = ["transpiling"] }
+deno_core = { version = "0.327.0" }

-deno_bench_util = { version = "0.173.0", path = "./bench_util" }
-deno_config = { version = "=0.39.2", features = ["workspace", "sync"] }
-deno_lockfile = "=0.23.1"
+deno_bench_util = { version = "0.178.0", path = "./bench_util" }
+deno_config = { version = "=0.41.0", features = ["workspace", "sync"] }
+deno_lockfile = "=0.24.0"
 deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
-deno_npm = "=0.25.4"
-deno_path_util = "=0.2.1"
-deno_permissions = { version = "0.39.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.188.0", path = "./runtime" }
-deno_semver = "=0.5.16"
+deno_npm = "=0.27.0"
+deno_path_util = "=0.2.2"
+deno_permissions = { version = "0.43.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.192.0", path = "./runtime" }
+deno_semver = "=0.7.1"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.109.0", path = "./ext/napi/sym" }
+napi_sym = { version = "0.114.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }

 denokv_proto = "0.8.4"
@@ -67,32 +69,34 @@ denokv_remote = "0.8.4"
 denokv_sqlite = { default-features = false, version = "0.8.4" }

 # exts
-deno_broadcast_channel = { version = "0.173.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.111.0", path = "./ext/cache" }
-deno_canvas = { version = "0.48.0", path = "./ext/canvas" }
-deno_console = { version = "0.179.0", path = "./ext/console" }
-deno_cron = { version = "0.59.0", path = "./ext/cron" }
-deno_crypto = { version = "0.193.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.203.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.166.0", path = "./ext/ffi" }
-deno_fs = { version = "0.89.0", path = "./ext/fs" }
-deno_http = { version = "0.177.0", path = "./ext/http" }
-deno_io = { version = "0.89.0", path = "./ext/io" }
-deno_kv = { version = "0.87.0", path = "./ext/kv" }
-deno_napi = { version = "0.110.0", path = "./ext/napi" }
-deno_net = { version = "0.171.0", path = "./ext/net" }
-deno_node = { version = "0.116.0", path = "./ext/node" }
-deno_tls = { version = "0.166.0", path = "./ext/tls" }
-deno_url = { version = "0.179.0", path = "./ext/url" }
-deno_web = { version = "0.210.0", path = "./ext/web" }
-deno_webgpu = { version = "0.146.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.179.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.184.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.174.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.178.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.116.0", path = "./ext/cache" }
+deno_canvas = { version = "0.53.0", path = "./ext/canvas" }
+deno_console = { version = "0.184.0", path = "./ext/console" }
+deno_cron = { version = "0.64.0", path = "./ext/cron" }
+deno_crypto = { version = "0.198.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.208.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.171.0", path = "./ext/ffi" }
+deno_fs = { version = "0.94.0", path = "./ext/fs" }
+deno_http = { version = "0.182.0", path = "./ext/http" }
+deno_io = { version = "0.94.0", path = "./ext/io" }
+deno_kv = { version = "0.92.0", path = "./ext/kv" }
+deno_napi = { version = "0.115.0", path = "./ext/napi" }
+deno_net = { version = "0.176.0", path = "./ext/net" }
+deno_node = { version = "0.122.0", path = "./ext/node" }
+deno_telemetry = { version = "0.6.0", path = "./ext/telemetry" }
+deno_tls = { version = "0.171.0", path = "./ext/tls" }
+deno_url = { version = "0.184.0", path = "./ext/url" }
+deno_web = { version = "0.215.0", path = "./ext/web" }
+deno_webgpu = { version = "0.151.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.184.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.189.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.179.0", path = "./ext/webstorage" }

 # resolvers
-deno_resolver = { version = "0.11.0", path = "./resolvers/deno" }
-node_resolver = { version = "0.18.0", path = "./resolvers/node" }
+deno_npm_cache = { version = "0.3.0", path = "./resolvers/npm_cache" }
+deno_resolver = { version = "0.15.0", path = "./resolvers/deno" }
+node_resolver = { version = "0.22.0", path = "./resolvers/node" }

 aes = "=0.8.3"
 anyhow = "1.0.57"
@@ -100,10 +104,11 @@ async-trait = "0.1.73"
 base32 = "=0.5.1"
 base64 = "0.21.7"
 bencher = "0.1"
-boxed_error = "0.2.2"
+boxed_error = "0.2.3"
 brotli = "6.0.0"
 bytes = "1.4.0"
 cache_control = "=0.2.0"
+capacity_builder = "0.5.0"
 cbc = { version = "=0.1.2", features = ["alloc"] }
 # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
 # Instead use util::time::utc_now()
@@ -112,9 +117,11 @@ color-print = "0.3.5"
 console_static_text = "=0.8.1"
 dashmap = "5.5.3"
 data-encoding = "2.3.3"
-data-url = "=0.3.0"
-deno_cache_dir = "=0.13.2"
-deno_package_json = { version = "0.1.2", default-features = false }
+data-url = "=0.3.1"
+deno_cache_dir = "=0.15.0"
+deno_error = "=0.5.2"
+deno_package_json = { version = "0.3.0", default-features = false }
+deno_unsync = "0.4.2"
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
 elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }
@@ -128,7 +135,7 @@ fs3 = "0.5.0"
 futures = "0.3.21"
 glob = "0.3.1"
 h2 = "0.4.4"
-hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
+hickory-resolver = { version = "0.25.0-alpha.4", features = ["tokio-runtime", "serde"] }
 http = "1.0"
 http-body = "1.0"
 http-body-util = "0.1.2"
@@ -136,13 +143,13 @@ http_v02 = { package = "http", version = "0.2.9" }
 httparse = "1.8.0"
 hyper = { version = "1.4.1", features = ["full"] }
 hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
-hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
+hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
 hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
 indexmap = { version = "2", features = ["serde"] }
 ipnet = "2.3"
 jsonc-parser = { version = "=0.26.2", features = ["serde"] }
 lazy-regex = "3"
-libc = "0.2.126"
+libc = "0.2.168"
 libz-sys = { version = "1.1.20", default-features = false }
 log = { version = "0.4.20", features = ["kv"] }
 lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
@@ -189,13 +196,13 @@ spki = "0.7.2"
 tar = "=0.4.40"
 tempfile = "3.4.0"
 termcolor = "1.1.3"
-thiserror = "1.0.61"
+thiserror = "2.0.3"
 tokio = { version = "1.36.0", features = ["full"] }
 tokio-metrics = { version = "0.3.0", features = ["rt"] }
 tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
 tokio-socks = "0.5.1"
 tokio-util = "0.7.4"
-tower = { version = "0.4.13", default-features = false, features = ["util"] }
+tower = { version = "0.5.2", default-features = false, features = ["retry", "util"] }
 tower-http = { version = "0.6.1", features = ["decompression-br", "decompression-gzip"] }
 tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
 tower-service = "0.3.2"
@@ -234,7 +241,7 @@ nix = "=0.27.1"
 # windows deps
 junction = "=0.2.0"
 winapi = "=0.3.9"
-windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel"] }
+windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel", "Win32_System_Threading", "Win32_UI", "Win32_UI_Shell"] }
 winres = "=0.1.12"

 [profile.release]

Releases.md: 84 changed lines

@@ -6,6 +6,90 @@ https://github.com/denoland/deno/releases
 We also have one-line install commands at:
 https://github.com/denoland/deno_install

+### 2.1.4 / 2024.12.11
+
+- feat(unstable): support caching npm dependencies only as they're needed
+  (#27300)
+- fix(compile): correct read length for transpiled typescript files (#27301)
+- fix(ext/node): accept file descriptor in fs.readFile(Sync) (#27252)
+- fix(ext/node): handle Float16Array in node:v8 module (#27285)
+- fix(lint): do not error providing --allow-import (#27321)
+- fix(node): update list of builtin node modules, add missing export to
+  _http_common (#27294)
+- fix(outdated): error when there are no config files (#27306)
+- fix(outdated): respect --quiet flag for hints (#27317)
+- fix(outdated): show a suggestion for updating (#27304)
+- fix(task): do not always kill child on ctrl+c on windows (#27269)
+- fix(unstable): don't unwrap optional state in otel (#27292)
+- fix: do not error when subpath has an @ symbol (#27290)
+- fix: do not panic when fetching invalid file url on Windows (#27259)
+- fix: replace the @deno-types with @ts-types (#27310)
+- perf(compile): improve FileBackedVfsFile (#27299)
+
+### 2.1.3 / 2024.12.05
+
+- feat(unstable): add metrics to otel (#27143)
+- fix(fmt): stable formatting of HTML files with JS (#27164)
+- fix(install): use locked version of jsr package when fetching exports (#27237)
+- fix(node/fs): support `recursive` option in readdir (#27179)
+- fix(node/worker_threads): data url not encoded properly with eval (#27184)
+- fix(outdated): allow `--latest` without `--update` (#27227)
+- fix(task): `--recursive` option not working (#27183)
+- fix(task): don't panic with filter on missing task argument (#27180)
+- fix(task): forward signals to spawned sub-processes on unix (#27141)
+- fix(task): kill descendants when killing task process on Windows (#27163)
+- fix(task): only pass args to root task (#27213)
+- fix(unstable): otel context with multiple keys (#27230)
+- fix(unstable/temporal): respect locale in `Duration.prototype.toLocaleString`
+  (#27000)
+- fix: clear dep analysis when module loading is done (#27204)
+- fix: improve auto-imports for npm packages (#27224)
+- fix: support `workspace:^` and `workspace:~` version constraints (#27096)
+
+### 2.1.2 / 2024.11.28
+
+- feat(unstable): Instrument Deno.serve (#26964)
+- feat(unstable): Instrument fetch (#27057)
+- feat(unstable): repurpose `--unstable-detect-cjs` to attempt loading more
+  modules as cjs (#27094)
+- fix(check): support jsdoc `@import` tag (#26991)
+- fix(compile): correct buffered reading of assets and files (#27008)
+- fix(compile): do not error embedding same symlink via multiple methods
+  (#27015)
+- fix(compile): handle TypeScript file included as asset (#27032)
+- fix(ext/fetch): don't throw when `bodyUsed` inspect after upgrade (#27088)
+- fix(ext/node): `tls.connect` socket upgrades (#27125)
+- fix(ext/node): add `fs.promises.fstat` and `FileHandle#stat` (#26719)
+- fix(ext/webgpu): normalize limits to number (#27072)
+- fix(ext/webgpu): use correct variable name (#27108)
+- fix(ext/websocket): don't throw exception when sending to closed socket
+  (#26932)
+- fix(fmt): return `None` if sql fmt result is the same (#27014)
+- fix(info): resolve bare specifier pointing to workspace member (#27020)
+- fix(init): always force managed node modules (#27047)
+- fix(init): support scoped npm packages (#27128)
+- fix(install): don't re-set up node_modules if running lifecycle script
+  (#26984)
+- fix(lsp): remove stray debug output (#27010)
+- fix(lsp): support task object notation for tasks request (#27076)
+- fix(lsp): wasm file import completions (#27018)
+- fix(node): correct resolution of dynamic import of esm from cjs (#27071)
+- fix(node/fs): add missing stat path argument validation (#27086)
+- fix(node/fs): missing uv error context for readFile (#27011)
+- fix(node/http): casing ignored in ServerResponse.hasHeader() (#27105)
+- fix(node/timers): error when passing id to clearTimeout/clearInterval (#27130)
+- fix(runtime/ops): Fix watchfs remove event (#27041)
+- fix(streams): reject `string` in `ReadableStream.from` type (#25116)
+- fix(task): handle carriage return in task description (#27099)
+- fix(task): handle multiline descriptions properly (#27069)
+- fix(task): strip ansi codes and control chars when printing tasks (#27100)
+- fix(tools/doc): HTML resolve main entrypoint from config file (#27103)
+- fix: support bun specifiers in JSR publish (#24588)
+- fix: support non-function exports in Wasm modules (#26992)
+- perf(compile): read embedded files as static references when UTF-8 and reading
+  as strings (#27033)
+- perf(ext/webstorage): use object wrap for `Storage` (#26931)
+
 ### 2.1.1 / 2024.11.21

 - docs(add): clarification to add command (#26968)

@@ -2,7 +2,7 @@

 [package]
 name = "deno_bench_util"
-version = "0.173.0"
+version = "0.178.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

@@ -2,7 +2,7 @@

 [package]
 name = "deno"
-version = "2.1.1"
+version = "2.1.4"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -72,17 +72,20 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa
 deno_cache_dir.workspace = true
 deno_config.workspace = true
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "0.160.0", features = ["rust", "comrak"] }
-deno_graph = { version = "=0.85.0" }
-deno_lint = { version = "=0.68.0", features = ["docs"] }
+deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] }
+deno_error.workspace = true
+deno_graph = { version = "=0.86.5" }
+deno_lint = { version = "=0.68.2", features = ["docs"] }
 deno_lockfile.workspace = true
 deno_npm.workspace = true
+deno_npm_cache.workspace = true
 deno_package_json.workspace = true
 deno_path_util.workspace = true
-deno_resolver.workspace = true
+deno_resolver = { workspace = true, features = ["sync"] }
 deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
 deno_semver.workspace = true
-deno_task_shell = "=0.18.1"
+deno_task_shell = "=0.20.2"
+deno_telemetry.workspace = true
 deno_terminal.workspace = true
 libsui = "0.5.0"
 node_resolver.workspace = true
@@ -91,8 +94,10 @@ anstream = "0.6.14"
 async-trait.workspace = true
 base64.workspace = true
 bincode = "=1.3.3"
+boxed_error.workspace = true
 bytes.workspace = true
 cache_control.workspace = true
+capacity_builder.workspace = true
 chrono = { workspace = true, features = ["now"] }
 clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
 clap_complete = "=4.5.24"
@@ -107,7 +112,7 @@ dotenvy = "0.15.7"
 dprint-plugin-json = "=0.19.4"
 dprint-plugin-jupyter = "=0.1.5"
 dprint-plugin-markdown = "=0.17.8"
-dprint-plugin-typescript = "=0.93.2"
+dprint-plugin-typescript = "=0.93.3"
 env_logger = "=0.10.0"
 fancy-regex = "=0.10.0"
 faster-hex.workspace = true
@@ -129,7 +134,7 @@ libz-sys.workspace = true
 log = { workspace = true, features = ["serde"] }
 lsp-types.workspace = true
 malva = "=0.11.0"
-markup_fmt = "=0.16.0"
+markup_fmt = "=0.18.0"
 memmem.workspace = true
 monch.workspace = true
 notify.workspace = true
@@ -151,8 +156,7 @@ serde_repr.workspace = true
 sha2.workspace = true
 shell-escape = "=0.1.5"
 spki = { version = "0.7", features = ["pem"] }
-# NOTE(bartlomieju): using temporary fork for now, revert back to `sqlformat-rs` later
-sqlformat = { package = "deno_sqlformat", version = "0.3.2" }
+sqlformat = "=0.3.2"
 strsim = "0.11.1"
 tar.workspace = true
 tempfile.workspace = true

@@ -18,7 +18,7 @@ impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
   fn read_to_string_lossy(
     &self,
     path: &std::path::Path,
-  ) -> Result<String, std::io::Error> {
+  ) -> Result<std::borrow::Cow<'static, str>, std::io::Error> {
     self
       .0
       .read_text_file_lossy_sync(path, None)
@@ -64,6 +64,15 @@ impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
   }
 }

+pub fn import_map_deps(
+  import_map: &serde_json::Value,
+) -> HashSet<JsrDepPackageReq> {
+  let values = imports_values(import_map.get("imports"))
+    .into_iter()
+    .chain(scope_values(import_map.get("scopes")));
+  values_to_set(values)
+}
+
 pub fn deno_json_deps(
   config: &deno_config::deno_json::ConfigFile,
 ) -> HashSet<JsrDepPackageReq> {
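
Aside: the new `import_map_deps` helper above gathers dependency requirements from both the top-level `imports` map and every entry under `scopes`. The snippet below is an illustrative, self-contained sketch of that traversal, not the deno code: `imports_values`, `scope_values`, and `values_to_set` are the real helpers, while the function names and sample JSON here are made up for demonstration (it assumes the serde_json crate and only collects raw specifier strings).

// Sketch of walking an import map's "imports" and "scopes" sections.
use std::collections::HashSet;

use serde_json::json;
use serde_json::Value;

fn specifier_values(obj: Option<&Value>) -> Vec<&str> {
  // "imports" is a flat map of alias -> specifier.
  obj
    .and_then(|v| v.as_object())
    .map(|m| m.values().filter_map(|v| v.as_str()).collect())
    .unwrap_or_default()
}

fn scope_specifier_values(obj: Option<&Value>) -> Vec<&str> {
  // "scopes" nests another alias -> specifier map under each scope key.
  obj
    .and_then(|v| v.as_object())
    .map(|m| m.values().flat_map(|v| specifier_values(Some(v))).collect())
    .unwrap_or_default()
}

fn main() {
  let import_map = json!({
    "imports": { "@std/path": "jsr:@std/path@^1.0.0" },
    "scopes": { "./vendor/": { "chalk": "npm:chalk@5" } }
  });
  let deps: HashSet<&str> = specifier_values(import_map.get("imports"))
    .into_iter()
    .chain(scope_specifier_values(import_map.get("scopes")))
    .collect();
  // Both the top-level and the scoped specifier are picked up.
  assert!(deps.contains("jsr:@std/path@^1.0.0"));
  assert!(deps.contains("npm:chalk@5"));
}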
@@ -36,7 +36,8 @@ use deno_path_util::normalize_path;
 use deno_path_util::url_to_file_path;
 use deno_runtime::deno_permissions::PermissionsOptions;
 use deno_runtime::deno_permissions::SysDescriptor;
-use deno_runtime::ops::otel::OtelConfig;
+use deno_telemetry::OtelConfig;
+use deno_telemetry::OtelConsoleConfig;
 use log::debug;
 use log::Level;
 use serde::Deserialize;
@@ -245,7 +246,7 @@ pub struct InstallFlagsGlobal {
 }

 #[derive(Clone, Debug, Eq, PartialEq)]
-pub enum InstallKind {
+pub enum InstallFlags {
   Local(InstallFlagsLocal),
   Global(InstallFlagsGlobal),
 }
@@ -257,11 +258,6 @@ pub enum InstallFlagsLocal {
   Entrypoints(Vec<String>),
 }

-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct InstallFlags {
-  pub kind: InstallKind,
-}
-
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct JSONReferenceFlags {
   pub json: deno_core::serde_json::Value,
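
Aside: the net effect of the two hunks above is that the `InstallFlags` wrapper struct and its `kind: InstallKind` field disappear, and `InstallFlags` itself becomes the two-variant enum. The following is an illustrative, stand-alone sketch of how construction and matching look after the change; the types are reduced to a couple of fields and are not the actual deno definitions.

// Reduced, illustrative versions of the refactored types.
#[derive(Clone, Debug, Eq, PartialEq)]
struct InstallFlagsGlobal {
  name: Option<String>,
  module_url: String,
}

#[derive(Clone, Debug, Eq, PartialEq)]
enum InstallFlagsLocal {
  Add(Vec<String>),
  TopLevel,
  Entrypoints(Vec<String>),
}

// Before: `struct InstallFlags { kind: InstallKind }` wrapped a separate enum.
// After: `InstallFlags` is the enum, so one layer of nesting goes away at
// every construction and match site.
#[derive(Clone, Debug, Eq, PartialEq)]
enum InstallFlags {
  Local(InstallFlagsLocal),
  Global(InstallFlagsGlobal),
}

fn describe(flags: &InstallFlags) -> String {
  match flags {
    InstallFlags::Global(global) => {
      format!("global install of {}", global.module_url)
    }
    InstallFlags::Local(InstallFlagsLocal::TopLevel) => {
      "install everything from the config files".to_string()
    }
    InstallFlags::Local(_) => "local install".to_string(),
  }
}

fn main() {
  let flags = InstallFlags::Global(InstallFlagsGlobal {
    name: None,
    module_url: "jsr:@std/http/file-server".to_string(),
  });
  assert_eq!(describe(&flags), "global install of jsr:@std/http/file-server");
  assert_eq!(
    describe(&InstallFlags::Local(InstallFlagsLocal::TopLevel)),
    "install everything from the config files"
  );
}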
@@ -598,7 +594,9 @@ pub struct UnstableConfig {
   // TODO(bartlomieju): remove in Deno 2.5
   pub legacy_flag_enabled: bool, // --unstable
   pub bare_node_builtins: bool,
+  pub detect_cjs: bool,
   pub sloppy_imports: bool,
+  pub npm_lazy_caching: bool,
   pub features: Vec<String>, // --unstabe-kv --unstable-cron
 }

@@ -989,21 +987,41 @@ impl Flags {
     args
   }

-  pub fn otel_config(&self) -> Option<OtelConfig> {
-    if self
+  pub fn otel_config(&self) -> OtelConfig {
+    let has_unstable_flag = self
       .unstable_config
       .features
-      .contains(&String::from("otel"))
-    {
-      Some(OtelConfig {
-        runtime_name: Cow::Borrowed("deno"),
-        runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
-        deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC")
-          .is_ok(),
-        ..Default::default()
-      })
-    } else {
-      None
+      .contains(&String::from("otel"));
+
+    let otel_var = |name| match std::env::var(name) {
+      Ok(s) if s.to_lowercase() == "true" => Some(true),
+      Ok(s) if s.to_lowercase() == "false" => Some(false),
+      _ => None,
+    };
+
+    let disabled =
+      !has_unstable_flag || otel_var("OTEL_SDK_DISABLED").unwrap_or(false);
+    let default = !disabled && otel_var("OTEL_DENO").unwrap_or(false);
+
+    OtelConfig {
+      tracing_enabled: !disabled
+        && otel_var("OTEL_DENO_TRACING").unwrap_or(default),
+      console: match std::env::var("OTEL_DENO_CONSOLE").as_deref() {
+        Ok(_) if disabled => OtelConsoleConfig::Ignore,
+        Ok("ignore") => OtelConsoleConfig::Ignore,
+        Ok("capture") => OtelConsoleConfig::Capture,
+        Ok("replace") => OtelConsoleConfig::Replace,
+        _ => {
+          if default {
+            OtelConsoleConfig::Capture
+          } else {
+            OtelConsoleConfig::Ignore
+          }
+        }
+      },
+      deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC")
+        .as_deref()
+        == Ok("1"),
     }
   }

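Aside: with this change `otel_config()` always returns a config instead of an `Option`, and the result is driven by environment variables gated behind the `otel` unstable feature. Below is a stand-alone, illustrative sketch of the same precedence rules with simplified types; the real `OtelConfig`/`OtelConsoleConfig` live in `deno_telemetry`, and the assertions mirror how the added code reads `OTEL_SDK_DISABLED`, `OTEL_DENO`, `OTEL_DENO_TRACING`, and `OTEL_DENO_CONSOLE`.

// Sketch of the env-var precedence implemented by the new otel_config().
#[derive(Debug, PartialEq)]
enum Console {
  Ignore,
  Capture,
  Replace,
}

fn parse_bool(value: Option<&str>) -> Option<bool> {
  match value.map(|s| s.to_lowercase()) {
    Some(s) if s == "true" => Some(true),
    Some(s) if s == "false" => Some(false),
    _ => None,
  }
}

/// has_unstable_flag: whether the "otel" unstable feature was requested.
fn resolve(
  has_unstable_flag: bool,
  sdk_disabled: Option<&str>, // OTEL_SDK_DISABLED
  deno: Option<&str>,         // OTEL_DENO
  tracing: Option<&str>,      // OTEL_DENO_TRACING
  console: Option<&str>,      // OTEL_DENO_CONSOLE
) -> (bool, Console) {
  let disabled = !has_unstable_flag || parse_bool(sdk_disabled).unwrap_or(false);
  let default = !disabled && parse_bool(deno).unwrap_or(false);
  let tracing_enabled = !disabled && parse_bool(tracing).unwrap_or(default);
  let console = match console {
    _ if disabled => Console::Ignore,
    Some("ignore") => Console::Ignore,
    Some("capture") => Console::Capture,
    Some("replace") => Console::Replace,
    _ => if default { Console::Capture } else { Console::Ignore },
  };
  (tracing_enabled, console)
}

fn main() {
  // Feature flag on, OTEL_DENO=true: tracing on, console captured.
  assert_eq!(resolve(true, None, Some("true"), None, None), (true, Console::Capture));
  // Feature flag off: everything stays disabled regardless of env vars.
  assert_eq!(resolve(false, None, Some("true"), None, None), (false, Console::Ignore));
  // OTEL_DENO_TRACING=false can switch tracing off while console stays captured.
  assert_eq!(
    resolve(true, None, Some("true"), Some("false"), None),
    (false, Console::Capture)
  );
}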
@@ -2662,11 +2680,11 @@ By default, outdated dependencies are only displayed.
 Display outdated dependencies:
   <p(245)>deno outdated</>
   <p(245)>deno outdated --compatible</>

-Update dependencies:
+Update dependencies to latest semver compatible versions:
   <p(245)>deno outdated --update</>
+Update dependencies to latest versions, ignoring semver requirements:
   <p(245)>deno outdated --update --latest</>
-  <p(245)>deno outdated --update</>

 Filters can be used to select which packages to act on. Filters can include wildcards (*) to match multiple packages.
   <p(245)>deno outdated --update --latest \"@std/*\"</>
@@ -2702,7 +2720,6 @@ Specific version requirements to update to can be specified:
         .help(
           "Update to the latest version, regardless of semver constraints",
         )
-        .requires("update")
         .conflicts_with("compatible"),
     )
     .arg(
@@ -2904,6 +2921,7 @@ To ignore linting on an entire file, you can add an ignore comment at the top of
     .arg(watch_arg(false))
     .arg(watch_exclude_arg())
     .arg(no_clear_screen_arg())
+    .arg(allow_import_arg())
   })
 }

@@ -3047,7 +3065,7 @@ fn task_subcommand() -> Command {

 List all available tasks:
   <p(245)>deno task</>

 Evaluate a task from string
   <p(245)>deno task --eval \"echo $(pwd)\"</>"
     ),
@@ -4373,7 +4391,7 @@ impl CommandExt for Command {
     ).arg(
       Arg::new("unstable-detect-cjs")
         .long("unstable-detect-cjs")
-        .help("Reads the package.json type field in a project to treat .js files as .cjs")
+        .help("Treats ambiguous .js, .jsx, .ts, .tsx files as CommonJS modules in more cases")
         .value_parser(FalseyValueParser::new())
        .action(ArgAction::SetTrue)
        .hide(true)
@@ -4406,6 +4424,16 @@ impl CommandExt for Command {
       })
         .help_heading(UNSTABLE_HEADING)
         .display_order(next_display_order())
+    ).arg(
+      Arg::new("unstable-npm-lazy-caching")
+        .long("unstable-npm-lazy-caching")
+        .help("Enable unstable lazy caching of npm dependencies, downloading them only as needed (disabled: all npm packages in package.json are installed on startup; enabled: only npm packages that are actually referenced in an import are installed")
+        .env("DENO_UNSTABLE_NPM_LAZY_CACHING")
+        .value_parser(FalseyValueParser::new())
+        .action(ArgAction::SetTrue)
+        .hide(true)
+        .help_heading(UNSTABLE_HEADING)
+        .display_order(next_display_order()),
     );

     for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS.iter() {
@@ -4919,15 +4947,14 @@ fn install_parse(
     let module_url = cmd_values.next().unwrap();
     let args = cmd_values.collect();

-    flags.subcommand = DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Global(InstallFlagsGlobal {
+    flags.subcommand =
+      DenoSubcommand::Install(InstallFlags::Global(InstallFlagsGlobal {
         name,
         module_url,
         args,
         root,
         force,
-      }),
-    });
+      }));

     return Ok(());
   }
@@ -4936,22 +4963,19 @@ fn install_parse(
   allow_scripts_arg_parse(flags, matches)?;
   if matches.get_flag("entrypoint") {
     let entrypoints = matches.remove_many::<String>("cmd").unwrap_or_default();
-    flags.subcommand = DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Local(InstallFlagsLocal::Entrypoints(
-        entrypoints.collect(),
-      )),
-    });
+    flags.subcommand = DenoSubcommand::Install(InstallFlags::Local(
+      InstallFlagsLocal::Entrypoints(entrypoints.collect()),
+    ));
   } else if let Some(add_files) = matches
     .remove_many("cmd")
     .map(|packages| add_parse_inner(matches, Some(packages)))
   {
-    flags.subcommand = DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Local(InstallFlagsLocal::Add(add_files)),
-    })
+    flags.subcommand = DenoSubcommand::Install(InstallFlags::Local(
+      InstallFlagsLocal::Add(add_files),
+    ))
   } else {
-    flags.subcommand = DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Local(InstallFlagsLocal::TopLevel),
-    });
+    flags.subcommand =
+      DenoSubcommand::Install(InstallFlags::Local(InstallFlagsLocal::TopLevel));
   }
   Ok(())
 }
@@ -5083,6 +5107,7 @@ fn lint_parse(
   unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionOnly);
   ext_arg_parse(flags, matches);
   config_args_parse(flags, matches);
+  allow_import_parse(flags, matches);

   let files = match matches.remove_many::<String>("files") {
     Some(f) => f.collect(),
@@ -5277,8 +5302,15 @@ fn task_parse(
   unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
   node_modules_arg_parse(flags, matches);

-  let filter = matches.remove_one::<String>("filter");
-  let recursive = matches.get_flag("recursive") || filter.is_some();
+  let mut recursive = matches.get_flag("recursive");
+  let filter = if let Some(filter) = matches.remove_one::<String>("filter") {
+    recursive = false;
+    Some(filter)
+  } else if recursive {
+    Some("*".to_string())
+  } else {
+    None
+  };

   let mut task_flags = TaskFlags {
     cwd: matches.remove_one::<String>("cwd"),
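Aside: the updated tests further down show the intent of this rewrite: an explicit `--filter` now clears the separate recursive bit while carrying the pattern, and a bare `--recursive` is normalized to a `"*"` filter. A small stand-alone sketch of that normalization follows; the helper is illustrative, not the actual deno code, and the expected values come from the updated tests.

// Sketch of the new --filter/--recursive normalization in task_parse.
// Returns (recursive, filter) as the updated code stores them in TaskFlags.
fn normalize(recursive_flag: bool, filter_arg: Option<&str>) -> (bool, Option<String>) {
  let mut recursive = recursive_flag;
  let filter = if let Some(filter) = filter_arg {
    // An explicit filter selects workspace members itself, so the separate
    // recursive bit is cleared.
    recursive = false;
    Some(filter.to_string())
  } else if recursive {
    // Bare --recursive means "run in every member": normalize to a "*" filter.
    Some("*".to_string())
  } else {
    None
  };
  (recursive, filter)
}

fn main() {
  assert_eq!(normalize(false, Some("*")), (false, Some("*".to_string())));
  assert_eq!(normalize(true, None), (true, Some("*".to_string())));
  assert_eq!(normalize(false, None), (false, None));
}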
@@ -5986,8 +6018,11 @@ fn unstable_args_parse(

   flags.unstable_config.bare_node_builtins =
     matches.get_flag("unstable-bare-node-builtins");
+  flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs");
   flags.unstable_config.sloppy_imports =
     matches.get_flag("unstable-sloppy-imports");
+  flags.unstable_config.npm_lazy_caching =
+    matches.get_flag("unstable-npm-lazy-caching");

   if matches!(cfg, UnstableArgsConfig::ResolutionAndRuntime) {
     for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
@@ -7133,6 +7168,7 @@ mod tests {
     let r = flags_from_vec(svec![
       "deno",
       "lint",
+      "--allow-import",
       "--watch",
       "script_1.ts",
       "script_2.ts"
@@ -7154,6 +7190,10 @@ mod tests {
         compact: false,
         watch: Some(Default::default()),
       }),
+      permissions: PermissionFlags {
+        allow_import: Some(vec![]),
+        ..Default::default()
+      },
       ..Flags::default()
     }
   );
@@ -8591,15 +8631,15 @@ mod tests {
     assert_eq!(
       r.unwrap(),
       Flags {
-        subcommand: DenoSubcommand::Install(InstallFlags {
-          kind: InstallKind::Global(InstallFlagsGlobal {
+        subcommand: DenoSubcommand::Install(InstallFlags::Global(
+          InstallFlagsGlobal {
             name: None,
             module_url: "jsr:@std/http/file-server".to_string(),
             args: vec![],
             root: None,
             force: false,
-          }),
-        }),
+          }
+        ),),
         ..Flags::default()
       }
     );
@@ -8613,15 +8653,15 @@ mod tests {
     assert_eq!(
       r.unwrap(),
       Flags {
-        subcommand: DenoSubcommand::Install(InstallFlags {
-          kind: InstallKind::Global(InstallFlagsGlobal {
+        subcommand: DenoSubcommand::Install(InstallFlags::Global(
+          InstallFlagsGlobal {
            name: None,
            module_url: "jsr:@std/http/file-server".to_string(),
            args: vec![],
            root: None,
            force: false,
-          }),
-        }),
+          }
+        ),),
         ..Flags::default()
       }
     );
@@ -8634,15 +8674,15 @@ mod tests {
     assert_eq!(
       r.unwrap(),
       Flags {
-        subcommand: DenoSubcommand::Install(InstallFlags {
-          kind: InstallKind::Global(InstallFlagsGlobal {
+        subcommand: DenoSubcommand::Install(InstallFlags::Global(
+          InstallFlagsGlobal {
            name: Some("file_server".to_string()),
            module_url: "jsr:@std/http/file-server".to_string(),
            args: svec!["foo", "bar"],
            root: Some("/foo".to_string()),
            force: true,
-          }),
-        }),
+          }
+        ),),
         import_map_path: Some("import_map.json".to_string()),
         no_remote: true,
         config_flag: ConfigFlag::Path("tsconfig.json".to_owned()),
@@ -10537,7 +10577,7 @@ mod tests {
         cwd: None,
         task: Some("build".to_string()),
         is_run: false,
-        recursive: true,
+        recursive: false,
         filter: Some("*".to_string()),
         eval: false,
       }),
@@ -10554,7 +10594,7 @@ mod tests {
         task: Some("build".to_string()),
         is_run: false,
         recursive: true,
-        filter: None,
+        filter: Some("*".to_string()),
         eval: false,
       }),
       ..Flags::default()
@@ -10570,7 +10610,7 @@ mod tests {
         task: Some("build".to_string()),
         is_run: false,
         recursive: true,
-        filter: None,
+        filter: Some("*".to_string()),
         eval: false,
       }),
       ..Flags::default()
@@ -11196,9 +11236,9 @@ mod tests {
         ..Flags::default()
       },
       "install" => Flags {
-        subcommand: DenoSubcommand::Install(InstallFlags {
-          kind: InstallKind::Local(InstallFlagsLocal::Add(flags)),
-        }),
+        subcommand: DenoSubcommand::Install(InstallFlags::Local(
+          InstallFlagsLocal::Add(flags),
+        )),
         ..Flags::default()
       },
       _ => unreachable!(),
@@ -11685,6 +11725,14 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
           recursive: false,
         },
       ),
+      (
+        svec!["--latest"],
+        OutdatedFlags {
+          filters: svec![],
+          kind: OutdatedKind::PrintOutdated { compatible: false },
+          recursive: false,
+        },
+      ),
     ];
     for (input, expected) in cases {
       let mut args = svec!["deno", "outdated"];

@@ -4,21 +4,21 @@ use deno_core::error::AnyError;
 use deno_core::serde_json;
 use deno_core::url::Url;

-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
+use crate::file_fetcher::TextDecodedFile;

 pub async fn resolve_import_map_value_from_specifier(
   specifier: &Url,
-  file_fetcher: &FileFetcher,
+  file_fetcher: &CliFileFetcher,
 ) -> Result<serde_json::Value, AnyError> {
   if specifier.scheme() == "data" {
     let data_url_text =
       deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
     Ok(serde_json::from_str(&data_url_text)?)
   } else {
-    let file = file_fetcher
-      .fetch_bypass_permissions(specifier)
-      .await?
-      .into_text_decoded()?;
+    let file = TextDecodedFile::decode(
+      file_fetcher.fetch_bypass_permissions(specifier).await?,
+    )?;
     Ok(serde_json::from_str(&file.source)?)
   }
 }
|
@@ -9,18 +9,19 @@ use deno_core::anyhow::Context;
  use deno_core::error::AnyError;
  use deno_core::parking_lot::Mutex;
  use deno_core::parking_lot::MutexGuard;
+ use deno_core::serde_json;
  use deno_lockfile::WorkspaceMemberConfig;
  use deno_package_json::PackageJsonDepValue;
  use deno_runtime::deno_node::PackageJson;
  use deno_semver::jsr::JsrDepPackageReq;

+ use crate::args::deno_json::import_map_deps;
  use crate::cache;
  use crate::util::fs::atomic_write_file_with_retries;
  use crate::Flags;

  use crate::args::DenoSubcommand;
  use crate::args::InstallFlags;
- use crate::args::InstallKind;

  use deno_lockfile::Lockfile;

@@ -102,6 +103,7 @@ impl CliLockfile {
  pub fn discover(
  flags: &Flags,
  workspace: &Workspace,
+ maybe_external_import_map: Option<&serde_json::Value>,
  ) -> Result<Option<CliLockfile>, AnyError> {
  fn pkg_json_deps(
  maybe_pkg_json: Option<&PackageJson>,
@@ -109,9 +111,12 @@ impl CliLockfile {
  let Some(pkg_json) = maybe_pkg_json else {
  return Default::default();
  };
- pkg_json
- .resolve_local_package_json_deps()
+ let deps = pkg_json.resolve_local_package_json_deps();
+ deps
+ .dependencies
  .values()
+ .chain(deps.dev_dependencies.values())
  .filter_map(|dep| dep.as_ref().ok())
  .filter_map(|dep| match dep {
  PackageJsonDepValue::Req(req) => {
@@ -133,10 +138,8 @@ impl CliLockfile {
  if flags.no_lock
  || matches!(
  flags.subcommand,
- DenoSubcommand::Install(InstallFlags {
- kind: InstallKind::Global(..),
- ..
- }) | DenoSubcommand::Uninstall(_)
+ DenoSubcommand::Install(InstallFlags::Global(..))
+ | DenoSubcommand::Uninstall(_)
  )
  {
  return Ok(None);
@@ -171,7 +174,11 @@ impl CliLockfile {
  let config = deno_lockfile::WorkspaceConfig {
  root: WorkspaceMemberConfig {
  package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
- dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
+ dependencies: if let Some(map) = maybe_external_import_map {
+ import_map_deps(map)
+ } else {
+ deno_json_deps(root_folder.deno_json.as_deref())
+ },
  },
  members: workspace
  .config_folders()
189 cli/args/mod.rs
@ -9,6 +9,7 @@ mod package_json;
|
||||||
|
|
||||||
use deno_ast::MediaType;
|
use deno_ast::MediaType;
|
||||||
use deno_ast::SourceMapOption;
|
use deno_ast::SourceMapOption;
|
||||||
|
use deno_cache_dir::file_fetcher::CacheSetting;
|
||||||
use deno_config::deno_json::NodeModulesDirMode;
|
use deno_config::deno_json::NodeModulesDirMode;
|
||||||
use deno_config::workspace::CreateResolverOptions;
|
use deno_config::workspace::CreateResolverOptions;
|
||||||
use deno_config::workspace::FolderConfigs;
|
use deno_config::workspace::FolderConfigs;
|
||||||
|
@ -23,13 +24,16 @@ use deno_config::workspace::WorkspaceLintConfig;
|
||||||
use deno_config::workspace::WorkspaceResolver;
|
use deno_config::workspace::WorkspaceResolver;
|
||||||
use deno_core::resolve_url_or_path;
|
use deno_core::resolve_url_or_path;
|
||||||
use deno_graph::GraphKind;
|
use deno_graph::GraphKind;
|
||||||
|
use deno_lint::linter::LintConfig as DenoLintConfig;
|
||||||
use deno_npm::npm_rc::NpmRc;
|
use deno_npm::npm_rc::NpmRc;
|
||||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||||
use deno_npm::NpmSystemInfo;
|
use deno_npm::NpmSystemInfo;
|
||||||
use deno_path_util::normalize_path;
|
use deno_path_util::normalize_path;
|
||||||
use deno_runtime::ops::otel::OtelConfig;
|
|
||||||
use deno_semver::npm::NpmPackageReqReference;
|
use deno_semver::npm::NpmPackageReqReference;
|
||||||
|
use deno_semver::StackString;
|
||||||
|
use deno_telemetry::OtelConfig;
|
||||||
|
use deno_telemetry::OtelRuntimeConfig;
|
||||||
use import_map::resolve_import_map_value_from_specifier;
|
use import_map::resolve_import_map_value_from_specifier;
|
||||||
|
|
||||||
pub use deno_config::deno_json::BenchConfig;
|
pub use deno_config::deno_json::BenchConfig;
|
||||||
|
@ -83,7 +87,7 @@ use thiserror::Error;
|
||||||
|
|
||||||
use crate::cache;
|
use crate::cache;
|
||||||
use crate::cache::DenoDirProvider;
|
use crate::cache::DenoDirProvider;
|
||||||
use crate::file_fetcher::FileFetcher;
|
use crate::file_fetcher::CliFileFetcher;
|
||||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||||
use crate::version;
|
use crate::version;
|
||||||
|
|
||||||
|
@@ -215,47 +219,6 @@ pub fn ts_config_to_transpile_and_emit_options(
  ))
  }

- /// Indicates how cached source files should be handled.
- #[derive(Debug, Clone, Eq, PartialEq)]
- pub enum CacheSetting {
- /// Only the cached files should be used. Any files not in the cache will
- /// error. This is the equivalent of `--cached-only` in the CLI.
- Only,
- /// No cached source files should be used, and all files should be reloaded.
- /// This is the equivalent of `--reload` in the CLI.
- ReloadAll,
- /// Only some cached resources should be used. This is the equivalent of
- /// `--reload=jsr:@std/http/file-server` or
- /// `--reload=jsr:@std/http/file-server,jsr:@std/assert/assert-equals`.
- ReloadSome(Vec<String>),
- /// The usability of a cached value is determined by analyzing the cached
- /// headers and other metadata associated with a cached response, reloading
- /// any cached "non-fresh" cached responses.
- RespectHeaders,
- /// The cached source files should be used for local modules. This is the
- /// default behavior of the CLI.
- Use,
- }
-
- impl CacheSetting {
- pub fn should_use_for_npm_package(&self, package_name: &str) -> bool {
- match self {
- CacheSetting::ReloadAll => false,
- CacheSetting::ReloadSome(list) => {
- if list.iter().any(|i| i == "npm:") {
- return false;
- }
- let specifier = format!("npm:{package_name}");
- if list.contains(&specifier) {
- return false;
- }
- true
- }
- _ => true,
- }
- }
- }
-
  pub struct WorkspaceBenchOptions {
  pub filter: Option<String>,
  pub json: bool,
|
||||||
maybe_node_modules_folder: Option<PathBuf>,
|
maybe_node_modules_folder: Option<PathBuf>,
|
||||||
npmrc: Arc<ResolvedNpmRc>,
|
npmrc: Arc<ResolvedNpmRc>,
|
||||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||||
|
maybe_external_import_map: Option<(PathBuf, serde_json::Value)>,
|
||||||
overrides: CliOptionOverrides,
|
overrides: CliOptionOverrides,
|
||||||
pub start_dir: Arc<WorkspaceDirectory>,
|
pub start_dir: Arc<WorkspaceDirectory>,
|
||||||
pub deno_dir_provider: Arc<DenoDirProvider>,
|
pub deno_dir_provider: Arc<DenoDirProvider>,
|
||||||
|
@ -814,6 +778,7 @@ impl CliOptions {
|
||||||
npmrc: Arc<ResolvedNpmRc>,
|
npmrc: Arc<ResolvedNpmRc>,
|
||||||
start_dir: Arc<WorkspaceDirectory>,
|
start_dir: Arc<WorkspaceDirectory>,
|
||||||
force_global_cache: bool,
|
force_global_cache: bool,
|
||||||
|
maybe_external_import_map: Option<(PathBuf, serde_json::Value)>,
|
||||||
) -> Result<Self, AnyError> {
|
) -> Result<Self, AnyError> {
|
||||||
if let Some(insecure_allowlist) =
|
if let Some(insecure_allowlist) =
|
||||||
flags.unsafely_ignore_certificate_errors.as_ref()
|
flags.unsafely_ignore_certificate_errors.as_ref()
|
||||||
|
@ -851,6 +816,7 @@ impl CliOptions {
|
||||||
maybe_node_modules_folder,
|
maybe_node_modules_folder,
|
||||||
overrides: Default::default(),
|
overrides: Default::default(),
|
||||||
main_module_cell: std::sync::OnceLock::new(),
|
main_module_cell: std::sync::OnceLock::new(),
|
||||||
|
maybe_external_import_map,
|
||||||
start_dir,
|
start_dir,
|
||||||
deno_dir_provider,
|
deno_dir_provider,
|
||||||
})
|
})
|
||||||
|
@@ -926,7 +892,33 @@ impl CliOptions {

  let (npmrc, _) = discover_npmrc_from_workspace(&start_dir.workspace)?;

- let maybe_lock_file = CliLockfile::discover(&flags, &start_dir.workspace)?;
+ fn load_external_import_map(
+ deno_json: &ConfigFile,
+ ) -> Result<Option<(PathBuf, serde_json::Value)>, AnyError> {
+ if !deno_json.is_an_import_map() {
+ if let Some(path) = deno_json.to_import_map_path()? {
+ let contents = std::fs::read_to_string(&path).with_context(|| {
+ format!("Unable to read import map at '{}'", path.display())
+ })?;
+ let map = serde_json::from_str(&contents)?;
+ return Ok(Some((path, map)));
+ }
+ }
+ Ok(None)
+ }
+
+ let external_import_map =
+ if let Some(deno_json) = start_dir.workspace.root_deno_json() {
+ load_external_import_map(deno_json)?
+ } else {
+ None
+ };
+
+ let maybe_lock_file = CliLockfile::discover(
+ &flags,
+ &start_dir.workspace,
+ external_import_map.as_ref().map(|(_, v)| v),
+ )?;
+
  log::debug!("Finished config loading.");

|
@ -937,6 +929,7 @@ impl CliOptions {
|
||||||
npmrc,
|
npmrc,
|
||||||
Arc::new(start_dir),
|
Arc::new(start_dir),
|
||||||
false,
|
false,
|
||||||
|
external_import_map,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -964,9 +957,7 @@ impl CliOptions {
|
||||||
match self.sub_command() {
|
match self.sub_command() {
|
||||||
DenoSubcommand::Cache(_) => GraphKind::All,
|
DenoSubcommand::Cache(_) => GraphKind::All,
|
||||||
DenoSubcommand::Check(_) => GraphKind::TypesOnly,
|
DenoSubcommand::Check(_) => GraphKind::TypesOnly,
|
||||||
DenoSubcommand::Install(InstallFlags {
|
DenoSubcommand::Install(InstallFlags::Local(_)) => GraphKind::All,
|
||||||
kind: InstallKind::Local(_),
|
|
||||||
}) => GraphKind::All,
|
|
||||||
_ => self.type_check_mode().as_graph_kind(),
|
_ => self.type_check_mode().as_graph_kind(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1002,24 +993,24 @@ impl CliOptions {
|
||||||
// https://nodejs.org/api/process.html
|
// https://nodejs.org/api/process.html
|
||||||
match target.as_str() {
|
match target.as_str() {
|
||||||
"aarch64-apple-darwin" => NpmSystemInfo {
|
"aarch64-apple-darwin" => NpmSystemInfo {
|
||||||
os: "darwin".to_string(),
|
os: "darwin".into(),
|
||||||
cpu: "arm64".to_string(),
|
cpu: "arm64".into(),
|
||||||
},
|
},
|
||||||
"aarch64-unknown-linux-gnu" => NpmSystemInfo {
|
"aarch64-unknown-linux-gnu" => NpmSystemInfo {
|
||||||
os: "linux".to_string(),
|
os: "linux".into(),
|
||||||
cpu: "arm64".to_string(),
|
cpu: "arm64".into(),
|
||||||
},
|
},
|
||||||
"x86_64-apple-darwin" => NpmSystemInfo {
|
"x86_64-apple-darwin" => NpmSystemInfo {
|
||||||
os: "darwin".to_string(),
|
os: "darwin".into(),
|
||||||
cpu: "x64".to_string(),
|
cpu: "x64".into(),
|
||||||
},
|
},
|
||||||
"x86_64-unknown-linux-gnu" => NpmSystemInfo {
|
"x86_64-unknown-linux-gnu" => NpmSystemInfo {
|
||||||
os: "linux".to_string(),
|
os: "linux".into(),
|
||||||
cpu: "x64".to_string(),
|
cpu: "x64".into(),
|
||||||
},
|
},
|
||||||
"x86_64-pc-windows-msvc" => NpmSystemInfo {
|
"x86_64-pc-windows-msvc" => NpmSystemInfo {
|
||||||
os: "win32".to_string(),
|
os: "win32".into(),
|
||||||
cpu: "x64".to_string(),
|
cpu: "x64".into(),
|
||||||
},
|
},
|
||||||
value => {
|
value => {
|
||||||
log::warn!(
|
log::warn!(
|
||||||
|
@ -1056,10 +1047,10 @@ impl CliOptions {
|
||||||
|
|
||||||
pub async fn create_workspace_resolver(
|
pub async fn create_workspace_resolver(
|
||||||
&self,
|
&self,
|
||||||
file_fetcher: &FileFetcher,
|
file_fetcher: &CliFileFetcher,
|
||||||
pkg_json_dep_resolution: PackageJsonDepResolution,
|
pkg_json_dep_resolution: PackageJsonDepResolution,
|
||||||
) -> Result<WorkspaceResolver, AnyError> {
|
) -> Result<WorkspaceResolver, AnyError> {
|
||||||
let overrode_no_import_map = self
|
let overrode_no_import_map: bool = self
|
||||||
.overrides
|
.overrides
|
||||||
.import_map_specifier
|
.import_map_specifier
|
||||||
.as_ref()
|
.as_ref()
|
||||||
|
@ -1087,7 +1078,19 @@ impl CliOptions {
|
||||||
value,
|
value,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
None => None,
|
None => {
|
||||||
|
if let Some((path, import_map)) =
|
||||||
|
self.maybe_external_import_map.as_ref()
|
||||||
|
{
|
||||||
|
let path_url = deno_path_util::url_from_file_path(path)?;
|
||||||
|
Some(deno_config::workspace::SpecifiedImportMap {
|
||||||
|
base_url: path_url,
|
||||||
|
value: import_map.clone(),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
Ok(self.workspace().create_resolver(
|
Ok(self.workspace().create_resolver(
|
||||||
|
@ -1126,7 +1129,7 @@ impl CliOptions {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn otel_config(&self) -> Option<OtelConfig> {
|
pub fn otel_config(&self) -> OtelConfig {
|
||||||
self.flags.otel_config()
|
self.flags.otel_config()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1350,9 +1353,7 @@ impl CliOptions {
|
||||||
Ok(result)
|
Ok(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_deno_lint_config(
|
pub fn resolve_deno_lint_config(&self) -> Result<DenoLintConfig, AnyError> {
|
||||||
&self,
|
|
||||||
) -> Result<deno_lint::linter::LintConfig, AnyError> {
|
|
||||||
let ts_config_result =
|
let ts_config_result =
|
||||||
self.resolve_ts_config_for_emit(TsConfigType::Emit)?;
|
self.resolve_ts_config_for_emit(TsConfigType::Emit)?;
|
||||||
|
|
||||||
|
@ -1361,11 +1362,11 @@ impl CliOptions {
|
||||||
ts_config_result.ts_config,
|
ts_config_result.ts_config,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
Ok(deno_lint::linter::LintConfig {
|
Ok(DenoLintConfig {
|
||||||
default_jsx_factory: (!transpile_options.jsx_automatic)
|
default_jsx_factory: (!transpile_options.jsx_automatic)
|
||||||
.then(|| transpile_options.jsx_factory.clone()),
|
.then_some(transpile_options.jsx_factory),
|
||||||
default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
|
default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
|
||||||
.then(|| transpile_options.jsx_fragment_factory.clone()),
|
.then_some(transpile_options.jsx_fragment_factory),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1543,11 +1544,11 @@ impl CliOptions {
|
||||||
DenoSubcommand::Check(check_flags) => {
|
DenoSubcommand::Check(check_flags) => {
|
||||||
Some(files_to_urls(&check_flags.files))
|
Some(files_to_urls(&check_flags.files))
|
||||||
}
|
}
|
||||||
DenoSubcommand::Install(InstallFlags {
|
DenoSubcommand::Install(InstallFlags::Global(flags)) => {
|
||||||
kind: InstallKind::Global(flags),
|
Url::parse(&flags.module_url)
|
||||||
}) => Url::parse(&flags.module_url)
|
.ok()
|
||||||
.ok()
|
.map(|url| vec![Cow::Owned(url)])
|
||||||
.map(|url| vec![Cow::Owned(url)]),
|
}
|
||||||
DenoSubcommand::Doc(DocFlags {
|
DenoSubcommand::Doc(DocFlags {
|
||||||
source_files: DocSourceFileFlag::Paths(paths),
|
source_files: DocSourceFileFlag::Paths(paths),
|
||||||
..
|
..
|
||||||
|
@ -1606,6 +1607,11 @@ impl CliOptions {
|
||||||
|| self.workspace().has_unstable("bare-node-builtins")
|
|| self.workspace().has_unstable("bare-node-builtins")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn unstable_detect_cjs(&self) -> bool {
|
||||||
|
self.flags.unstable_config.detect_cjs
|
||||||
|
|| self.workspace().has_unstable("detect-cjs")
|
||||||
|
}
|
||||||
|
|
||||||
pub fn detect_cjs(&self) -> bool {
|
pub fn detect_cjs(&self) -> bool {
|
||||||
// only enabled when there's a package.json in order to not have a
|
// only enabled when there's a package.json in order to not have a
|
||||||
// perf penalty for non-npm Deno projects of searching for the closest
|
// perf penalty for non-npm Deno projects of searching for the closest
|
||||||
|
@ -1675,8 +1681,10 @@ impl CliOptions {
|
||||||
"sloppy-imports",
|
"sloppy-imports",
|
||||||
"byonm",
|
"byonm",
|
||||||
"bare-node-builtins",
|
"bare-node-builtins",
|
||||||
|
"detect-cjs",
|
||||||
"fmt-component",
|
"fmt-component",
|
||||||
"fmt-sql",
|
"fmt-sql",
|
||||||
|
"lazy-npm-caching",
|
||||||
])
|
])
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
|
@ -1755,6 +1763,19 @@ impl CliOptions {
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn unstable_npm_lazy_caching(&self) -> bool {
|
||||||
|
self.flags.unstable_config.npm_lazy_caching
|
||||||
|
|| self.workspace().has_unstable("npm-lazy-caching")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn default_npm_caching_strategy(&self) -> NpmCachingStrategy {
|
||||||
|
if self.flags.unstable_config.npm_lazy_caching {
|
||||||
|
NpmCachingStrategy::Lazy
|
||||||
|
} else {
|
||||||
|
NpmCachingStrategy::Eager
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Resolves the path to use for a local node_modules folder.
|
/// Resolves the path to use for a local node_modules folder.
|
||||||
|
@@ -1926,15 +1947,17 @@ pub fn has_flag_env_var(name: &str) -> bool {
  pub fn npm_pkg_req_ref_to_binary_command(
  req_ref: &NpmPackageReqReference,
  ) -> String {
- let binary_name = req_ref.sub_path().unwrap_or(req_ref.req().name.as_str());
- binary_name.to_string()
+ req_ref
+ .sub_path()
+ .map(|s| s.to_string())
+ .unwrap_or_else(|| req_ref.req().name.to_string())
  }

  pub fn config_to_deno_graph_workspace_member(
  config: &ConfigFile,
  ) -> Result<deno_graph::WorkspaceMember, AnyError> {
- let name = match &config.json.name {
- Some(name) => name.clone(),
+ let name: StackString = match &config.json.name {
+ Some(name) => name.as_str().into(),
  None => bail!("Missing 'name' field in config file."),
  };
  let version = match &config.json.version {
@@ -1969,6 +1992,20 @@ fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) {
  }
  }

+ #[derive(Debug, Clone, Copy)]
+ pub enum NpmCachingStrategy {
+ Eager,
+ Lazy,
+ Manual,
+ }
+
+ pub(crate) fn otel_runtime_config() -> OtelRuntimeConfig {
+ OtelRuntimeConfig {
+ runtime_name: Cow::Borrowed("deno"),
+ runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
+ }
+ }
+
  #[cfg(test)]
  mod test {
  use pretty_assertions::assert_eq;
@@ -8,20 +8,23 @@ use deno_core::serde_json;
  use deno_core::url::Url;
  use deno_package_json::PackageJsonDepValue;
  use deno_package_json::PackageJsonDepValueParseError;
+ use deno_package_json::PackageJsonDepWorkspaceReq;
  use deno_semver::npm::NpmPackageReqReference;
  use deno_semver::package::PackageReq;
+ use deno_semver::StackString;
+ use deno_semver::VersionReq;
  use thiserror::Error;

  #[derive(Debug)]
  pub struct InstallNpmRemotePkg {
- pub alias: Option<String>,
+ pub alias: Option<StackString>,
  pub base_dir: PathBuf,
  pub req: PackageReq,
  }

  #[derive(Debug)]
  pub struct InstallNpmWorkspacePkg {
- pub alias: Option<String>,
+ pub alias: Option<StackString>,
  pub target_dir: PathBuf,
  }

@@ -29,7 +32,7 @@ pub struct InstallNpmWorkspacePkg {
  #[error("Failed to install '{}'\n at {}", alias, location)]
  pub struct PackageJsonDepValueParseWithLocationError {
  pub location: Url,
- pub alias: String,
+ pub alias: StackString,
  #[source]
  pub source: PackageJsonDepValueParseError,
  }
@@ -95,16 +98,20 @@ impl NpmInstallDepsProvider {

  if let Some(pkg_json) = &folder.pkg_json {
  let deps = pkg_json.resolve_local_package_json_deps();
- let mut pkg_pkgs = Vec::with_capacity(deps.len());
- for (alias, dep) in deps {
+ let mut pkg_pkgs = Vec::with_capacity(
+ deps.dependencies.len() + deps.dev_dependencies.len(),
+ );
+ for (alias, dep) in
+ deps.dependencies.iter().chain(deps.dev_dependencies.iter())
+ {
  let dep = match dep {
  Ok(dep) => dep,
  Err(err) => {
  pkg_json_dep_errors.push(
  PackageJsonDepValueParseWithLocationError {
  location: pkg_json.specifier(),
- alias,
- source: err,
+ alias: alias.clone(),
+ source: err.clone(),
  },
  );
  continue;
@@ -113,30 +120,39 @@ impl NpmInstallDepsProvider {
  match dep {
  PackageJsonDepValue::Req(pkg_req) => {
  let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
- pkg.matches_req(&pkg_req)
+ pkg.matches_req(pkg_req)
  // do not resolve to the current package
  && pkg.pkg_json.path != pkg_json.path
  });

  if let Some(pkg) = workspace_pkg {
  workspace_pkgs.push(InstallNpmWorkspacePkg {
- alias: Some(alias),
+ alias: Some(alias.clone()),
  target_dir: pkg.pkg_json.dir_path().to_path_buf(),
  });
  } else {
  pkg_pkgs.push(InstallNpmRemotePkg {
- alias: Some(alias),
+ alias: Some(alias.clone()),
  base_dir: pkg_json.dir_path().to_path_buf(),
- req: pkg_req,
+ req: pkg_req.clone(),
  });
  }
  }
- PackageJsonDepValue::Workspace(version_req) => {
+ PackageJsonDepValue::Workspace(workspace_version_req) => {
+ let version_req = match workspace_version_req {
+ PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
+ version_req.clone()
+ }
+ PackageJsonDepWorkspaceReq::Tilde
+ | PackageJsonDepWorkspaceReq::Caret => {
+ VersionReq::parse_from_npm("*").unwrap()
+ }
+ };
  if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
- pkg.matches_name_and_version_req(&alias, &version_req)
+ pkg.matches_name_and_version_req(alias, &version_req)
  }) {
  workspace_pkgs.push(InstallNpmWorkspacePkg {
- alias: Some(alias),
+ alias: Some(alias.clone()),
  target_dir: pkg.pkg_json.dir_path().to_path_buf(),
  });
  }
@ -1,369 +0,0 @@
|
||||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
|
||||||
|
|
||||||
use base64::prelude::BASE64_STANDARD;
|
|
||||||
use base64::Engine;
|
|
||||||
use deno_core::ModuleSpecifier;
|
|
||||||
use log::debug;
|
|
||||||
use log::error;
|
|
||||||
use std::borrow::Cow;
|
|
||||||
use std::fmt;
|
|
||||||
use std::net::IpAddr;
|
|
||||||
use std::net::Ipv4Addr;
|
|
||||||
use std::net::Ipv6Addr;
|
|
||||||
use std::net::SocketAddr;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
|
||||||
pub enum AuthTokenData {
|
|
||||||
Bearer(String),
|
|
||||||
Basic { username: String, password: String },
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
|
||||||
pub struct AuthToken {
|
|
||||||
host: AuthDomain,
|
|
||||||
token: AuthTokenData,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for AuthToken {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match &self.token {
|
|
||||||
AuthTokenData::Bearer(token) => write!(f, "Bearer {token}"),
|
|
||||||
AuthTokenData::Basic { username, password } => {
|
|
||||||
let credentials = format!("{username}:{password}");
|
|
||||||
write!(f, "Basic {}", BASE64_STANDARD.encode(credentials))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A structure which contains bearer tokens that can be used when sending
|
|
||||||
/// requests to websites, intended to authorize access to private resources
|
|
||||||
/// such as remote modules.
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct AuthTokens(Vec<AuthToken>);
|
|
||||||
|
|
||||||
/// An authorization domain, either an exact or suffix match.
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
|
||||||
pub enum AuthDomain {
|
|
||||||
Ip(IpAddr),
|
|
||||||
IpPort(SocketAddr),
|
|
||||||
/// Suffix match, no dot. May include a port.
|
|
||||||
Suffix(Cow<'static, str>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: ToString> From<T> for AuthDomain {
|
|
||||||
fn from(value: T) -> Self {
|
|
||||||
let s = value.to_string().to_lowercase();
|
|
||||||
if let Ok(ip) = SocketAddr::from_str(&s) {
|
|
||||||
return AuthDomain::IpPort(ip);
|
|
||||||
};
|
|
||||||
if s.starts_with('[') && s.ends_with(']') {
|
|
||||||
if let Ok(ip) = Ipv6Addr::from_str(&s[1..s.len() - 1]) {
|
|
||||||
return AuthDomain::Ip(ip.into());
|
|
||||||
}
|
|
||||||
} else if let Ok(ip) = Ipv4Addr::from_str(&s) {
|
|
||||||
return AuthDomain::Ip(ip.into());
|
|
||||||
}
|
|
||||||
if let Some(s) = s.strip_prefix('.') {
|
|
||||||
AuthDomain::Suffix(Cow::Owned(s.to_owned()))
|
|
||||||
} else {
|
|
||||||
AuthDomain::Suffix(Cow::Owned(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AuthDomain {
|
|
||||||
pub fn matches(&self, specifier: &ModuleSpecifier) -> bool {
|
|
||||||
let Some(host) = specifier.host_str() else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
match *self {
|
|
||||||
Self::Ip(ip) => {
|
|
||||||
let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
ip == parsed && specifier.port().is_none()
|
|
||||||
}
|
|
||||||
Self::IpPort(ip) => {
|
|
||||||
let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
ip.ip() == parsed && specifier.port() == Some(ip.port())
|
|
||||||
}
|
|
||||||
Self::Suffix(ref suffix) => {
|
|
||||||
let hostname = if let Some(port) = specifier.port() {
|
|
||||||
Cow::Owned(format!("{}:{}", host, port))
|
|
||||||
} else {
|
|
||||||
Cow::Borrowed(host)
|
|
||||||
};
|
|
||||||
|
|
||||||
if suffix.len() == hostname.len() {
|
|
||||||
return suffix == &hostname;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If it's a suffix match, ensure a dot
|
|
||||||
if hostname.ends_with(suffix.as_ref())
|
|
||||||
&& hostname.ends_with(&format!(".{suffix}"))
|
|
||||||
{
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AuthTokens {
|
|
||||||
/// Create a new set of tokens based on the provided string. It is intended
|
|
||||||
/// that the string be the value of an environment variable and the string is
|
|
||||||
/// parsed for token values. The string is expected to be a semi-colon
|
|
||||||
/// separated string, where each value is `{token}@{hostname}`.
|
|
||||||
pub fn new(maybe_tokens_str: Option<String>) -> Self {
|
|
||||||
let mut tokens = Vec::new();
|
|
||||||
if let Some(tokens_str) = maybe_tokens_str {
|
|
||||||
for token_str in tokens_str.trim().split(';') {
|
|
||||||
if token_str.contains('@') {
|
|
||||||
let mut iter = token_str.rsplitn(2, '@');
|
|
||||||
let host = AuthDomain::from(iter.next().unwrap());
|
|
||||||
let token = iter.next().unwrap();
|
|
||||||
if token.contains(':') {
|
|
||||||
let mut iter = token.rsplitn(2, ':');
|
|
||||||
let password = iter.next().unwrap().to_owned();
|
|
||||||
let username = iter.next().unwrap().to_owned();
|
|
||||||
tokens.push(AuthToken {
|
|
||||||
host,
|
|
||||||
token: AuthTokenData::Basic { username, password },
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
tokens.push(AuthToken {
|
|
||||||
host,
|
|
||||||
token: AuthTokenData::Bearer(token.to_string()),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
error!("Badly formed auth token discarded.");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
debug!("Parsed {} auth token(s).", tokens.len());
|
|
||||||
}
|
|
||||||
|
|
||||||
Self(tokens)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Attempt to match the provided specifier to the tokens in the set. The
|
|
||||||
/// matching occurs from the right of the hostname plus port, irrespective of
|
|
||||||
/// scheme. For example `https://www.deno.land:8080/` would match a token
|
|
||||||
/// with a host value of `deno.land:8080` but not match `www.deno.land`. The
|
|
||||||
/// matching is case insensitive.
|
|
||||||
pub fn get(&self, specifier: &ModuleSpecifier) -> Option<AuthToken> {
|
|
||||||
self.0.iter().find_map(|t| {
|
|
||||||
if t.host.matches(specifier) {
|
|
||||||
Some(t.clone())
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use deno_core::resolve_url;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_auth_token() {
|
|
||||||
let auth_tokens = AuthTokens::new(Some("abc123@deno.land".to_string()));
|
|
||||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer abc123"
|
|
||||||
);
|
|
||||||
let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer abc123".to_string()
|
|
||||||
);
|
|
||||||
let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(auth_tokens.get(&fixture), None);
|
|
||||||
let fixture =
|
|
||||||
resolve_url("https://deno.land.example.com/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(auth_tokens.get(&fixture), None);
|
|
||||||
let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(auth_tokens.get(&fixture), None);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_auth_tokens_multiple() {
|
|
||||||
let auth_tokens =
|
|
||||||
AuthTokens::new(Some("abc123@deno.land;def456@example.com".to_string()));
|
|
||||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer abc123".to_string()
|
|
||||||
);
|
|
||||||
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer def456".to_string()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_auth_tokens_space() {
|
|
||||||
let auth_tokens = AuthTokens::new(Some(
|
|
||||||
" abc123@deno.land;def456@example.com\t".to_string(),
|
|
||||||
));
|
|
||||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer abc123".to_string()
|
|
||||||
);
|
|
||||||
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer def456".to_string()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_auth_tokens_newline() {
|
|
||||||
let auth_tokens = AuthTokens::new(Some(
|
|
||||||
"\nabc123@deno.land;def456@example.com\n".to_string(),
|
|
||||||
));
|
|
||||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer abc123".to_string()
|
|
||||||
);
|
|
||||||
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer def456".to_string()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_auth_tokens_port() {
|
|
||||||
let auth_tokens =
|
|
||||||
AuthTokens::new(Some("abc123@deno.land:8080".to_string()));
|
|
||||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(auth_tokens.get(&fixture), None);
|
|
||||||
let fixture = resolve_url("http://deno.land:8080/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer abc123".to_string()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_auth_tokens_contain_at() {
|
|
||||||
let auth_tokens = AuthTokens::new(Some("abc@123@deno.land".to_string()));
|
|
||||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Bearer abc@123".to_string()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_auth_token_basic() {
|
|
||||||
let auth_tokens = AuthTokens::new(Some("abc:123@deno.land".to_string()));
|
|
||||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Basic YWJjOjEyMw=="
|
|
||||||
);
|
|
||||||
let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
|
||||||
"Basic YWJjOjEyMw==".to_string()
|
|
||||||
);
|
|
||||||
let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(auth_tokens.get(&fixture), None);
|
|
||||||
let fixture =
|
|
||||||
resolve_url("https://deno.land.example.com/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(auth_tokens.get(&fixture), None);
|
|
||||||
let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
|
|
||||||
assert_eq!(auth_tokens.get(&fixture), None);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_ip() {
|
|
||||||
let ip = AuthDomain::from("[2001:db8:a::123]");
|
|
||||||
assert_eq!("Ip(2001:db8:a::123)", format!("{ip:?}"));
|
|
||||||
let ip = AuthDomain::from("[2001:db8:a::123]:8080");
|
|
||||||
assert_eq!("IpPort([2001:db8:a::123]:8080)", format!("{ip:?}"));
|
|
||||||
let ip = AuthDomain::from("1.1.1.1");
|
|
||||||
assert_eq!("Ip(1.1.1.1)", format!("{ip:?}"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_case_insensitive() {
|
|
||||||
let domain = AuthDomain::from("EXAMPLE.com");
|
|
||||||
assert!(
|
|
||||||
domain.matches(&ModuleSpecifier::parse("http://example.com").unwrap())
|
|
||||||
);
|
|
||||||
assert!(
|
|
||||||
domain.matches(&ModuleSpecifier::parse("http://example.COM").unwrap())
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_matches() {
|
|
||||||
let candidates = [
|
|
||||||
"example.com",
|
|
||||||
"www.example.com",
|
|
||||||
"1.1.1.1",
|
|
||||||
"[2001:db8:a::123]",
|
|
||||||
// These will never match
|
|
||||||
"example.com.evil.com",
|
|
||||||
"1.1.1.1.evil.com",
|
|
||||||
"notexample.com",
|
|
||||||
"www.notexample.com",
|
|
||||||
];
|
|
||||||
let domains = [
|
|
||||||
("example.com", vec!["example.com", "www.example.com"]),
|
|
||||||
(".example.com", vec!["example.com", "www.example.com"]),
|
|
||||||
("www.example.com", vec!["www.example.com"]),
|
|
||||||
("1.1.1.1", vec!["1.1.1.1"]),
|
|
||||||
("[2001:db8:a::123]", vec!["[2001:db8:a::123]"]),
|
|
||||||
];
|
|
||||||
let url = |c: &str| ModuleSpecifier::parse(&format!("http://{c}")).unwrap();
|
|
||||||
let url_port =
|
|
||||||
|c: &str| ModuleSpecifier::parse(&format!("http://{c}:8080")).unwrap();
|
|
||||||
|
|
||||||
// Generate each candidate with and without a port
|
|
||||||
let candidates = candidates
|
|
||||||
.into_iter()
|
|
||||||
.flat_map(|c| [url(c), url_port(c)])
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
for (domain, expected_domain) in domains {
|
|
||||||
// Test without a port -- all candidates return without a port
|
|
||||||
let auth_domain = AuthDomain::from(domain);
|
|
||||||
let actual = candidates
|
|
||||||
.iter()
|
|
||||||
.filter(|c| auth_domain.matches(c))
|
|
||||||
.cloned()
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
let expected = expected_domain.iter().map(|u| url(u)).collect::<Vec<_>>();
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
|
|
||||||
// Test with a port, all candidates return with a port
|
|
||||||
let auth_domain = AuthDomain::from(&format!("{domain}:8080"));
|
|
||||||
let actual = candidates
|
|
||||||
.iter()
|
|
||||||
.filter(|c| auth_domain.matches(c))
|
|
||||||
.cloned()
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
let expected = expected_domain
|
|
||||||
.iter()
|
|
||||||
.map(|u| url_port(u))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
106 cli/cache/mod.rs vendored
@ -1,18 +1,19 @@
|
||||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
use crate::args::jsr_url;
|
use crate::args::jsr_url;
|
||||||
use crate::args::CacheSetting;
|
use crate::file_fetcher::CliFetchNoFollowErrorKind;
|
||||||
use crate::errors::get_error_class_name;
|
use crate::file_fetcher::CliFileFetcher;
|
||||||
use crate::file_fetcher::FetchNoFollowOptions;
|
use crate::file_fetcher::FetchNoFollowOptions;
|
||||||
use crate::file_fetcher::FetchOptions;
|
|
||||||
use crate::file_fetcher::FetchPermissionsOptionRef;
|
use crate::file_fetcher::FetchPermissionsOptionRef;
|
||||||
use crate::file_fetcher::FileFetcher;
|
|
||||||
use crate::file_fetcher::FileOrRedirect;
|
|
||||||
use crate::util::fs::atomic_write_file_with_retries;
|
use crate::util::fs::atomic_write_file_with_retries;
|
||||||
use crate::util::fs::atomic_write_file_with_retries_and_fs;
|
use crate::util::fs::atomic_write_file_with_retries_and_fs;
|
||||||
use crate::util::fs::AtomicWriteFileFsAdapter;
|
use crate::util::fs::AtomicWriteFileFsAdapter;
|
||||||
|
|
||||||
use deno_ast::MediaType;
|
use deno_ast::MediaType;
|
||||||
|
use deno_cache_dir::file_fetcher::CacheSetting;
|
||||||
|
use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind;
|
||||||
|
use deno_cache_dir::file_fetcher::FileOrRedirect;
|
||||||
|
use deno_core::error::AnyError;
|
||||||
use deno_core::futures;
|
use deno_core::futures;
|
||||||
use deno_core::futures::FutureExt;
|
use deno_core::futures::FutureExt;
|
||||||
use deno_core::ModuleSpecifier;
|
use deno_core::ModuleSpecifier;
|
||||||
|
@ -23,6 +24,7 @@ use deno_graph::source::Loader;
|
||||||
use deno_runtime::deno_fs;
|
use deno_runtime::deno_fs;
|
||||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||||
use node_resolver::InNpmPackageChecker;
|
use node_resolver::InNpmPackageChecker;
|
||||||
|
use std::borrow::Cow;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
@ -67,8 +69,11 @@ pub const CACHE_PERM: u32 = 0o644;
|
||||||
pub struct RealDenoCacheEnv;
|
pub struct RealDenoCacheEnv;
|
||||||
|
|
||||||
impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
||||||
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
|
fn read_file_bytes(
|
||||||
std::fs::read(path)
|
&self,
|
||||||
|
path: &Path,
|
||||||
|
) -> std::io::Result<Cow<'static, [u8]>> {
|
||||||
|
std::fs::read(path).map(Cow::Owned)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn atomic_write_file(
|
fn atomic_write_file(
|
||||||
|
@ -112,7 +117,10 @@ pub struct DenoCacheEnvFsAdapter<'a>(
|
||||||
);
|
);
|
||||||
|
|
||||||
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
|
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
|
||||||
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
|
fn read_file_bytes(
|
||||||
|
&self,
|
||||||
|
path: &Path,
|
||||||
|
) -> std::io::Result<Cow<'static, [u8]>> {
|
||||||
self
|
self
|
||||||
.0
|
.0
|
||||||
.read_file_sync(path, None)
|
.read_file_sync(path, None)
|
||||||
|
@ -183,7 +191,7 @@ pub struct FetchCacherOptions {
|
||||||
/// a concise interface to the DENO_DIR when building module graphs.
|
/// a concise interface to the DENO_DIR when building module graphs.
|
||||||
pub struct FetchCacher {
|
pub struct FetchCacher {
|
||||||
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||||
file_fetcher: Arc<FileFetcher>,
|
file_fetcher: Arc<CliFileFetcher>,
|
||||||
fs: Arc<dyn deno_fs::FileSystem>,
|
fs: Arc<dyn deno_fs::FileSystem>,
|
||||||
global_http_cache: Arc<GlobalHttpCache>,
|
global_http_cache: Arc<GlobalHttpCache>,
|
||||||
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
|
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
|
||||||
|
@ -195,7 +203,7 @@ pub struct FetchCacher {
|
||||||
|
|
||||||
impl FetchCacher {
|
impl FetchCacher {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
file_fetcher: Arc<FileFetcher>,
|
file_fetcher: Arc<CliFileFetcher>,
|
||||||
fs: Arc<dyn deno_fs::FileSystem>,
|
fs: Arc<dyn deno_fs::FileSystem>,
|
||||||
global_http_cache: Arc<GlobalHttpCache>,
|
global_http_cache: Arc<GlobalHttpCache>,
|
||||||
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
|
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
|
||||||
|
@ -313,18 +321,18 @@ impl Loader for FetchCacher {
|
||||||
LoaderCacheSetting::Only => Some(CacheSetting::Only),
|
LoaderCacheSetting::Only => Some(CacheSetting::Only),
|
||||||
};
|
};
|
||||||
file_fetcher
|
file_fetcher
|
||||||
.fetch_no_follow_with_options(FetchNoFollowOptions {
|
.fetch_no_follow(
|
||||||
fetch_options: FetchOptions {
|
&specifier,
|
||||||
specifier: &specifier,
|
FetchPermissionsOptionRef::Restricted(&permissions,
|
||||||
permissions: if is_statically_analyzable {
|
if is_statically_analyzable {
|
||||||
FetchPermissionsOptionRef::StaticContainer(&permissions)
|
deno_runtime::deno_permissions::CheckSpecifierKind::Static
|
||||||
} else {
|
} else {
|
||||||
FetchPermissionsOptionRef::DynamicContainer(&permissions)
|
deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic
|
||||||
},
|
}),
|
||||||
maybe_auth: None,
|
FetchNoFollowOptions {
|
||||||
maybe_accept: None,
|
maybe_auth: None,
|
||||||
maybe_cache_setting: maybe_cache_setting.as_ref(),
|
maybe_accept: None,
|
||||||
},
|
maybe_cache_setting: maybe_cache_setting.as_ref(),
|
||||||
maybe_checksum: options.maybe_checksum.as_ref(),
|
maybe_checksum: options.maybe_checksum.as_ref(),
|
||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
|
@ -341,7 +349,7 @@ impl Loader for FetchCacher {
|
||||||
(None, None) => None,
|
(None, None) => None,
|
||||||
};
|
};
|
||||||
Ok(Some(LoadResponse::Module {
|
Ok(Some(LoadResponse::Module {
|
||||||
specifier: file.specifier,
|
specifier: file.url,
|
||||||
maybe_headers,
|
maybe_headers,
|
||||||
content: file.source,
|
content: file.source,
|
||||||
}))
|
}))
|
||||||
|
@@ -354,18 +362,46 @@ impl Loader for FetchCacher {
  }
  })
  .unwrap_or_else(|err| {
- if let Some(io_err) = err.downcast_ref::<std::io::Error>() {
- if io_err.kind() == std::io::ErrorKind::NotFound {
- return Ok(None);
- } else {
- return Err(err);
- }
- }
- let error_class_name = get_error_class_name(&err);
- match error_class_name {
- "NotFound" => Ok(None),
- "NotCached" if options.cache_setting == LoaderCacheSetting::Only => Ok(None),
- _ => Err(err),
+ let err = err.into_kind();
+ match err {
+ CliFetchNoFollowErrorKind::FetchNoFollow(err) => {
+ let err = err.into_kind();
+ match err {
+ FetchNoFollowErrorKind::NotFound(_) => Ok(None),
+ FetchNoFollowErrorKind::UrlToFilePath { .. } |
+ FetchNoFollowErrorKind::ReadingBlobUrl { .. } |
+ FetchNoFollowErrorKind::ReadingFile { .. } |
+ FetchNoFollowErrorKind::FetchingRemote { .. } |
+ FetchNoFollowErrorKind::ClientError { .. } |
+ FetchNoFollowErrorKind::NoRemote { .. } |
+ FetchNoFollowErrorKind::DataUrlDecode { .. } |
+ FetchNoFollowErrorKind::RedirectResolution { .. } |
+ FetchNoFollowErrorKind::CacheRead { .. } |
+ FetchNoFollowErrorKind::CacheSave { .. } |
+ FetchNoFollowErrorKind::UnsupportedScheme { .. } |
+ FetchNoFollowErrorKind::RedirectHeaderParse { .. } |
+ FetchNoFollowErrorKind::InvalidHeader { .. } => Err(AnyError::from(err)),
+ FetchNoFollowErrorKind::NotCached { .. } => {
+ if options.cache_setting == LoaderCacheSetting::Only {
+ Ok(None)
+ } else {
+ Err(AnyError::from(err))
+ }
+ },
+ FetchNoFollowErrorKind::ChecksumIntegrity(err) => {
+ // convert to the equivalent deno_graph error so that it
+ // enhances it if this is passed to deno_graph
+ Err(
+ deno_graph::source::ChecksumIntegrityError {
+ actual: err.actual,
+ expected: err.expected,
+ }
+ .into(),
+ )
+ }
+ }
+ },
+ CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(AnyError::from(permission_check_error)),
  }
  })
  }
24 cli/cache/module_info.rs vendored
@@ -284,6 +284,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 {

  #[cfg(test)]
  mod test {
+ use deno_graph::JsDocImportInfo;
  use deno_graph::PositionRange;
  use deno_graph::SpecifierWithRange;

@@ -308,18 +309,21 @@ mod test {
  );

  let mut module_info = ModuleInfo::default();
- module_info.jsdoc_imports.push(SpecifierWithRange {
- range: PositionRange {
- start: deno_graph::Position {
- line: 0,
- character: 3,
- },
- end: deno_graph::Position {
- line: 1,
- character: 2,
+ module_info.jsdoc_imports.push(JsDocImportInfo {
+ specifier: SpecifierWithRange {
+ range: PositionRange {
+ start: deno_graph::Position {
+ line: 0,
+ character: 3,
+ },
+ end: deno_graph::Position {
+ line: 1,
+ character: 2,
+ },
  },
+ text: "test".to_string(),
  },
- text: "test".to_string(),
+ resolution_mode: None,
  });
  cache
  .set_module_info(
10 cli/cache/parsed_source.rs vendored
@@ -95,11 +95,21 @@ impl ParsedSourceCache {
  self.sources.lock().remove(specifier);
  }

+ /// Fress all parsed sources from memory.
+ pub fn free_all(&self) {
+ self.sources.lock().clear();
+ }
+
  /// Creates a parser that will reuse a ParsedSource from the store
  /// if it exists, or else parse.
  pub fn as_capturing_parser(&self) -> CapturingEsParser {
  CapturingEsParser::new(None, self)
  }
+
+ #[cfg(test)]
+ pub fn len(&self) -> usize {
+ self.sources.lock().len()
+ }
  }

  /// It's ok that this is racy since in non-LSP situations
35 cli/emit.rs
@@ -5,6 +5,7 @@ use crate::cache::FastInsecureHasher;
  use crate::cache::ParsedSourceCache;
  use crate::resolver::CjsTracker;

+ use deno_ast::EmittedSourceText;
  use deno_ast::ModuleKind;
  use deno_ast::SourceMapOption;
  use deno_ast::SourceRange;
@@ -132,6 +133,7 @@ impl Emitter {
  &transpile_and_emit_options.0,
  &transpile_and_emit_options.1,
  )
+ .map(|r| r.text)
  }
  })
  .await
@@ -166,7 +168,8 @@ impl Emitter {
  source.clone(),
  &self.transpile_and_emit_options.0,
  &self.transpile_and_emit_options.1,
- )?;
+ )?
+ .text;
  helper.post_emit_parsed_source(
  specifier,
  &transpiled_source,
@@ -177,6 +180,31 @@ impl Emitter {
  }
  }

+ pub fn emit_parsed_source_for_deno_compile(
+ &self,
+ specifier: &ModuleSpecifier,
+ media_type: MediaType,
+ module_kind: deno_ast::ModuleKind,
+ source: &Arc<str>,
+ ) -> Result<(String, String), AnyError> {
+ let mut emit_options = self.transpile_and_emit_options.1.clone();
+ emit_options.inline_sources = false;
+ emit_options.source_map = SourceMapOption::Separate;
+ // strip off the path to have more deterministic builds as we don't care
+ // about the source name because we manually provide the source map to v8
+ emit_options.source_map_base = Some(deno_path_util::url_parent(specifier));
+ let source = EmitParsedSourceHelper::transpile(
+ &self.parsed_source_cache,
+ specifier,
+ media_type,
+ module_kind,
+ source.clone(),
+ &self.transpile_and_emit_options.0,
+ &emit_options,
+ )?;
+ Ok((source.text, source.source_map.unwrap()))
+ }
+
  /// Expects a file URL, panics otherwise.
  pub async fn load_and_emit_for_hmr(
  &self,
@@ -282,7 +310,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
  source: Arc<str>,
  transpile_options: &deno_ast::TranspileOptions,
  emit_options: &deno_ast::EmitOptions,
- ) -> Result<String, AnyError> {
+ ) -> Result<EmittedSourceText, AnyError> {
  // nothing else needs the parsed source at this point, so remove from
  // the cache in order to not transpile owned
  let parsed_source = parsed_source_cache
@@ -302,8 +330,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
  source
  }
  };
- debug_assert!(transpiled_source.source_map.is_none());
- Ok(transpiled_source.text)
+ Ok(transpiled_source)
  }

  pub fn post_emit_parsed_source(
@@ -22,7 +22,7 @@ use crate::cache::ModuleInfoCache;
 use crate::cache::NodeAnalysisCache;
 use crate::cache::ParsedSourceCache;
 use crate::emit::Emitter;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::graph_container::MainModuleGraphContainer;
 use crate::graph_util::FileWatcherReporter;
 use crate::graph_util::ModuleGraphBuilder;
@@ -48,7 +48,6 @@ use crate::resolver::CliNpmReqResolver;
 use crate::resolver::CliResolver;
 use crate::resolver::CliResolverOptions;
 use crate::resolver::CliSloppyImportsResolver;
-use crate::resolver::IsCjsResolverOptions;
 use crate::resolver::NpmModuleLoader;
 use crate::resolver::SloppyImportsCachedFs;
 use crate::standalone::DenoCompileBinaryWriter;
@@ -72,6 +71,7 @@ use deno_core::error::AnyError;
 use deno_core::futures::FutureExt;
 use deno_core::FeatureChecker;
 
+use deno_resolver::cjs::IsCjsResolutionMode;
 use deno_resolver::npm::NpmReqResolverOptions;
 use deno_resolver::DenoResolverOptions;
 use deno_resolver::NodeAndNpmReqResolver;
@@ -185,7 +185,7 @@ struct CliFactoryServices {
 emit_cache: Deferred<Arc<EmitCache>>,
 emitter: Deferred<Arc<Emitter>>,
 feature_checker: Deferred<Arc<FeatureChecker>>,
-file_fetcher: Deferred<Arc<FileFetcher>>,
+file_fetcher: Deferred<Arc<CliFileFetcher>>,
 fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
 global_http_cache: Deferred<Arc<GlobalHttpCache>>,
 http_cache: Deferred<Arc<dyn HttpCache>>,
@@ -350,16 +350,17 @@ impl CliFactory {
 })
 }
 
-pub fn file_fetcher(&self) -> Result<&Arc<FileFetcher>, AnyError> {
+pub fn file_fetcher(&self) -> Result<&Arc<CliFileFetcher>, AnyError> {
 self.services.file_fetcher.get_or_try_init(|| {
 let cli_options = self.cli_options()?;
-Ok(Arc::new(FileFetcher::new(
+Ok(Arc::new(CliFileFetcher::new(
 self.http_cache()?.clone(),
-cli_options.cache_setting(),
-!cli_options.no_remote(),
 self.http_client_provider().clone(),
 self.blob_store().clone(),
 Some(self.text_only_progress_bar().clone()),
+!cli_options.no_remote(),
+cli_options.cache_setting(),
+log::Level::Info,
 )))
 })
 }
@@ -504,7 +505,12 @@ impl CliFactory {
 let resolver = cli_options
 .create_workspace_resolver(
 self.file_fetcher()?,
-if cli_options.use_byonm() {
+if cli_options.use_byonm()
+&& !matches!(
+cli_options.sub_command(),
+DenoSubcommand::Publish(_)
+)
+{
 PackageJsonDepResolution::Disabled
 } else {
 // todo(dsherret): this should be false for nodeModulesDir: true
@@ -845,9 +851,12 @@ impl CliFactory {
 Ok(Arc::new(CjsTracker::new(
 self.in_npm_pkg_checker()?.clone(),
 self.pkg_json_resolver().clone(),
-IsCjsResolverOptions {
-detect_cjs: options.detect_cjs(),
-is_node_main: options.is_node_main(),
+if options.is_node_main() || options.unstable_detect_cjs() {
+IsCjsResolutionMode::ImplicitTypeCommonJs
+} else if options.detect_cjs() {
+IsCjsResolutionMode::ExplicitTypeCommonJs
+} else {
+IsCjsResolutionMode::Disabled
 },
 )))
 })
@@ -976,6 +985,7 @@ impl CliFactory {
 cli_options.sub_command().clone(),
 self.create_cli_main_worker_options()?,
 self.cli_options()?.otel_config(),
+self.cli_options()?.default_npm_caching_strategy(),
 ))
 }
cli/file_fetcher.rs: 1390 lines changed (file diff suppressed because it is too large)
@@ -4,14 +4,16 @@ use crate::args::config_to_deno_graph_workspace_member;
 use crate::args::jsr_url;
 use crate::args::CliLockfile;
 use crate::args::CliOptions;
+pub use crate::args::NpmCachingStrategy;
 use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
 use crate::cache;
+use crate::cache::FetchCacher;
 use crate::cache::GlobalHttpCache;
 use crate::cache::ModuleInfoCache;
 use crate::cache::ParsedSourceCache;
 use crate::colors;
 use crate::errors::get_error_class_name;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::npm::CliNpmResolver;
 use crate::resolver::CjsTracker;
 use crate::resolver::CliResolver;
@@ -25,7 +27,7 @@ use deno_config::deno_json::JsxImportSourceConfig;
 use deno_config::workspace::JsrPackageConfig;
 use deno_core::anyhow::bail;
 use deno_graph::source::LoaderChecksum;
-use deno_graph::source::ResolutionMode;
+use deno_graph::source::ResolutionKind;
 use deno_graph::FillFromLockfileOptions;
 use deno_graph::JsrLoadError;
 use deno_graph::ModuleLoadError;
@@ -44,12 +46,13 @@ use deno_graph::ModuleGraphError;
 use deno_graph::ResolutionError;
 use deno_graph::SpecifierError;
 use deno_path_util::url_to_file_path;
-use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
+use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
 use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_semver::jsr::JsrDepPackageReq;
 use deno_semver::package::PackageNv;
+use deno_semver::SmallStackString;
 use import_map::ImportMapError;
 use node_resolver::InNpmPackageChecker;
 use std::collections::HashSet;
@@ -108,6 +111,25 @@ pub fn graph_valid(
 }
 }
 
+pub fn fill_graph_from_lockfile(
+graph: &mut ModuleGraph,
+lockfile: &deno_lockfile::Lockfile,
+) {
+graph.fill_from_lockfile(FillFromLockfileOptions {
+redirects: lockfile
+.content
+.redirects
+.iter()
+.map(|(from, to)| (from.as_str(), to.as_str())),
+package_specifiers: lockfile
+.content
+.packages
+.specifiers
+.iter()
+.map(|(dep, id)| (dep, id.as_str())),
+});
+}
+
 #[derive(Clone)]
 pub struct GraphWalkErrorsOptions {
 pub check_js: bool,
@@ -198,6 +220,7 @@ pub struct CreateGraphOptions<'a> {
 pub is_dynamic: bool,
 /// Specify `None` to use the default CLI loader.
 pub loader: Option<&'a mut dyn Loader>,
+pub npm_caching: NpmCachingStrategy,
 }
 
 pub struct ModuleGraphCreator {
@@ -226,10 +249,11 @@ impl ModuleGraphCreator {
 &self,
 graph_kind: GraphKind,
 roots: Vec<ModuleSpecifier>,
+npm_caching: NpmCachingStrategy,
 ) -> Result<deno_graph::ModuleGraph, AnyError> {
 let mut cache = self.module_graph_builder.create_graph_loader();
 self
-.create_graph_with_loader(graph_kind, roots, &mut cache)
+.create_graph_with_loader(graph_kind, roots, &mut cache, npm_caching)
 .await
 }
 
@@ -238,6 +262,7 @@ impl ModuleGraphCreator {
 graph_kind: GraphKind,
 roots: Vec<ModuleSpecifier>,
 loader: &mut dyn Loader,
+npm_caching: NpmCachingStrategy,
 ) -> Result<ModuleGraph, AnyError> {
 self
 .create_graph_with_options(CreateGraphOptions {
@@ -245,6 +270,7 @@ impl ModuleGraphCreator {
 graph_kind,
 roots,
 loader: Some(loader),
+npm_caching,
 })
 .await
 }
@@ -254,6 +280,23 @@ impl ModuleGraphCreator {
 package_configs: &[JsrPackageConfig],
 build_fast_check_graph: bool,
 ) -> Result<ModuleGraph, AnyError> {
+struct PublishLoader(FetchCacher);
+impl Loader for PublishLoader {
+fn load(
+&self,
+specifier: &deno_ast::ModuleSpecifier,
+options: deno_graph::source::LoadOptions,
+) -> deno_graph::source::LoadFuture {
+if specifier.scheme() == "bun" {
+return Box::pin(std::future::ready(Ok(Some(
+deno_graph::source::LoadResponse::External {
+specifier: specifier.clone(),
+},
+))));
+}
+self.0.load(specifier, options)
+}
+}
 fn graph_has_external_remote(graph: &ModuleGraph) -> bool {
 // Earlier on, we marked external non-JSR modules as external.
 // If the graph contains any of those, it would cause type checking
@@ -271,12 +314,16 @@ impl ModuleGraphCreator {
 for package_config in package_configs {
 roots.extend(package_config.config_file.resolve_export_value_urls()?);
 }
 
+let loader = self.module_graph_builder.create_graph_loader();
+let mut publish_loader = PublishLoader(loader);
 let mut graph = self
 .create_graph_with_options(CreateGraphOptions {
 is_dynamic: false,
 graph_kind: deno_graph::GraphKind::All,
 roots,
-loader: None,
+loader: Some(&mut publish_loader),
+npm_caching: self.options.default_npm_caching_strategy(),
 })
 .await?;
 self.graph_valid(&graph)?;
@@ -336,6 +383,7 @@ impl ModuleGraphCreator {
 graph_kind,
 roots,
 loader: None,
+npm_caching: self.options.default_npm_caching_strategy(),
 })
 .await?;
 
@@ -384,7 +432,7 @@ pub struct ModuleGraphBuilder {
 caches: Arc<cache::Caches>,
 cjs_tracker: Arc<CjsTracker>,
 cli_options: Arc<CliOptions>,
-file_fetcher: Arc<FileFetcher>,
+file_fetcher: Arc<CliFileFetcher>,
 fs: Arc<dyn FileSystem>,
 global_http_cache: Arc<GlobalHttpCache>,
 in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@@ -403,7 +451,7 @@ impl ModuleGraphBuilder {
 caches: Arc<cache::Caches>,
 cjs_tracker: Arc<CjsTracker>,
 cli_options: Arc<CliOptions>,
-file_fetcher: Arc<FileFetcher>,
+file_fetcher: Arc<CliFileFetcher>,
 fs: Arc<dyn FileSystem>,
 global_http_cache: Arc<GlobalHttpCache>,
 in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@@ -525,7 +573,8 @@ impl ModuleGraphBuilder {
 };
 let cli_resolver = &self.resolver;
 let graph_resolver = self.create_graph_resolver()?;
-let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
+let graph_npm_resolver =
+cli_resolver.create_graph_npm_resolver(options.npm_caching);
 let maybe_file_watcher_reporter = self
 .maybe_file_watcher_reporter
 .as_ref()
@@ -552,6 +601,7 @@ impl ModuleGraphBuilder {
 resolver: Some(&graph_resolver),
 locker: locker.as_mut().map(|l| l as _),
 },
+options.npm_caching,
 )
 .await
 }
@@ -562,6 +612,7 @@ impl ModuleGraphBuilder {
 roots: Vec<ModuleSpecifier>,
 loader: &'a mut dyn deno_graph::source::Loader,
 options: deno_graph::BuildOptions<'a>,
+npm_caching: NpmCachingStrategy,
 ) -> Result<(), AnyError> {
 // ensure an "npm install" is done if the user has explicitly
 // opted into using a node_modules directory
@@ -572,7 +623,13 @@ impl ModuleGraphBuilder {
 .unwrap_or(false)
 {
 if let Some(npm_resolver) = self.npm_resolver.as_managed() {
-npm_resolver.ensure_top_level_package_json_install().await?;
+let already_done =
+npm_resolver.ensure_top_level_package_json_install().await?;
+if !already_done && matches!(npm_caching, NpmCachingStrategy::Eager) {
+npm_resolver
+.cache_packages(crate::npm::PackageCaching::All)
+.await?;
+}
 }
 }
 
@@ -582,19 +639,7 @@ impl ModuleGraphBuilder {
 // populate the information from the lockfile
 if let Some(lockfile) = &self.lockfile {
 let lockfile = lockfile.lock();
-graph.fill_from_lockfile(FillFromLockfileOptions {
-redirects: lockfile
-.content
-.redirects
-.iter()
-.map(|(from, to)| (from.as_str(), to.as_str())),
-package_specifiers: lockfile
-.content
-.packages
-.specifiers
-.iter()
-.map(|(dep, id)| (dep, id.as_str())),
-});
+fill_graph_from_lockfile(graph, &lockfile);
 }
 }
 
@@ -636,7 +681,7 @@ impl ModuleGraphBuilder {
 for (from, to) in graph.packages.mappings() {
 lockfile.insert_package_specifier(
 JsrDepPackageReq::jsr(from.clone()),
-to.version.to_string(),
+to.version.to_custom_string::<SmallStackString>(),
 );
 }
 }
@@ -673,7 +718,9 @@ impl ModuleGraphBuilder {
 let parser = self.parsed_source_cache.as_capturing_parser();
 let cli_resolver = &self.resolver;
 let graph_resolver = self.create_graph_resolver()?;
-let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
+let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(
+self.cli_options.default_npm_caching_strategy(),
+);
 
 graph.build_fast_check_type_graph(
 deno_graph::BuildFastCheckTypeGraphOptions {
@@ -795,7 +842,7 @@ fn enhanced_sloppy_imports_error_message(
 ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
 | ModuleError::Missing(specifier, _) => {
 let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone()))
-.resolve(specifier, SloppyImportsResolutionMode::Execution)?
+.resolve(specifier, SloppyImportsResolutionKind::Execution)?
 .as_suggestion_message();
 Some(format!(
 "{} {} or run with --unstable-sloppy-imports",
@@ -1100,12 +1147,12 @@ impl<'a> deno_graph::source::FileSystem for DenoGraphFsAdapter<'a> {
 }
 }
 
-pub fn format_range_with_colors(range: &deno_graph::Range) -> String {
+pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String {
 format!(
 "{}:{}:{}",
-colors::cyan(range.specifier.as_str()),
-colors::yellow(&(range.start.line + 1).to_string()),
-colors::yellow(&(range.start.character + 1).to_string())
+colors::cyan(referrer.specifier.as_str()),
+colors::yellow(&(referrer.range.start.line + 1).to_string()),
+colors::yellow(&(referrer.range.start.character + 1).to_string())
 )
 }
 
@@ -1195,26 +1242,54 @@ impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> {
 &self,
 raw_specifier: &str,
 referrer_range: &deno_graph::Range,
-mode: ResolutionMode,
+resolution_kind: ResolutionKind,
 ) -> Result<ModuleSpecifier, ResolveError> {
 self.resolver.resolve(
 raw_specifier,
-referrer_range,
-self
-.cjs_tracker
-.get_referrer_kind(&referrer_range.specifier),
-mode,
+&referrer_range.specifier,
+referrer_range.range.start,
+referrer_range
+.resolution_mode
+.map(to_node_resolution_mode)
+.unwrap_or_else(|| {
+self
+.cjs_tracker
+.get_referrer_kind(&referrer_range.specifier)
+}),
+to_node_resolution_kind(resolution_kind),
 )
 }
 }
 
+pub fn to_node_resolution_kind(
+kind: ResolutionKind,
+) -> node_resolver::NodeResolutionKind {
+match kind {
+ResolutionKind::Execution => node_resolver::NodeResolutionKind::Execution,
+ResolutionKind::Types => node_resolver::NodeResolutionKind::Types,
+}
+}
+
+pub fn to_node_resolution_mode(
+mode: deno_graph::source::ResolutionMode,
+) -> node_resolver::ResolutionMode {
+match mode {
+deno_graph::source::ResolutionMode::Import => {
+node_resolver::ResolutionMode::Import
+}
+deno_graph::source::ResolutionMode::Require => {
+node_resolver::ResolutionMode::Require
+}
+}
+}
+
 #[cfg(test)]
 mod test {
 use std::sync::Arc;
 
 use deno_ast::ModuleSpecifier;
 use deno_graph::source::ResolveError;
-use deno_graph::Position;
+use deno_graph::PositionRange;
 use deno_graph::Range;
 use deno_graph::ResolutionError;
 use deno_graph::SpecifierError;
@@ -1235,8 +1310,8 @@ mod test {
 specifier: input.to_string(),
 range: Range {
 specifier,
-start: Position::zeroed(),
-end: Position::zeroed(),
+resolution_mode: None,
+range: PositionRange::zeroed(),
 },
 };
 assert_eq!(get_resolution_error_bare_node_specifier(&err), output);
@@ -1251,8 +1326,8 @@ mod test {
 let err = ResolutionError::InvalidSpecifier {
 range: Range {
 specifier,
-start: Position::zeroed(),
-end: Position::zeroed(),
+resolution_mode: None,
+range: PositionRange::zeroed(),
 },
 error: SpecifierError::ImportPrefixMissing {
 specifier: input.to_string(),
cli/http_util.rs: 907 lines changed (file diff suppressed because it is too large)
cli/js/40_lint.js: 1097 lines, new file (file diff suppressed because it is too large)
cli/js/40_lint_selector.js: 1014 lines, new file (file diff suppressed because it is too large)
cli/js/40_lint_types.d.ts: 132 lines, new file (vendored)
@@ -0,0 +1,132 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+export interface NodeFacade {
+type: string;
+range: [number, number];
+[key: string]: unknown;
+}
+
+export interface AstContext {
+buf: Uint8Array;
+strTable: Map<number, string>;
+strTableOffset: number;
+rootOffset: number;
+nodes: Map<number, NodeFacade>;
+strByType: number[];
+strByProp: number[];
+typeByStr: Map<string, number>;
+propByStr: Map<string, number>;
+matcher: MatchContext;
+}
+
+// TODO(@marvinhagemeister) Remove once we land "official" types
+export interface RuleContext {
+id: string;
+}
+
+// TODO(@marvinhagemeister) Remove once we land "official" types
+export interface LintRule {
+create(ctx: RuleContext): Record<string, (node: unknown) => void>;
+destroy?(ctx: RuleContext): void;
+}
+
+// TODO(@marvinhagemeister) Remove once we land "official" types
+export interface LintPlugin {
+name: string;
+rules: Record<string, LintRule>;
+}
+
+export interface LintState {
+plugins: LintPlugin[];
+installedPlugins: Set<string>;
+}
+
+export type VisitorFn = (node: unknown) => void;
+
+export interface CompiledVisitor {
+matcher: (ctx: MatchContext, offset: number) => boolean;
+info: { enter: VisitorFn; exit: VisitorFn };
+}
+
+export interface AttrExists {
+type: 3;
+prop: number[];
+}
+
+export interface AttrBin {
+type: 4;
+prop: number[];
+op: number;
+// deno-lint-ignore no-explicit-any
+value: any;
+}
+
+export type AttrSelector = AttrExists | AttrBin;
+
+export interface ElemSelector {
+type: 1;
+wildcard: boolean;
+elem: number;
+}
+
+export interface PseudoNthChild {
+type: 5;
+op: string | null;
+step: number;
+stepOffset: number;
+of: Selector | null;
+repeat: boolean;
+}
+
+export interface PseudoHas {
+type: 6;
+selectors: Selector[];
+}
+export interface PseudoNot {
+type: 7;
+selectors: Selector[];
+}
+export interface PseudoFirstChild {
+type: 8;
+}
+export interface PseudoLastChild {
+type: 9;
+}
+
+export interface Relation {
+type: 2;
+op: number;
+}
+
+export type Selector = Array<
+| ElemSelector
+| Relation
+| AttrExists
+| AttrBin
+| PseudoNthChild
+| PseudoNot
+| PseudoHas
+| PseudoFirstChild
+| PseudoLastChild
+>;
+
+export interface SelectorParseCtx {
+root: Selector;
+current: Selector;
+}
+
+export interface MatchContext {
+getFirstChild(id: number): number;
+getLastChild(id: number): number;
+getSiblings(id: number): number[];
+getParent(id: number): number;
+getType(id: number): number;
+hasAttrPath(id: number, propIds: number[], idx: number): boolean;
+getAttrPathValue(id: number, propIds: number[], idx: number): unknown;
+}
+
+export type NextFn = (ctx: MatchContext, id: number) => boolean;
+export type MatcherFn = (ctx: MatchContext, id: number) => boolean;
+export type TransformFn = (value: string) => number;
+
+export {};
@@ -1,7 +1,7 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 use crate::args::jsr_url;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use dashmap::DashMap;
 use deno_core::serde_json;
 use deno_graph::packages::JsrPackageInfo;
@@ -19,11 +19,11 @@ pub struct JsrFetchResolver {
 /// It can be large and we don't want to store it.
 info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
 info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
-file_fetcher: Arc<FileFetcher>,
+file_fetcher: Arc<CliFileFetcher>,
 }
 
 impl JsrFetchResolver {
-pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
+pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
 Self {
 nv_by_req: Default::default(),
 info_by_nv: Default::default(),
@@ -15,7 +15,6 @@ use crate::lsp::search::PackageSearchApi;
 use crate::tools::lint::CliLinter;
 use crate::util::path::relative_specifier;
 use deno_config::workspace::MappedResolution;
-use deno_graph::source::ResolutionMode;
 use deno_lint::diagnostic::LintDiagnosticRange;
 
 use deno_ast::SourceRange;
@@ -37,9 +36,12 @@ use deno_semver::package::PackageNv;
 use deno_semver::package::PackageNvReference;
 use deno_semver::package::PackageReq;
 use deno_semver::package::PackageReqReference;
+use deno_semver::SmallStackString;
+use deno_semver::StackString;
 use deno_semver::Version;
 use import_map::ImportMap;
-use node_resolver::NodeModuleKind;
+use node_resolver::NodeResolutionKind;
+use node_resolver::ResolutionMode;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use std::borrow::Cow;
@@ -270,13 +272,24 @@ impl<'a> TsResponseImportMapper<'a> {
 }
 }
 
+if specifier.scheme() == "node" {
+return Some(specifier.to_string());
+}
+
 if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str())
 {
 let mut segments = jsr_path.split('/');
 let name = if jsr_path.starts_with('@') {
-format!("{}/{}", segments.next()?, segments.next()?)
+let scope = segments.next()?;
+let name = segments.next()?;
+capacity_builder::StringBuilder::<StackString>::build(|builder| {
+builder.append(scope);
+builder.append("/");
+builder.append(name);
+})
+.unwrap()
 } else {
-segments.next()?.to_string()
+StackString::from(segments.next()?)
 };
 let version = Version::parse_standard(segments.next()?).ok()?;
 let nv = PackageNv { name, version };
@@ -286,7 +299,9 @@ impl<'a> TsResponseImportMapper<'a> {
 &path,
 Some(&self.file_referrer),
 )?;
-let sub_path = (export != ".").then_some(export);
+let sub_path = (export != ".")
+.then_some(export)
+.map(SmallStackString::from_string);
 let mut req = None;
 req = req.or_else(|| {
 let import_map = self.maybe_import_map?;
@@ -353,7 +368,12 @@ impl<'a> TsResponseImportMapper<'a> {
 let pkg_reqs = npm_resolver.resolve_pkg_reqs_from_pkg_id(&pkg_id);
 // check if any pkg reqs match what is found in an import map
 if !pkg_reqs.is_empty() {
-let sub_path = self.resolve_package_path(specifier);
+let sub_path = npm_resolver
+.resolve_pkg_folder_from_pkg_id(&pkg_id)
+.ok()
+.and_then(|pkg_folder| {
+self.resolve_package_path(specifier, &pkg_folder)
+});
 if let Some(import_map) = self.maybe_import_map {
 let pkg_reqs = pkg_reqs.iter().collect::<HashSet<_>>();
 let mut matches = Vec::new();
@@ -368,8 +388,13 @@ impl<'a> TsResponseImportMapper<'a> {
 if let Some(key_sub_path) =
 sub_path.strip_prefix(value_sub_path)
 {
-matches
-.push(format!("{}{}", entry.raw_key, key_sub_path));
+// keys that don't end in a slash can't be mapped to a subpath
+if entry.raw_key.ends_with('/')
+|| key_sub_path.is_empty()
+{
+matches
+.push(format!("{}{}", entry.raw_key, key_sub_path));
+}
 }
 }
 }
@@ -413,10 +438,16 @@ impl<'a> TsResponseImportMapper<'a> {
 fn resolve_package_path(
 &self,
 specifier: &ModuleSpecifier,
+package_root_folder: &Path,
 ) -> Option<String> {
 let package_json = self
 .resolver
-.get_closest_package_json(specifier)
+.pkg_json_resolver(specifier)
+// the specifier might have a closer package.json, but we
+// want the root of the package's package.json
+.get_closest_package_json_from_file_path(
+&package_root_folder.join("package.json"),
+)
 .ok()
 .flatten()?;
 let root_folder = package_json.path.parent()?;
@@ -467,7 +498,7 @@ impl<'a> TsResponseImportMapper<'a> {
 &self,
 specifier: &str,
 referrer: &ModuleSpecifier,
-referrer_kind: NodeModuleKind,
+resolution_mode: ResolutionMode,
 ) -> Option<String> {
 let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
 let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
@@ -481,13 +512,10 @@ impl<'a> TsResponseImportMapper<'a> {
 .as_cli_resolver(Some(&self.file_referrer))
 .resolve(
 &specifier,
-&deno_graph::Range {
-specifier: referrer.clone(),
-start: deno_graph::Position::zeroed(),
-end: deno_graph::Position::zeroed(),
-},
-referrer_kind,
-ResolutionMode::Types,
+referrer,
+deno_graph::Position::zeroed(),
+resolution_mode,
+NodeResolutionKind::Types,
 )
 .ok()
 .and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok())
@@ -509,20 +537,17 @@ impl<'a> TsResponseImportMapper<'a> {
 &self,
 specifier_text: &str,
 referrer: &ModuleSpecifier,
-referrer_kind: NodeModuleKind,
+resolution_mode: ResolutionMode,
 ) -> bool {
 self
 .resolver
 .as_cli_resolver(Some(&self.file_referrer))
 .resolve(
 specifier_text,
-&deno_graph::Range {
-specifier: referrer.clone(),
-start: deno_graph::Position::zeroed(),
-end: deno_graph::Position::zeroed(),
-},
-referrer_kind,
-deno_graph::source::ResolutionMode::Types,
+referrer,
+deno_graph::Position::zeroed(),
+resolution_mode,
+NodeResolutionKind::Types,
 )
 .is_ok()
 }
@@ -589,18 +614,24 @@ fn try_reverse_map_package_json_exports(
 /// For a set of tsc changes, can them for any that contain something that looks
 /// like an import and rewrite the import specifier to include the extension
 pub fn fix_ts_import_changes(
-referrer: &ModuleSpecifier,
-referrer_kind: NodeModuleKind,
 changes: &[tsc::FileTextChanges],
 language_server: &language_server::Inner,
 ) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
-let import_mapper = language_server.get_ts_response_import_mapper(referrer);
 let mut r = Vec::new();
 for change in changes {
+let Ok(referrer) = ModuleSpecifier::parse(&change.file_name) else {
+continue;
+};
+let referrer_doc = language_server.get_asset_or_document(&referrer).ok();
+let resolution_mode = referrer_doc
+.as_ref()
+.map(|d| d.resolution_mode())
+.unwrap_or(ResolutionMode::Import);
+let import_mapper =
+language_server.get_ts_response_import_mapper(&referrer);
 let mut text_changes = Vec::new();
 for text_change in &change.text_changes {
 let lines = text_change.new_text.split('\n');
 
 let new_lines: Vec<String> = lines
 .map(|line| {
 // This assumes that there's only one import per line.
@@ -608,7 +639,7 @@ pub fn fix_ts_import_changes(
 let specifier =
 captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
 if let Some(new_specifier) = import_mapper
-.check_unresolved_specifier(specifier, referrer, referrer_kind)
+.check_unresolved_specifier(specifier, &referrer, resolution_mode)
 {
 line.replace(specifier, &new_specifier)
 } else {
@@ -638,7 +669,7 @@ pub fn fix_ts_import_changes(
 /// resolution by Deno (includes the extension).
 fn fix_ts_import_action<'a>(
 referrer: &ModuleSpecifier,
-referrer_kind: NodeModuleKind,
+resolution_mode: ResolutionMode,
 action: &'a tsc::CodeFixAction,
 language_server: &language_server::Inner,
 ) -> Option<Cow<'a, tsc::CodeFixAction>> {
@@ -657,9 +688,11 @@ fn fix_ts_import_action<'a>(
 return Some(Cow::Borrowed(action));
 };
 let import_mapper = language_server.get_ts_response_import_mapper(referrer);
-if let Some(new_specifier) =
-import_mapper.check_unresolved_specifier(specifier, referrer, referrer_kind)
-{
+if let Some(new_specifier) = import_mapper.check_unresolved_specifier(
+specifier,
+referrer,
+resolution_mode,
+) {
 let description = action.description.replace(specifier, &new_specifier);
 let changes = action
 .changes
@@ -689,7 +722,8 @@ fn fix_ts_import_action<'a>(
 fix_id: None,
 fix_all_description: None,
 }))
-} else if !import_mapper.is_valid_import(specifier, referrer, referrer_kind) {
+} else if !import_mapper.is_valid_import(specifier, referrer, resolution_mode)
+{
 None
 } else {
 Some(Cow::Borrowed(action))
@@ -1023,7 +1057,7 @@ impl CodeActionCollection {
 pub fn add_ts_fix_action(
 &mut self,
 specifier: &ModuleSpecifier,
-specifier_kind: NodeModuleKind,
+resolution_mode: ResolutionMode,
 action: &tsc::CodeFixAction,
 diagnostic: &lsp::Diagnostic,
 language_server: &language_server::Inner,
@@ -1042,7 +1076,7 @@ impl CodeActionCollection {
 ));
 }
 let Some(action) =
-fix_ts_import_action(specifier, specifier_kind, action, language_server)
+fix_ts_import_action(specifier, resolution_mode, action, language_server)
 else {
 return Ok(());
 };
@@ -1237,12 +1271,12 @@ impl CodeActionCollection {
 let text_info = parsed_source.text_info_lazy();
 let specifier_range = SourceRange::new(
 text_info.loc_to_source_pos(LineAndColumnIndex {
-line_index: import.specifier_range.start.line,
-column_index: import.specifier_range.start.character,
+line_index: import.specifier_range.range.start.line,
+column_index: import.specifier_range.range.start.character,
 }),
 text_info.loc_to_source_pos(LineAndColumnIndex {
-line_index: import.specifier_range.end.line,
-column_index: import.specifier_range.end.character,
+line_index: import.specifier_range.range.end.line,
+column_index: import.specifier_range.range.end.character,
 }),
 );
 
@@ -1277,16 +1311,14 @@ impl CodeActionCollection {
 if json!(i.kind) != json!("es") && json!(i.kind) != json!("tsType") {
 return None;
 }
-if !i.specifier_range.includes(&position) {
+if !i.specifier_range.includes(position) {
 return None;
 }
 
 import_start_from_specifier(document, i)
 })?;
 let referrer = document.specifier();
-let referrer_kind = language_server
-.is_cjs_resolver
-.get_doc_module_kind(document);
+let resolution_mode = document.resolution_mode();
 let file_referrer = document.file_referrer();
 let config_data = language_server
 .config
@@ -1312,7 +1344,7 @@ impl CodeActionCollection {
 if !language_server.resolver.is_bare_package_json_dep(
 &dep_key,
 referrer,
-referrer_kind,
+resolution_mode,
 ) {
 return None;
 }
@@ -1332,7 +1364,7 @@ impl CodeActionCollection {
 }
 if language_server
 .resolver
-.npm_to_file_url(&npm_ref, referrer, referrer_kind, file_referrer)
+.npm_to_file_url(&npm_ref, referrer, resolution_mode, file_referrer)
 .is_some()
 {
 // The package import has types.
@@ -1376,7 +1408,7 @@ impl CodeActionCollection {
 character: import_start.column_index as u32,
 };
 let new_text = format!(
-"{}// @deno-types=\"{}\"\n",
+"{}// @ts-types=\"{}\"\n",
 if position.character == 0 { "" } else { "\n" },
 &types_specifier_text
 );
@@ -1389,7 +1421,7 @@ impl CodeActionCollection {
 };
 Some(lsp::CodeAction {
 title: format!(
-"Add @deno-types directive for \"{}\"",
+"Add @ts-types directive for \"{}\"",
 &types_specifier_text
 ),
 kind: Some(lsp::CodeActionKind::QUICKFIX),
@@ -9,16 +9,14 @@ use super::jsr::CliJsrSearchApi;
 use super::lsp_custom;
 use super::npm::CliNpmSearchApi;
 use super::registries::ModuleRegistry;
-use super::resolver::LspIsCjsResolver;
 use super::resolver::LspResolver;
 use super::search::PackageSearchApi;
 use super::tsc;
 
+use crate::graph_util::to_node_resolution_mode;
 use crate::jsr::JsrFetchResolver;
 use crate::util::path::is_importable_ext;
 use crate::util::path::relative_specifier;
-use deno_graph::source::ResolutionMode;
-use deno_graph::Range;
 use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
 
 use deno_ast::LineAndColumnIndex;
@@ -36,7 +34,8 @@ use deno_semver::package::PackageNv;
 use import_map::ImportMap;
 use indexmap::IndexSet;
 use lsp_types::CompletionList;
-use node_resolver::NodeModuleKind;
+use node_resolver::NodeResolutionKind;
+use node_resolver::ResolutionMode;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use tower_lsp::lsp_types as lsp;
@@ -113,7 +112,7 @@ async fn check_auto_config_registry(
 /// which we want to ignore when replacing text.
 fn to_narrow_lsp_range(
 text_info: &SourceTextInfo,
-range: &deno_graph::Range,
+range: deno_graph::PositionRange,
 ) -> lsp::Range {
 let end_byte_index = text_info
 .loc_to_source_pos(LineAndColumnIndex {
@@ -161,26 +160,25 @@ pub async fn get_import_completions(
 jsr_search_api: &CliJsrSearchApi,
 npm_search_api: &CliNpmSearchApi,
 documents: &Documents,
-is_cjs_resolver: &LspIsCjsResolver,
 resolver: &LspResolver,
 maybe_import_map: Option<&ImportMap>,
 ) -> Option<lsp::CompletionResponse> {
 let document = documents.get(specifier)?;
-let specifier_kind = is_cjs_resolver.get_doc_module_kind(&document);
 let file_referrer = document.file_referrer();
-let (text, _, range) = document.get_maybe_dependency(position)?;
-let range = to_narrow_lsp_range(document.text_info(), &range);
+let (text, _, graph_range) = document.get_maybe_dependency(position)?;
+let resolution_mode = graph_range
+.resolution_mode
+.map(to_node_resolution_mode)
+.unwrap_or_else(|| document.resolution_mode());
+let range = to_narrow_lsp_range(document.text_info(), graph_range.range);
 let resolved = resolver
 .as_cli_resolver(file_referrer)
 .resolve(
 &text,
-&Range {
-specifier: specifier.clone(),
-start: deno_graph::Position::zeroed(),
-end: deno_graph::Position::zeroed(),
-},
-specifier_kind,
-ResolutionMode::Execution,
+specifier,
+deno_graph::Position::zeroed(),
+resolution_mode,
+NodeResolutionKind::Execution,
 )
 .ok();
 if let Some(completion_list) = get_jsr_completions(
@@ -206,7 +204,7 @@ pub async fn get_import_completions(
 // completions for import map specifiers
 Some(lsp::CompletionResponse::List(completion_list))
 } else if let Some(completion_list) =
-get_local_completions(specifier, specifier_kind, &text, &range, resolver)
+get_local_completions(specifier, resolution_mode, &text, &range, resolver)
 {
 // completions for local relative modules
 Some(lsp::CompletionResponse::List(completion_list))
@@ -361,7 +359,7 @@ fn get_import_map_completions(
 /// Return local completions that are relative to the base specifier.
 fn get_local_completions(
 referrer: &ModuleSpecifier,
-referrer_kind: NodeModuleKind,
+resolution_mode: ResolutionMode,
 text: &str,
 range: &lsp::Range,
 resolver: &LspResolver,
@@ -374,13 +372,10 @@ fn get_local_completions(
 .as_cli_resolver(Some(referrer))
 .resolve(
 parent,
-&Range {
-specifier: referrer.clone(),
-start: deno_graph::Position::zeroed(),
-end: deno_graph::Position::zeroed(),
-},
-referrer_kind,
-ResolutionMode::Execution,
+referrer,
+deno_graph::Position::zeroed(),
+resolution_mode,
+NodeResolutionKind::Execution,
 )
 .ok()?;
 let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?;
@@ -748,13 +743,16 @@ fn get_node_completions(
 }
 let items = SUPPORTED_BUILTIN_NODE_MODULES
 .iter()
-.map(|name| {
+.filter_map(|name| {
+if name.starts_with('_') {
+return None;
+}
 let specifier = format!("node:{}", name);
 let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
 range: *range,
 new_text: specifier.clone(),
 }));
-lsp::CompletionItem {
+Some(lsp::CompletionItem {
 label: specifier,
 kind: Some(lsp::CompletionItemKind::FILE),
 detail: Some("(node)".to_string()),
@@ -763,7 +761,7 @@ fn get_node_completions(
 IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
 ),
 ..Default::default()
-}
+})
 })
 .collect();
 Some(CompletionList {
@@ -831,7 +829,6 @@ mod tests {
 use crate::lsp::documents::LanguageId;
 use crate::lsp::search::tests::TestPackageSearchApi;
 use deno_core::resolve_url;
-use deno_graph::Range;
 use pretty_assertions::assert_eq;
 use std::collections::HashMap;
 use test_util::TempDir;
@@ -912,7 +909,7 @@ mod tests {
 ModuleSpecifier::from_file_path(file_c).expect("could not create");
 let actual = get_local_completions(
 &specifier,
-NodeModuleKind::Esm,
+ResolutionMode::Import,
 "./",
 &lsp::Range {
 start: lsp::Position {
@@ -1608,8 +1605,7 @@ mod tests {
 let text_info = SourceTextInfo::from_string(r#""te""#.to_string());
 let range = to_narrow_lsp_range(
 &text_info,
-&Range {
-specifier: ModuleSpecifier::parse("https://deno.land").unwrap(),
+deno_graph::PositionRange {
 start: deno_graph::Position {
 line: 0,
 character: 0,
@@ -1632,8 +1628,7 @@ mod tests {
 let text_info = SourceTextInfo::from_string(r#""te"#.to_string());
 let range = to_narrow_lsp_range(
 &text_info,
-&Range {
-specifier: ModuleSpecifier::parse("https://deno.land").unwrap(),
+deno_graph::PositionRange {
 start: deno_graph::Position {
 line: 0,
 character: 0,
@@ -41,6 +41,7 @@ use deno_path_util::url_to_file_path;
 use deno_runtime::deno_node::PackageJson;
 use indexmap::IndexSet;
 use lsp_types::ClientCapabilities;
+use std::borrow::Cow;
 use std::collections::BTreeMap;
 use std::collections::BTreeSet;
 use std::collections::HashMap;
@@ -62,7 +63,7 @@ use crate::args::ConfigFile;
 use crate::args::LintFlags;
 use crate::args::LintOptions;
 use crate::cache::FastInsecureHasher;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::lsp::logging::lsp_warn;
 use crate::resolver::CliSloppyImportsResolver;
 use crate::resolver::SloppyImportsCachedFs;
@@ -458,6 +459,19 @@ impl Default for LanguagePreferences {
   }
 }

+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+pub struct SuggestionActionsSettings {
+  #[serde(default = "is_true")]
+  pub enabled: bool,
+}
+
+impl Default for SuggestionActionsSettings {
+  fn default() -> Self {
+    SuggestionActionsSettings { enabled: true }
+  }
+}
+
 #[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateImportsOnFileMoveOptions {
@@ -489,6 +503,8 @@ pub struct LanguageWorkspaceSettings {
   #[serde(default)]
   pub suggest: CompletionSettings,
   #[serde(default)]
+  pub suggestion_actions: SuggestionActionsSettings,
+  #[serde(default)]
   pub update_imports_on_file_move: UpdateImportsOnFileMoveOptions,
 }

@@ -1202,7 +1218,7 @@ impl ConfigData {
     specified_config: Option<&Path>,
     scope: &ModuleSpecifier,
     settings: &Settings,
-    file_fetcher: &Arc<FileFetcher>,
+    file_fetcher: &Arc<CliFileFetcher>,
     // sync requirement is because the lsp requires sync
     cached_deno_config_fs: &(dyn DenoConfigFs + Sync),
     deno_json_cache: &(dyn DenoJsonCache + Sync),
@@ -1297,7 +1313,7 @@ impl ConfigData {
     member_dir: Arc<WorkspaceDirectory>,
     scope: Arc<ModuleSpecifier>,
     settings: &Settings,
-    file_fetcher: Option<&Arc<FileFetcher>>,
+    file_fetcher: Option<&Arc<CliFileFetcher>>,
   ) -> Self {
     let (settings, workspace_folder) = settings.get_for_specifier(&scope);
     let mut watched_files = HashMap::with_capacity(10);
@@ -1818,7 +1834,7 @@ impl ConfigTree {
     &mut self,
     settings: &Settings,
     workspace_files: &IndexSet<ModuleSpecifier>,
-    file_fetcher: &Arc<FileFetcher>,
+    file_fetcher: &Arc<CliFileFetcher>,
   ) {
     lsp_log!("Refreshing configuration tree...");
     // since we're resolving a workspace multiple times in different
@@ -2092,7 +2108,7 @@ impl<T: Clone> CachedFsItems<T> {
 #[derive(Default)]
 struct InnerData {
   stat_calls: CachedFsItems<deno_config::fs::FsMetadata>,
-  read_to_string_calls: CachedFsItems<String>,
+  read_to_string_calls: CachedFsItems<Cow<'static, str>>,
 }

 #[derive(Default)]
@@ -2113,7 +2129,7 @@ impl DenoConfigFs for CachedDenoConfigFs {
   fn read_to_string_lossy(
     &self,
     path: &Path,
-  ) -> Result<String, std::io::Error> {
+  ) -> Result<Cow<'static, str>, std::io::Error> {
     self
       .0
       .lock()
@@ -2291,6 +2307,7 @@ mod tests {
           enabled: true,
         },
       },
+      suggestion_actions: SuggestionActionsSettings { enabled: true },
       update_imports_on_file_move: UpdateImportsOnFileMoveOptions {
         enabled: UpdateImportsOnFileMoveEnabled::Prompt
       }
@@ -2337,6 +2354,7 @@ mod tests {
           enabled: true,
         },
       },
+      suggestion_actions: SuggestionActionsSettings { enabled: true },
       update_imports_on_file_move: UpdateImportsOnFileMoveOptions {
         enabled: UpdateImportsOnFileMoveEnabled::Prompt
       }

@@ -24,6 +24,7 @@ use crate::resolver::SloppyImportsCachedFs;
 use crate::tools::lint::CliLinter;
 use crate::tools::lint::CliLinterOptions;
 use crate::tools::lint::LintRuleProvider;
+use crate::tsc::DiagnosticCategory;
 use crate::util::path::to_percent_decoded_str;

 use deno_ast::MediaType;
@@ -44,8 +45,9 @@ use deno_graph::source::ResolveError;
 use deno_graph::Resolution;
 use deno_graph::ResolutionError;
 use deno_graph::SpecifierError;
+use deno_lint::linter::LintConfig as DenoLintConfig;
 use deno_resolver::sloppy_imports::SloppyImportsResolution;
-use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
+use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
 use deno_runtime::deno_fs;
 use deno_runtime::deno_node;
 use deno_runtime::tokio_util::create_basic_runtime;
@@ -833,7 +835,7 @@ fn generate_lint_diagnostics(
          lint_rule_provider.resolve_lint_rules(Default::default(), None)
        },
        fix: false,
-        deno_lint_config: deno_lint::linter::LintConfig {
+        deno_lint_config: DenoLintConfig {
          default_jsx_factory: None,
          default_jsx_fragment_factory: None,
        },
@@ -906,8 +908,22 @@ async fn generate_ts_diagnostics(
  } else {
    Default::default()
  };
-  for (specifier_str, ts_json_diagnostics) in ts_diagnostics_map {
+  for (specifier_str, mut ts_json_diagnostics) in ts_diagnostics_map {
    let specifier = resolve_url(&specifier_str)?;
+    let suggestion_actions_settings = snapshot
+      .config
+      .language_settings_for_specifier(&specifier)
+      .map(|s| s.suggestion_actions.clone())
+      .unwrap_or_default();
+    if !suggestion_actions_settings.enabled {
+      ts_json_diagnostics.retain(|d| {
+        d.category != DiagnosticCategory::Suggestion
+          // Still show deprecated and unused diagnostics.
+          // https://github.com/microsoft/vscode/blob/ce50bd4876af457f64d83cfd956bc916535285f4/extensions/typescript-language-features/src/languageFeatures/diagnostics.ts#L113-L114
+          || d.reports_deprecated == Some(true)
+          || d.reports_unnecessary == Some(true)
+      });
+    }
    let version = snapshot
      .documents
      .get(&specifier)
@@ -1262,11 +1278,11 @@ impl DenoDiagnostic {
      Self::NoAttributeType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import attribute. Consider adding `with { type: \"json\" }` to the import statement.".to_string(), None),
      Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: {specifier}"), Some(json!({ "specifier": specifier }))),
      Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
-      Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("NPM package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
+      Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("npm package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
      Self::NoLocal(specifier) => {
        let maybe_sloppy_resolution = CliSloppyImportsResolver::new(
          SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs))
-        ).resolve(specifier, SloppyImportsResolutionMode::Execution);
+        ).resolve(specifier, SloppyImportsResolutionKind::Execution);
        let data = maybe_sloppy_resolution.as_ref().map(|res| {
          json!({
            "specifier": specifier,
@@ -1355,7 +1371,7 @@ fn diagnose_resolution(
    }
    // don't bother warning about sloppy import redirects from .js to .d.ts
    // because explaining how to fix this via a diagnostic involves using
-    // @deno-types and that's a bit complicated to explain
+    // @ts-types and that's a bit complicated to explain
    let is_sloppy_import_dts_redirect = doc_specifier.scheme() == "file"
      && doc.media_type().is_declaration()
      && !MediaType::from_specifier(specifier).is_declaration();
@@ -1523,7 +1539,7 @@ fn diagnose_dependency(
    .iter()
    .map(|i| documents::to_lsp_range(&i.specifier_range))
    .collect();
-  // TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has
+  // TODO(nayeemrmn): This is a crude way of detecting `@ts-types` which has
  // a different specifier and therefore needs a separate call to
  // `diagnose_resolution()`. It would be much cleaner if that were modelled as
  // a separate dependency: https://github.com/denoland/deno_graph/issues/247.
@@ -1531,7 +1547,7 @@ fn diagnose_dependency(
    && !dependency.imports.iter().any(|i| {
      dependency
        .maybe_type
-        .includes(&i.specifier_range.start)
+        .includes(i.specifier_range.range.start)
        .is_some()
    });

@@ -1540,7 +1556,7 @@ fn diagnose_dependency(
    snapshot,
    dependency_key,
    if dependency.maybe_code.is_none()
-    // If not @deno-types, diagnose the types if the code errored because
+    // If not @ts-types, diagnose the types if the code errored because
    // it's likely resolving into the node_modules folder, which might be
    // erroring correctly due to resolution only being for bundlers. Let this
    // fail at runtime if necessary, but don't bother erroring in the editor
@@ -1707,7 +1723,6 @@ mod tests {
        documents: Arc::new(documents),
        assets: Default::default(),
        config: Arc::new(config),
-        is_cjs_resolver: Default::default(),
        resolver,
      },
    )
@@ -1952,7 +1967,7 @@ let c: number = "a";
      &[(
        "a.ts",
        r#"
-        // @deno-types="bad.d.ts"
+        // @ts-types="bad.d.ts"
        import "bad.js";
        import "bad.js";
      "#,
@@ -2006,11 +2021,11 @@ let c: number = "a";
          "range": {
            "start": {
              "line": 1,
-              "character": 23
+              "character": 21
            },
            "end": {
              "line": 1,
-              "character": 33
+              "character": 31
            }
          },
          "severity": 1,

@@ -3,7 +3,6 @@
 use super::cache::calculate_fs_version;
 use super::cache::LspCache;
 use super::config::Config;
-use super::resolver::LspIsCjsResolver;
 use super::resolver::LspResolver;
 use super::resolver::ScopeDepInfo;
 use super::resolver::SingleReferrerGraphResolver;
@@ -27,7 +26,6 @@ use deno_core::futures::future::Shared;
 use deno_core::futures::FutureExt;
 use deno_core::parking_lot::Mutex;
 use deno_core::ModuleSpecifier;
-use deno_graph::source::ResolutionMode;
 use deno_graph::Resolution;
 use deno_path_util::url_to_file_path;
 use deno_runtime::deno_node;
@@ -36,7 +34,8 @@ use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageReq;
 use indexmap::IndexMap;
 use indexmap::IndexSet;
-use node_resolver::NodeModuleKind;
+use node_resolver::NodeResolutionKind;
+use node_resolver::ResolutionMode;
 use std::borrow::Cow;
 use std::collections::BTreeMap;
 use std::collections::HashMap;
@@ -66,6 +65,12 @@ pub enum LanguageId {
   Html,
   Css,
   Yaml,
+  Sql,
+  Svelte,
+  Vue,
+  Astro,
+  Vento,
+  Nunjucks,
   Unknown,
 }

@@ -82,6 +87,12 @@ impl LanguageId {
       LanguageId::Html => Some("html"),
       LanguageId::Css => Some("css"),
       LanguageId::Yaml => Some("yaml"),
+      LanguageId::Sql => Some("sql"),
+      LanguageId::Svelte => Some("svelte"),
+      LanguageId::Vue => Some("vue"),
+      LanguageId::Astro => Some("astro"),
+      LanguageId::Vento => Some("vto"),
+      LanguageId::Nunjucks => Some("njk"),
       LanguageId::Unknown => None,
     }
   }
@@ -97,6 +108,12 @@ impl LanguageId {
       LanguageId::Html => Some("text/html"),
       LanguageId::Css => Some("text/css"),
       LanguageId::Yaml => Some("application/yaml"),
+      LanguageId::Sql => None,
+      LanguageId::Svelte => None,
+      LanguageId::Vue => None,
+      LanguageId::Astro => None,
+      LanguageId::Vento => None,
+      LanguageId::Nunjucks => None,
       LanguageId::Unknown => None,
     }
   }
@@ -124,6 +141,12 @@ impl FromStr for LanguageId {
       "html" => Ok(Self::Html),
       "css" => Ok(Self::Css),
       "yaml" => Ok(Self::Yaml),
+      "sql" => Ok(Self::Sql),
+      "svelte" => Ok(Self::Svelte),
+      "vue" => Ok(Self::Vue),
+      "astro" => Ok(Self::Astro),
+      "vento" => Ok(Self::Vento),
+      "nunjucks" => Ok(Self::Nunjucks),
       _ => Ok(Self::Unknown),
     }
   }
@@ -228,6 +251,13 @@ impl AssetOrDocument {
   pub fn document_lsp_version(&self) -> Option<i32> {
     self.document().and_then(|d| d.maybe_lsp_version())
   }
+
+  pub fn resolution_mode(&self) -> ResolutionMode {
+    match self {
+      AssetOrDocument::Asset(_) => ResolutionMode::Import,
+      AssetOrDocument::Document(d) => d.resolution_mode(),
+    }
+  }
 }

 type ModuleResult = Result<deno_graph::JsModule, deno_graph::ModuleGraphError>;
@@ -313,6 +343,7 @@ pub struct Document {
   media_type: MediaType,
   /// Present if and only if this is an open document.
   open_data: Option<DocumentOpenData>,
+  resolution_mode: ResolutionMode,
   resolver: Arc<LspResolver>,
   specifier: ModuleSpecifier,
   text: Arc<str>,
@@ -328,7 +359,6 @@ impl Document {
     maybe_lsp_version: Option<i32>,
     maybe_language_id: Option<LanguageId>,
     maybe_headers: Option<HashMap<String, String>>,
-    is_cjs_resolver: &LspIsCjsResolver,
     resolver: Arc<LspResolver>,
     config: Arc<Config>,
     cache: &Arc<LspCache>,
@@ -340,7 +370,7 @@ impl Document {
       .or(file_referrer);
     let media_type =
       resolve_media_type(&specifier, maybe_headers.as_ref(), maybe_language_id);
-    let (maybe_parsed_source, maybe_module) =
+    let (maybe_parsed_source, maybe_module, resolution_mode) =
       if media_type_is_diagnosable(media_type) {
         parse_and_analyze_module(
           specifier.clone(),
@@ -348,11 +378,10 @@ impl Document {
           maybe_headers.as_ref(),
           media_type,
           file_referrer.as_ref(),
-          is_cjs_resolver,
           &resolver,
         )
       } else {
-        (None, None)
+        (None, None, ResolutionMode::Import)
       };
     let maybe_module = maybe_module.and_then(Result::ok);
     let dependencies = maybe_module
@@ -387,6 +416,7 @@ impl Document {
         maybe_parsed_source,
         maybe_semantic_tokens: Default::default(),
       }),
+      resolution_mode,
       resolver,
       specifier,
       text,
@@ -396,7 +426,6 @@ impl Document {

   fn with_new_config(
     &self,
-    is_cjs_resolver: &LspIsCjsResolver,
     resolver: Arc<LspResolver>,
     config: Arc<Config>,
   ) -> Arc<Self> {
@@ -408,20 +437,20 @@ impl Document {
     let dependencies;
     let maybe_types_dependency;
     let maybe_parsed_source;
+    let found_resolution_mode;
     let is_script;
     let maybe_test_module_fut;
     if media_type != self.media_type {
       let parsed_source_result =
         parse_source(self.specifier.clone(), self.text.clone(), media_type);
-      let maybe_module = analyze_module(
+      let (maybe_module_result, resolution_mode) = analyze_module(
         self.specifier.clone(),
         &parsed_source_result,
         self.maybe_headers.as_ref(),
         self.file_referrer.as_ref(),
-        is_cjs_resolver,
         &resolver,
-      )
-      .ok();
+      );
+      let maybe_module = maybe_module_result.ok();
       dependencies = maybe_module
         .as_ref()
         .map(|m| Arc::new(m.dependencies.clone()))
@@ -433,17 +462,21 @@ impl Document {
       maybe_parsed_source = Some(parsed_source_result);
       maybe_test_module_fut =
         get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config);
+      found_resolution_mode = resolution_mode;
     } else {
       let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref());
+      let is_cjs_resolver =
+        resolver.as_is_cjs_resolver(self.file_referrer.as_ref());
       let npm_resolver =
         resolver.create_graph_npm_resolver(self.file_referrer.as_ref());
       let config_data = resolver.as_config_data(self.file_referrer.as_ref());
       let jsx_import_source_config =
         config_data.and_then(|d| d.maybe_jsx_import_source_config());
+      found_resolution_mode = is_cjs_resolver
+        .get_lsp_resolution_mode(&self.specifier, self.is_script);
       let resolver = SingleReferrerGraphResolver {
         valid_referrer: &self.specifier,
-        referrer_kind: is_cjs_resolver
-          .get_lsp_referrer_kind(&self.specifier, self.is_script),
+        module_resolution_mode: found_resolution_mode,
         cli_resolver,
         jsx_import_source_config: jsx_import_source_config.as_ref(),
       };
@@ -493,6 +526,7 @@ impl Document {
       maybe_language_id: self.maybe_language_id,
       maybe_test_module_fut,
       media_type,
+      resolution_mode: found_resolution_mode,
       open_data: self.open_data.as_ref().map(|d| DocumentOpenData {
         lsp_version: d.lsp_version,
         maybe_parsed_source,
@@ -508,7 +542,6 @@ impl Document {

   fn with_change(
     &self,
-    is_cjs_resolver: &LspIsCjsResolver,
     version: i32,
     changes: Vec<lsp::TextDocumentContentChangeEvent>,
   ) -> Result<Arc<Self>, AnyError> {
@@ -530,7 +563,7 @@ impl Document {
     }
     let text: Arc<str> = content.into();
     let media_type = self.media_type;
-    let (maybe_parsed_source, maybe_module) = if self
+    let (maybe_parsed_source, maybe_module, resolution_mode) = if self
       .maybe_language_id
       .as_ref()
       .map(|li| li.is_diagnosable())
@@ -542,11 +575,10 @@ impl Document {
         self.maybe_headers.as_ref(),
         media_type,
         self.file_referrer.as_ref(),
-        is_cjs_resolver,
         self.resolver.as_ref(),
       )
     } else {
-      (None, None)
+      (None, None, ResolutionMode::Import)
     };
     let maybe_module = maybe_module.and_then(Result::ok);
     let dependencies = maybe_module
@@ -580,6 +612,7 @@ impl Document {
       maybe_navigation_tree: Mutex::new(None),
       maybe_test_module_fut,
       media_type,
+      resolution_mode,
       open_data: self.open_data.is_some().then_some(DocumentOpenData {
         lsp_version: version,
         maybe_parsed_source,
@@ -613,6 +646,7 @@ impl Document {
       maybe_test_module_fut: self.maybe_test_module_fut.clone(),
       media_type: self.media_type,
       open_data: None,
+      resolution_mode: self.resolution_mode,
       resolver: self.resolver.clone(),
     })
   }
@@ -641,6 +675,7 @@ impl Document {
       maybe_test_module_fut: self.maybe_test_module_fut.clone(),
       media_type: self.media_type,
       open_data: self.open_data.clone(),
+      resolution_mode: self.resolution_mode,
       resolver: self.resolver.clone(),
     })
   }
@@ -664,6 +699,10 @@ impl Document {
     &self.text
   }

+  pub fn resolution_mode(&self) -> ResolutionMode {
+    self.resolution_mode
+  }
+
   pub fn text_info(&self) -> &SourceTextInfo {
     // try to get the text info from the parsed source and if
     // not then create one in the cell
@@ -677,14 +716,6 @@ impl Document {
         .get_or_init(|| SourceTextInfo::new(self.text.clone()))
     })
   }
-
-  /// If this is maybe a CJS script and maybe not an ES module.
-  ///
-  /// Use `LspIsCjsResolver` to determine for sure.
-  pub fn is_script(&self) -> Option<bool> {
-    self.is_script
-  }
-
   pub fn line_index(&self) -> Arc<LineIndex> {
     self.line_index.clone()
   }
@@ -768,7 +799,7 @@ impl Document {
     };
     self.dependencies().iter().find_map(|(s, dep)| {
       dep
-        .includes(&position)
+        .includes(position)
         .map(|r| (s.clone(), dep.clone(), r.clone()))
     })
   }
@@ -809,15 +840,15 @@ fn resolve_media_type(
   MediaType::from_specifier(specifier)
 }

-pub fn to_lsp_range(range: &deno_graph::Range) -> lsp::Range {
+pub fn to_lsp_range(referrer: &deno_graph::Range) -> lsp::Range {
   lsp::Range {
     start: lsp::Position {
-      line: range.start.line as u32,
-      character: range.start.character as u32,
+      line: referrer.range.start.line as u32,
+      character: referrer.range.start.character as u32,
     },
     end: lsp::Position {
-      line: range.end.line as u32,
-      character: range.end.character as u32,
+      line: referrer.range.end.line as u32,
+      character: referrer.range.end.character as u32,
     },
   }
 }
@@ -832,7 +863,6 @@ impl FileSystemDocuments {
   pub fn get(
     &self,
     specifier: &ModuleSpecifier,
-    is_cjs_resolver: &LspIsCjsResolver,
     resolver: &Arc<LspResolver>,
     config: &Arc<Config>,
     cache: &Arc<LspCache>,
@@ -856,14 +886,7 @@ impl FileSystemDocuments {
     };
     if dirty {
       // attempt to update the file on the file system
-      self.refresh_document(
-        specifier,
-        is_cjs_resolver,
-        resolver,
-        config,
-        cache,
-        file_referrer,
-      )
+      self.refresh_document(specifier, resolver, config, cache, file_referrer)
     } else {
       old_doc
     }
@@ -874,7 +897,6 @@ impl FileSystemDocuments {
   fn refresh_document(
     &self,
     specifier: &ModuleSpecifier,
-    is_cjs_resolver: &LspIsCjsResolver,
     resolver: &Arc<LspResolver>,
     config: &Arc<Config>,
     cache: &Arc<LspCache>,
@@ -896,7 +918,6 @@ impl FileSystemDocuments {
       None,
       None,
       None,
-      is_cjs_resolver,
       resolver.clone(),
       config.clone(),
       cache,
@@ -913,7 +934,6 @@ impl FileSystemDocuments {
       None,
       None,
       None,
-      is_cjs_resolver,
       resolver.clone(),
       config.clone(),
       cache,
@@ -936,7 +956,7 @@ impl FileSystemDocuments {
     let content = bytes_to_content(
       specifier,
       media_type,
-      cached_file.content,
+      cached_file.content.into_owned(),
       maybe_charset,
     )
     .ok()?;
@@ -946,7 +966,6 @@ impl FileSystemDocuments {
       None,
       None,
       Some(cached_file.metadata.headers),
-      is_cjs_resolver,
       resolver.clone(),
       config.clone(),
       cache,
@@ -987,8 +1006,6 @@ pub struct Documents {
   /// The DENO_DIR that the documents looks for non-file based modules.
   cache: Arc<LspCache>,
   config: Arc<Config>,
-  /// Resolver for detecting if a document is CJS or ESM.
-  is_cjs_resolver: Arc<LspIsCjsResolver>,
   /// A resolver that takes into account currently loaded import map and JSX
   /// settings.
   resolver: Arc<LspResolver>,
@@ -1024,7 +1041,6 @@ impl Documents {
       // the cache for remote modules here in order to get the
       // x-typescript-types?
       None,
-      &self.is_cjs_resolver,
       self.resolver.clone(),
       self.config.clone(),
       &self.cache,
@@ -1059,7 +1075,7 @@ impl Documents {
         ))
       })?;
     self.dirty = true;
-    let doc = doc.with_change(&self.is_cjs_resolver, version, changes)?;
+    let doc = doc.with_change(version, changes)?;
     self.open_docs.insert(doc.specifier().clone(), doc.clone());
     Ok(doc)
   }
@@ -1191,7 +1207,6 @@ impl Documents {
     if let Some(old_doc) = old_doc {
       self.file_system_docs.get(
         specifier,
-        &self.is_cjs_resolver,
         &self.resolver,
         &self.config,
         &self.cache,
@@ -1216,7 +1231,6 @@ impl Documents {
     } else {
       self.file_system_docs.get(
         &specifier,
-        &self.is_cjs_resolver,
         &self.resolver,
         &self.config,
         &self.cache,
@@ -1271,7 +1285,8 @@ impl Documents {
   /// tsc when type checking.
   pub fn resolve(
     &self,
-    raw_specifiers: &[String],
+    // (is_cjs: bool, raw_specifier: String)
+    raw_specifiers: &[(bool, String)],
     referrer: &ModuleSpecifier,
     file_referrer: Option<&ModuleSpecifier>,
   ) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
@@ -1281,11 +1296,12 @@ impl Documents {
       .and_then(|d| d.file_referrer())
       .or(file_referrer);
     let dependencies = referrer_doc.as_ref().map(|d| d.dependencies());
-    let referrer_kind = self
-      .is_cjs_resolver
-      .get_maybe_doc_module_kind(referrer, referrer_doc.as_deref());
     let mut results = Vec::new();
-    for raw_specifier in raw_specifiers {
+    for (is_cjs, raw_specifier) in raw_specifiers {
+      let resolution_mode = match is_cjs {
+        true => ResolutionMode::Require,
+        false => ResolutionMode::Import,
+      };
       if raw_specifier.starts_with("asset:") {
         if let Ok(specifier) = ModuleSpecifier::parse(raw_specifier) {
           let media_type = MediaType::from_specifier(&specifier);
@@ -1300,14 +1316,14 @@ impl Documents {
           results.push(self.resolve_dependency(
             specifier,
             referrer,
-            referrer_kind,
+            resolution_mode,
             file_referrer,
           ));
         } else if let Some(specifier) = dep.maybe_code.maybe_specifier() {
           results.push(self.resolve_dependency(
             specifier,
             referrer,
-            referrer_kind,
+            resolution_mode,
             file_referrer,
           ));
         } else {
@@ -1316,19 +1332,16 @@ impl Documents {
       } else if let Ok(specifier) =
         self.resolver.as_cli_resolver(file_referrer).resolve(
           raw_specifier,
-          &deno_graph::Range {
-            specifier: referrer.clone(),
-            start: deno_graph::Position::zeroed(),
-            end: deno_graph::Position::zeroed(),
-          },
-          referrer_kind,
-          ResolutionMode::Types,
+          referrer,
+          deno_graph::Position::zeroed(),
+          resolution_mode,
+          NodeResolutionKind::Types,
         )
       {
         results.push(self.resolve_dependency(
           &specifier,
           referrer,
-          referrer_kind,
+          resolution_mode,
           file_referrer,
         ));
       } else {
@@ -1347,7 +1360,6 @@ impl Documents {
   ) {
     self.config = Arc::new(config.clone());
     self.cache = Arc::new(cache.clone());
-    self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(cache));
     self.resolver = resolver.clone();

     node_resolver::PackageJsonThreadLocalCache::clear();
@@ -1371,21 +1383,14 @@ impl Documents {
       if !config.specifier_enabled(doc.specifier()) {
         continue;
       }
-      *doc = doc.with_new_config(
-        &self.is_cjs_resolver,
-        self.resolver.clone(),
-        self.config.clone(),
-      );
+      *doc = doc.with_new_config(self.resolver.clone(), self.config.clone());
     }
     for mut doc in self.file_system_docs.docs.iter_mut() {
       if !config.specifier_enabled(doc.specifier()) {
         continue;
       }
-      *doc.value_mut() = doc.with_new_config(
-        &self.is_cjs_resolver,
-        self.resolver.clone(),
-        self.config.clone(),
-      );
+      *doc.value_mut() =
+        doc.with_new_config(self.resolver.clone(), self.config.clone());
     }
     self.open_docs = open_docs;
     let mut preload_count = 0;
@@ -1402,7 +1407,6 @@ impl Documents {
       {
         fs_docs.refresh_document(
           specifier,
-          &self.is_cjs_resolver,
           &self.resolver,
           &self.config,
           &self.cache,
@@ -1477,27 +1481,24 @@ impl Documents {
       let type_specifier = jsx_config.default_types_specifier.as_ref()?;
       let code_specifier = jsx_config.default_specifier.as_ref()?;
       let cli_resolver = self.resolver.as_cli_resolver(Some(scope));
-      let range = deno_graph::Range {
-        specifier: jsx_config.base_url.clone(),
-        start: deno_graph::Position::zeroed(),
-        end: deno_graph::Position::zeroed(),
-      };
       let type_specifier = cli_resolver
         .resolve(
           type_specifier,
-          &range,
+          &jsx_config.base_url,
+          deno_graph::Position::zeroed(),
           // todo(dsherret): this is wrong because it doesn't consider CJS referrers
-          deno_package_json::NodeModuleKind::Esm,
-          ResolutionMode::Types,
+          ResolutionMode::Import,
+          NodeResolutionKind::Types,
         )
         .ok()?;
       let code_specifier = cli_resolver
         .resolve(
           code_specifier,
-          &range,
+          &jsx_config.base_url,
+          deno_graph::Position::zeroed(),
           // todo(dsherret): this is wrong because it doesn't consider CJS referrers
-          deno_package_json::NodeModuleKind::Esm,
-          ResolutionMode::Execution,
+          ResolutionMode::Import,
+          NodeResolutionKind::Execution,
         )
         .ok()?;
       dep_info
@@ -1542,7 +1543,7 @@ impl Documents {
     &self,
     specifier: &ModuleSpecifier,
     referrer: &ModuleSpecifier,
-    referrer_kind: NodeModuleKind,
+    resolution_mode: ResolutionMode,
     file_referrer: Option<&ModuleSpecifier>,
   ) -> Option<(ModuleSpecifier, MediaType)> {
     if let Some(module_name) = specifier.as_str().strip_prefix("node:") {
@@ -1559,7 +1560,7 @@ impl Documents {
       let (s, mt) = self.resolver.npm_to_file_url(
         &npm_ref,
         referrer,
-        referrer_kind,
+        resolution_mode,
         file_referrer,
       )?;
       specifier = s;
@@ -1571,8 +1572,12 @@ impl Documents {
       return Some((specifier, media_type));
     };
     if let Some(types) = doc.maybe_types_dependency().maybe_specifier() {
-      let specifier_kind = self.is_cjs_resolver.get_doc_module_kind(&doc);
-      self.resolve_dependency(types, &specifier, specifier_kind, file_referrer)
+      self.resolve_dependency(
+        types,
+        &specifier,
+        doc.resolution_mode(),
+        file_referrer,
+      )
     } else {
       Some((doc.specifier().clone(), doc.media_type()))
     }
@@ -1640,19 +1645,25 @@ fn parse_and_analyze_module(
   maybe_headers: Option<&HashMap<String, String>>,
   media_type: MediaType,
   file_referrer: Option<&ModuleSpecifier>,
-  is_cjs_resolver: &LspIsCjsResolver,
   resolver: &LspResolver,
-) -> (Option<ParsedSourceResult>, Option<ModuleResult>) {
+) -> (
+  Option<ParsedSourceResult>,
+  Option<ModuleResult>,
+  ResolutionMode,
+) {
   let parsed_source_result = parse_source(specifier.clone(), text, media_type);
-  let module_result = analyze_module(
+  let (module_result, resolution_mode) = analyze_module(
     specifier,
     &parsed_source_result,
     maybe_headers,
     file_referrer,
-    is_cjs_resolver,
     resolver,
   );
-  (Some(parsed_source_result), Some(module_result))
+  (
+    Some(parsed_source_result),
+    Some(module_result),
+    resolution_mode,
+  )
 }

 fn parse_source(
@@ -1675,44 +1686,51 @@ fn analyze_module(
   parsed_source_result: &ParsedSourceResult,
   maybe_headers: Option<&HashMap<String, String>>,
   file_referrer: Option<&ModuleSpecifier>,
-  is_cjs_resolver: &LspIsCjsResolver,
   resolver: &LspResolver,
-) -> ModuleResult {
+) -> (ModuleResult, ResolutionMode) {
   match parsed_source_result {
     Ok(parsed_source) => {
       let npm_resolver = resolver.create_graph_npm_resolver(file_referrer);
       let cli_resolver = resolver.as_cli_resolver(file_referrer);
+      let is_cjs_resolver = resolver.as_is_cjs_resolver(file_referrer);
       let config_data = resolver.as_config_data(file_referrer);
       let valid_referrer = specifier.clone();
       let jsx_import_source_config =
         config_data.and_then(|d| d.maybe_jsx_import_source_config());
+      let module_resolution_mode = is_cjs_resolver.get_lsp_resolution_mode(
+        &specifier,
+        Some(parsed_source.compute_is_script()),
+      );
       let resolver = SingleReferrerGraphResolver {
         valid_referrer: &valid_referrer,
-        referrer_kind: is_cjs_resolver.get_lsp_referrer_kind(
-          &specifier,
-          Some(parsed_source.compute_is_script()),
-        ),
+        module_resolution_mode,
         cli_resolver,
         jsx_import_source_config: jsx_import_source_config.as_ref(),
       };
-      Ok(deno_graph::parse_module_from_ast(
-        deno_graph::ParseModuleFromAstOptions {
-          graph_kind: deno_graph::GraphKind::TypesOnly,
-          specifier,
-          maybe_headers,
-          parsed_source,
-          // use a null file system because there's no need to bother resolving
-          // dynamic imports like import(`./dir/${something}`) in the LSP
-          file_system: &deno_graph::source::NullFileSystem,
-          jsr_url_provider: &CliJsrUrlProvider,
-          maybe_resolver: Some(&resolver),
-          maybe_npm_resolver: Some(&npm_resolver),
-        },
-      ))
+      (
+        Ok(deno_graph::parse_module_from_ast(
+          deno_graph::ParseModuleFromAstOptions {
+            graph_kind: deno_graph::GraphKind::TypesOnly,
+            specifier,
+            maybe_headers,
+            parsed_source,
+            // use a null file system because there's no need to bother resolving
+            // dynamic imports like import(`./dir/${something}`) in the LSP
+            file_system: &deno_graph::source::NullFileSystem,
+            jsr_url_provider: &CliJsrUrlProvider,
+            maybe_resolver: Some(&resolver),
+            maybe_npm_resolver: Some(&npm_resolver),
+          },
+        )),
+        module_resolution_mode,
+      )
     }
-    Err(err) => Err(deno_graph::ModuleGraphError::ModuleError(
-      deno_graph::ModuleError::ParseErr(specifier, err.clone()),
-    )),
+    Err(err) => (
+      Err(deno_graph::ModuleGraphError::ModuleError(
+        deno_graph::ModuleError::ParseErr(specifier, err.clone()),
+      )),
+      ResolutionMode::Import,
+    ),
   }
 }

@@ -2,7 +2,8 @@

 use crate::args::jsr_api_url;
 use crate::args::jsr_url;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
+use crate::file_fetcher::TextDecodedFile;
 use crate::jsr::partial_jsr_package_version_info_from_slice;
 use crate::jsr::JsrFetchResolver;
 use dashmap::DashMap;
@@ -17,6 +18,7 @@ use deno_graph::ModuleSpecifier;
 use deno_semver::jsr::JsrPackageReqReference;
 use deno_semver::package::PackageNv;
 use deno_semver::package::PackageReq;
+use deno_semver::StackString;
 use deno_semver::Version;
 use serde::Deserialize;
 use std::collections::HashMap;
@@ -32,8 +34,8 @@ pub struct JsrCacheResolver {
   /// The `module_graph` fields of the version infos should be forcibly absent.
   /// It can be large and we don't want to store it.
   info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
-  info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
-  workspace_scope_by_name: HashMap<String, ModuleSpecifier>,
+  info_by_name: DashMap<StackString, Option<Arc<JsrPackageInfo>>>,
+  workspace_scope_by_name: HashMap<StackString, ModuleSpecifier>,
   cache: Arc<dyn HttpCache>,
 }

@@ -58,7 +60,7 @@ impl JsrCacheResolver {
         continue;
       };
       let nv = PackageNv {
-        name: jsr_pkg_config.name.clone(),
+        name: jsr_pkg_config.name.as_str().into(),
         version: version.clone(),
       };
       info_by_name.insert(
@@ -124,8 +126,8 @@ impl JsrCacheResolver {
       return nv.value().clone();
     }
     let maybe_get_nv = || {
-      let name = req.name.clone();
-      let package_info = self.package_info(&name)?;
+      let name = &req.name;
+      let package_info = self.package_info(name)?;
       // Find the first matching version of the package which is cached.
       let mut versions = package_info.versions.keys().collect::<Vec<_>>();
       versions.sort();
@@ -143,7 +145,10 @@ impl JsrCacheResolver {
           self.package_version_info(&nv).is_some()
         })
         .cloned()?;
-      Some(PackageNv { name, version })
+      Some(PackageNv {
+        name: name.clone(),
+        version,
+      })
     };
     let nv = maybe_get_nv();
     self.nv_by_req.insert(req.clone(), nv.clone());
@@ -215,7 +220,10 @@ impl JsrCacheResolver {
     None
   }

-  pub fn package_info(&self, name: &str) -> Option<Arc<JsrPackageInfo>> {
+  pub fn package_info(
+    &self,
+    name: &StackString,
+  ) -> Option<Arc<JsrPackageInfo>> {
     if let Some(info) = self.info_by_name.get(name) {
       return info.value().clone();
     }
@@ -225,7 +233,7 @@ impl JsrCacheResolver {
       serde_json::from_slice::<JsrPackageInfo>(&meta_bytes).ok()
     };
     let info = read_cached_package_info().map(Arc::new);
-    self.info_by_name.insert(name.to_string(), info.clone());
+    self.info_by_name.insert(name.clone(), info.clone());
     info
   }

@@ -262,12 +270,12 @@ fn read_cached_url(
   cache
     .get(&cache.cache_item_key(url).ok()?, None)
     .ok()?
-    .map(|f| f.content)
+    .map(|f| f.content.into_owned())
 }

 #[derive(Debug)]
 pub struct CliJsrSearchApi {
-  file_fetcher: Arc<FileFetcher>,
+  file_fetcher: Arc<CliFileFetcher>,
   resolver: JsrFetchResolver,
   search_cache: DashMap<String, Arc<Vec<String>>>,
   versions_cache: DashMap<String, Arc<Vec<Version>>>,
@@ -275,7 +283,7 @@ pub struct CliJsrSearchApi {
 }

 impl CliJsrSearchApi {
-  pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
+  pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
     let resolver = JsrFetchResolver::new(file_fetcher.clone());
     Self {
       file_fetcher,
@@ -309,10 +317,8 @@ impl PackageSearchApi for CliJsrSearchApi {
     let file_fetcher = self.file_fetcher.clone();
     // spawn due to the lsp's `Send` requirement
     let file = deno_core::unsync::spawn(async move {
-      file_fetcher
-        .fetch_bypass_permissions(&search_url)
-        .await?
-        .into_text_decoded()
+      let file = file_fetcher.fetch_bypass_permissions(&search_url).await?;
+      TextDecodedFile::decode(file)
     })
     .await??;
     let names = Arc::new(parse_jsr_search_response(&file.source)?);

@@ -1,6 +1,7 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 use deno_ast::MediaType;
+use deno_cache_dir::file_fetcher::CacheSetting;
 use deno_config::workspace::WorkspaceDirectory;
 use deno_config::workspace::WorkspaceDiscoverOptions;
 use deno_core::anyhow::anyhow;
@@ -22,7 +23,8 @@ use deno_semver::jsr::JsrPackageReqReference;
 use indexmap::Equivalent;
 use indexmap::IndexSet;
 use log::error;
-use node_resolver::NodeModuleKind;
+use node_resolver::NodeResolutionKind;
+use node_resolver::ResolutionMode;
 use serde::Deserialize;
 use serde_json::from_value;
 use std::collections::BTreeMap;
@@ -78,7 +80,6 @@ use super::parent_process_checker;
 use super::performance::Performance;
 use super::refactor;
 use super::registries::ModuleRegistry;
-use super::resolver::LspIsCjsResolver;
 use super::resolver::LspResolver;
 use super::testing;
 use super::text;
@@ -95,13 +96,12 @@ use crate::args::create_default_npmrc;
 use crate::args::get_root_cert_store;
 use crate::args::has_flag_env_var;
 use crate::args::CaData;
-use crate::args::CacheSetting;
 use crate::args::CliOptions;
 use crate::args::Flags;
 use crate::args::InternalFlags;
 use crate::args::UnstableFmtOptions;
 use crate::factory::CliFactory;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::graph_util;
 use crate::http_util::HttpClientProvider;
 use crate::lsp::config::ConfigWatchedFileType;
@@ -146,7 +146,6 @@ pub struct StateSnapshot {
   pub project_version: usize,
   pub assets: AssetsSnapshot,
   pub config: Arc<Config>,
-  pub is_cjs_resolver: Arc<LspIsCjsResolver>,
   pub documents: Arc<Documents>,
   pub resolver: Arc<LspResolver>,
 }
@@ -206,7 +205,6 @@ pub struct Inner {
   pub documents: Documents,
   http_client_provider: Arc<HttpClientProvider>,
   initial_cwd: PathBuf,
-  pub is_cjs_resolver: Arc<LspIsCjsResolver>,
   jsr_search_api: CliJsrSearchApi,
   /// Handles module registries, which allow discovery of modules
   module_registry: ModuleRegistry,
@@ -272,7 +270,12 @@ impl LanguageServer {
         open_docs: &open_docs,
       };
       let graph = module_graph_creator
-        .create_graph_with_loader(GraphKind::All, roots.clone(), &mut loader)
+        .create_graph_with_loader(
+          GraphKind::All,
+          roots.clone(),
+          &mut loader,
+          graph_util::NpmCachingStrategy::Eager,
+        )
         .await?;
       graph_util::graph_valid(
         &graph,
@@ -484,7 +487,6 @@ impl Inner {
     let initial_cwd = std::env::current_dir().unwrap_or_else(|_| {
       panic!("Could not resolve current working directory")
     });
-    let is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&cache));
 
     Self {
       assets,
@@ -496,7 +498,6 @@ impl Inner {
       documents,
       http_client_provider,
       initial_cwd: initial_cwd.clone(),
-      is_cjs_resolver,
       jsr_search_api,
       project_version: 0,
       task_queue: Default::default(),
@@ -607,7 +608,6 @@ impl Inner {
       project_version: self.project_version,
       assets: self.assets.snapshot(),
       config: Arc::new(self.config.clone()),
-      is_cjs_resolver: self.is_cjs_resolver.clone(),
       documents: Arc::new(self.documents.clone()),
       resolver: self.resolver.snapshot(),
     })
@@ -629,7 +629,6 @@ impl Inner {
       }
    });
    self.cache = LspCache::new(global_cache_url);
-    self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&self.cache));
    let deno_dir = self.cache.deno_dir();
    let workspace_settings = self.config.workspace_settings();
    let maybe_root_path = self
@@ -959,15 +958,15 @@ impl Inner {
   }
 
   async fn refresh_config_tree(&mut self) {
-    let mut file_fetcher = FileFetcher::new(
+    let file_fetcher = CliFileFetcher::new(
      self.cache.global().clone(),
-      CacheSetting::RespectHeaders,
-      true,
      self.http_client_provider.clone(),
      Default::default(),
      None,
+      true,
+      CacheSetting::RespectHeaders,
+      super::logging::lsp_log_level(),
    );
-    file_fetcher.set_download_log_level(super::logging::lsp_log_level());
    let file_fetcher = Arc::new(file_fetcher);
    self
      .config
@@ -993,13 +992,10 @@ impl Inner {
      let resolver = inner.resolver.as_cli_resolver(Some(&referrer));
      let Ok(specifier) = resolver.resolve(
        &specifier,
-        &deno_graph::Range {
-          specifier: referrer.clone(),
-          start: deno_graph::Position::zeroed(),
-          end: deno_graph::Position::zeroed(),
-        },
-        NodeModuleKind::Esm,
-        deno_graph::source::ResolutionMode::Types,
+        &referrer,
+        deno_graph::Position::zeroed(),
+        ResolutionMode::Import,
+        NodeResolutionKind::Types,
      ) else {
        return;
      };
@@ -1640,8 +1636,8 @@ impl Inner {
      .get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version());
    let specifier_kind = asset_or_doc
      .document()
-      .map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
-      .unwrap_or(NodeModuleKind::Esm);
+      .map(|d| d.resolution_mode())
+      .unwrap_or(ResolutionMode::Import);
    let mut includes_no_cache = false;
    for diagnostic in &fixable_diagnostics {
      match diagnostic.source.as_deref() {
@@ -1859,20 +1855,12 @@ impl Inner {
    }
 
    let changes = if code_action_data.fix_id == "fixMissingImport" {
-      fix_ts_import_changes(
-        &code_action_data.specifier,
-        maybe_asset_or_doc
-          .as_ref()
-          .and_then(|d| d.document())
-          .map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
-          .unwrap_or(NodeModuleKind::Esm),
-        &combined_code_actions.changes,
-        self,
-      )
-      .map_err(|err| {
-        error!("Unable to remap changes: {:#}", err);
-        LspError::internal_error()
-      })?
+      fix_ts_import_changes(&combined_code_actions.changes, self).map_err(
+        |err| {
+          error!("Unable to remap changes: {:#}", err);
+          LspError::internal_error()
+        },
+      )?
    } else {
      combined_code_actions.changes
    };
@@ -1916,20 +1904,16 @@ impl Inner {
        asset_or_doc.scope().cloned(),
      )
      .await?;
-    if kind_suffix == ".rewrite.function.returnType" {
-      refactor_edit_info.edits = fix_ts_import_changes(
-        &action_data.specifier,
-        asset_or_doc
-          .document()
-          .map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
-          .unwrap_or(NodeModuleKind::Esm),
-        &refactor_edit_info.edits,
-        self,
-      )
-      .map_err(|err| {
-        error!("Unable to remap changes: {:#}", err);
-        LspError::internal_error()
-      })?
+    if kind_suffix == ".rewrite.function.returnType"
+      || kind_suffix == ".move.newFile"
+    {
+      refactor_edit_info.edits =
+        fix_ts_import_changes(&refactor_edit_info.edits, self).map_err(
+          |err| {
+            error!("Unable to remap changes: {:#}", err);
+            LspError::internal_error()
+          },
+        )?
    }
    code_action.edit = refactor_edit_info.to_workspace_edit(self)?;
    code_action
@@ -2272,7 +2256,6 @@ impl Inner {
      &self.jsr_search_api,
      &self.npm_search_api,
      &self.documents,
-      &self.is_cjs_resolver,
      self.resolver.as_ref(),
      self
        .config
@@ -3681,6 +3664,7 @@ impl Inner {
        .unwrap_or_else(create_default_npmrc),
      workspace,
      force_global_cache,
+      None,
    )?;

    let open_docs = self.documents.documents(DocumentsFilter::OpenDiagnosable);
@@ -3781,14 +3765,11 @@ impl Inner {
  fn task_definitions(&self) -> LspResult<Vec<TaskDefinition>> {
    let mut result = vec![];
    for config_file in self.config.tree.config_files() {
-      if let Some(tasks) = json!(&config_file.json.tasks).as_object() {
-        for (name, value) in tasks {
-          let Some(command) = value.as_str() else {
-            continue;
-          };
+      if let Some(tasks) = config_file.to_tasks_config().ok().flatten() {
+        for (name, def) in tasks {
          result.push(TaskDefinition {
            name: name.clone(),
-            command: command.to_string(),
+            command: def.command.clone(),
            source_uri: url_to_uri(&config_file.specifier)
              .map_err(|_| LspError::internal_error())?,
          });
@@ -3800,7 +3781,7 @@ impl Inner {
      for (name, command) in scripts {
        result.push(TaskDefinition {
          name: name.clone(),
-          command: command.clone(),
+          command: Some(command.clone()),
          source_uri: url_to_uri(&package_json.specifier())
            .map_err(|_| LspError::internal_error())?,
        });
@@ -14,9 +14,7 @@ pub const LATEST_DIAGNOSTIC_BATCH_INDEX: &str =
 #[serde(rename_all = "camelCase")]
 pub struct TaskDefinition {
   pub name: String,
-  // TODO(nayeemrmn): Rename this to `command` in vscode_deno.
-  #[serde(rename = "detail")]
-  pub command: String,
+  pub command: Option<String>,
   pub source_uri: lsp::Uri,
 }
 
@@ -56,9 +56,6 @@ pub async fn start() -> Result<(), AnyError> {
       LanguageServer::performance_request,
     )
     .custom_method(lsp_custom::TASK_REQUEST, LanguageServer::task_definitions)
-    // TODO(nayeemrmn): Rename this to `deno/taskDefinitions` in vscode_deno and
-    // remove this alias.
-    .custom_method("deno/task", LanguageServer::task_definitions)
     .custom_method(testing::TEST_RUN_REQUEST, LanguageServer::test_run_request)
     .custom_method(
       testing::TEST_RUN_CANCEL_REQUEST,
@@ -11,21 +11,22 @@ use serde::Deserialize;
 use std::sync::Arc;
 
 use crate::args::npm_registry_url;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
+use crate::file_fetcher::TextDecodedFile;
 use crate::npm::NpmFetchResolver;
 
 use super::search::PackageSearchApi;
 
 #[derive(Debug)]
 pub struct CliNpmSearchApi {
-  file_fetcher: Arc<FileFetcher>,
+  file_fetcher: Arc<CliFileFetcher>,
   resolver: NpmFetchResolver,
   search_cache: DashMap<String, Arc<Vec<String>>>,
   versions_cache: DashMap<String, Arc<Vec<Version>>>,
 }
 
 impl CliNpmSearchApi {
-  pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
+  pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
     let resolver = NpmFetchResolver::new(
       file_fetcher.clone(),
       Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
@@ -57,10 +58,8 @@ impl PackageSearchApi for CliNpmSearchApi {
       .append_pair("text", &format!("{} boost-exact:false", query));
     let file_fetcher = self.file_fetcher.clone();
     let file = deno_core::unsync::spawn(async move {
-      file_fetcher
-        .fetch_bypass_permissions(&search_url)
-        .await?
-        .into_text_decoded()
+      let file = file_fetcher.fetch_bypass_permissions(&search_url).await?;
+      TextDecodedFile::decode(file)
    })
    .await??;
    let names = Arc::new(parse_npm_search_response(&file.source)?);
@@ -12,14 +12,15 @@ use super::path_to_regex::StringOrNumber;
 use super::path_to_regex::StringOrVec;
 use super::path_to_regex::Token;
 
-use crate::args::CacheSetting;
 use crate::cache::GlobalHttpCache;
 use crate::cache::HttpCache;
+use crate::file_fetcher::CliFileFetcher;
 use crate::file_fetcher::FetchOptions;
 use crate::file_fetcher::FetchPermissionsOptionRef;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::TextDecodedFile;
 use crate::http_util::HttpClientProvider;
 
+use deno_cache_dir::file_fetcher::CacheSetting;
 use deno_core::anyhow::anyhow;
 use deno_core::error::AnyError;
 use deno_core::serde::Deserialize;
@@ -418,7 +419,7 @@ enum VariableItems {
 pub struct ModuleRegistry {
   origins: HashMap<String, Vec<RegistryConfiguration>>,
   pub location: PathBuf,
-  pub file_fetcher: Arc<FileFetcher>,
+  pub file_fetcher: Arc<CliFileFetcher>,
   http_cache: Arc<GlobalHttpCache>,
 }
 
@@ -432,15 +433,15 @@ impl ModuleRegistry {
       location.clone(),
       crate::cache::RealDenoCacheEnv,
     ));
-    let mut file_fetcher = FileFetcher::new(
+    let file_fetcher = CliFileFetcher::new(
       http_cache.clone(),
-      CacheSetting::RespectHeaders,
-      true,
       http_client_provider,
       Default::default(),
       None,
+      true,
+      CacheSetting::RespectHeaders,
+      super::logging::lsp_log_level(),
     );
-    file_fetcher.set_download_log_level(super::logging::lsp_log_level());
 
     Self {
       origins: HashMap::new(),
@@ -479,13 +480,15 @@ impl ModuleRegistry {
       let specifier = specifier.clone();
       async move {
        file_fetcher
-          .fetch_with_options(FetchOptions {
-            specifier: &specifier,
-            permissions: FetchPermissionsOptionRef::AllowAll,
-            maybe_auth: None,
-            maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
-            maybe_cache_setting: None,
-          })
+          .fetch_with_options(
+            &specifier,
+            FetchPermissionsOptionRef::AllowAll,
+            FetchOptions {
+              maybe_auth: None,
+              maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
+              maybe_cache_setting: None,
+            }
+          )
          .await
      }
    }).await?;
@@ -500,7 +503,7 @@ impl ModuleRegistry {
      );
      self.http_cache.set(specifier, headers_map, &[])?;
    }
-    let file = fetch_result?.into_text_decoded()?;
+    let file = TextDecodedFile::decode(fetch_result?)?;
    let config: RegistryConfigurationJson = serde_json::from_str(&file.source)?;
    validate_config(&config)?;
    Ok(config.registries)
@@ -584,12 +587,11 @@ impl ModuleRegistry {
    // spawn due to the lsp's `Send` requirement
    let file = deno_core::unsync::spawn({
      async move {
-        file_fetcher
+        let file = file_fetcher
          .fetch_bypass_permissions(&endpoint)
          .await
-          .ok()?
-          .into_text_decoded()
-          .ok()
+          .ok()?;
+        TextDecodedFile::decode(file).ok()
      }
    })
    .await
@@ -983,12 +985,11 @@ impl ModuleRegistry {
    let file_fetcher = self.file_fetcher.clone();
    // spawn due to the lsp's `Send` requirement
    let file = deno_core::unsync::spawn(async move {
-      file_fetcher
+      let file = file_fetcher
        .fetch_bypass_permissions(&specifier)
        .await
-        .ok()?
-        .into_text_decoded()
-        .ok()
+        .ok()?;
+      TextDecodedFile::decode(file).ok()
    })
    .await
    .ok()??;
@@ -1049,7 +1050,7 @@ impl ModuleRegistry {
      let file_fetcher = self.file_fetcher.clone();
      let specifier = specifier.clone();
      async move {
-        file_fetcher
+        let file = file_fetcher
          .fetch_bypass_permissions(&specifier)
          .await
          .map_err(|err| {
@@ -1058,9 +1059,8 @@ impl ModuleRegistry {
              specifier, err
            );
          })
-          .ok()?
-          .into_text_decoded()
-          .ok()
+          .ok()?;
+        TextDecodedFile::decode(file).ok()
      }
    })
    .await
@@ -1095,7 +1095,7 @@ impl ModuleRegistry {
      let file_fetcher = self.file_fetcher.clone();
      let specifier = specifier.clone();
      async move {
-        file_fetcher
+        let file = file_fetcher
          .fetch_bypass_permissions(&specifier)
          .await
          .map_err(|err| {
@@ -1104,9 +1104,8 @@ impl ModuleRegistry {
              specifier, err
            );
          })
-          .ok()?
-          .into_text_decoded()
-          .ok()
+          .ok()?;
+        TextDecodedFile::decode(file).ok()
      }
    })
    .await
@@ -2,6 +2,7 @@
 
 use dashmap::DashMap;
 use deno_ast::MediaType;
+use deno_cache_dir::file_fetcher::CacheSetting;
 use deno_cache_dir::npm::NpmCacheDir;
 use deno_cache_dir::HttpCache;
 use deno_config::deno_json::JsxImportSourceConfig;
@@ -9,29 +10,26 @@ use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::WorkspaceResolver;
 use deno_core::parking_lot::Mutex;
 use deno_core::url::Url;
-use deno_graph::source::ResolutionMode;
 use deno_graph::GraphImport;
 use deno_graph::ModuleSpecifier;
 use deno_graph::Range;
 use deno_npm::NpmSystemInfo;
-use deno_path_util::url_from_directory_path;
 use deno_path_util::url_to_file_path;
+use deno_resolver::cjs::IsCjsResolutionMode;
 use deno_resolver::npm::NpmReqResolverOptions;
 use deno_resolver::DenoResolverOptions;
 use deno_resolver::NodeAndNpmReqResolver;
 use deno_runtime::deno_fs;
 use deno_runtime::deno_node::NodeResolver;
-use deno_runtime::deno_node::PackageJson;
 use deno_runtime::deno_node::PackageJsonResolver;
 use deno_semver::jsr::JsrPackageReqReference;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageNv;
 use deno_semver::package::PackageReq;
 use indexmap::IndexMap;
-use node_resolver::errors::ClosestPkgJsonError;
 use node_resolver::InNpmPackageChecker;
-use node_resolver::NodeModuleKind;
-use node_resolver::NodeResolutionMode;
+use node_resolver::NodeResolutionKind;
+use node_resolver::ResolutionMode;
 use std::borrow::Cow;
 use std::collections::BTreeMap;
 use std::collections::BTreeSet;
@@ -40,14 +38,14 @@ use std::collections::HashSet;
 use std::sync::Arc;
 
 use super::cache::LspCache;
-use super::documents::Document;
 use super::jsr::JsrCacheResolver;
 use crate::args::create_default_npmrc;
-use crate::args::CacheSetting;
 use crate::args::CliLockfile;
 use crate::args::NpmInstallDepsProvider;
 use crate::cache::DenoCacheEnvFsAdapter;
 use crate::factory::Deferred;
+use crate::graph_util::to_node_resolution_kind;
+use crate::graph_util::to_node_resolution_mode;
 use crate::graph_util::CliJsrUrlProvider;
 use crate::http_util::HttpClientProvider;
 use crate::lsp::config::Config;
@@ -70,7 +68,6 @@ use crate::resolver::CliResolverOptions;
 use crate::resolver::IsCjsResolver;
 use crate::resolver::WorkerCliNpmGraphResolver;
 use crate::tsc::into_specifier_and_media_type;
-use crate::util::fs::canonicalize_path_maybe_not_exists;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::progress_bar::ProgressBarStyle;
 
@@ -78,6 +75,7 @@ use crate::util::progress_bar::ProgressBarStyle;
 struct LspScopeResolver {
   resolver: Arc<CliResolver>,
   in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
+  is_cjs_resolver: Arc<IsCjsResolver>,
   jsr_resolver: Option<Arc<JsrCacheResolver>>,
   npm_resolver: Option<Arc<dyn CliNpmResolver>>,
   node_resolver: Option<Arc<NodeResolver>>,
@@ -96,6 +94,7 @@ impl Default for LspScopeResolver {
     Self {
       resolver: factory.cli_resolver().clone(),
       in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
+      is_cjs_resolver: factory.is_cjs_resolver().clone(),
       jsr_resolver: None,
       npm_resolver: None,
       node_resolver: None,
@@ -134,7 +133,8 @@ impl LspScopeResolver {
       cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
       config_data.and_then(|d| d.lockfile.clone()),
     )));
-    let npm_graph_resolver = cli_resolver.create_graph_npm_resolver();
+    let npm_graph_resolver = cli_resolver
+      .create_graph_npm_resolver(crate::graph_util::NpmCachingStrategy::Eager);
     let maybe_jsx_import_source_config =
       config_data.and_then(|d| d.maybe_jsx_import_source_config());
     let graph_imports = config_data
@@ -146,7 +146,7 @@ impl LspScopeResolver {
       .map(|(referrer, imports)| {
         let resolver = SingleReferrerGraphResolver {
           valid_referrer: &referrer,
-          referrer_kind: NodeModuleKind::Esm,
+          module_resolution_mode: ResolutionMode::Import,
           cli_resolver: &cli_resolver,
           jsx_import_source_config: maybe_jsx_import_source_config
             .as_ref(),
@@ -180,16 +180,16 @@ impl LspScopeResolver {
              &req_ref,
              &referrer,
              // todo(dsherret): this is wrong because it doesn't consider CJS referrers
-              NodeModuleKind::Esm,
-              NodeResolutionMode::Types,
+              ResolutionMode::Import,
+              NodeResolutionKind::Types,
            )
            .or_else(|_| {
              npm_pkg_req_resolver.resolve_req_reference(
                &req_ref,
                &referrer,
                // todo(dsherret): this is wrong because it doesn't consider CJS referrers
-                NodeModuleKind::Esm,
-                NodeResolutionMode::Execution,
+                ResolutionMode::Import,
+                NodeResolutionKind::Execution,
              )
            })
            .ok()?,
@@ -205,6 +205,7 @@ impl LspScopeResolver {
    Self {
      resolver: cli_resolver,
      in_npm_pkg_checker,
+      is_cjs_resolver: factory.is_cjs_resolver().clone(),
      jsr_resolver,
      npm_pkg_req_resolver,
      npm_resolver,
@@ -228,6 +229,7 @@ impl LspScopeResolver {
    Arc::new(Self {
      resolver: factory.cli_resolver().clone(),
      in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
+      is_cjs_resolver: factory.is_cjs_resolver().clone(),
      jsr_resolver: self.jsr_resolver.clone(),
      npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(),
      npm_resolver: factory.npm_resolver().cloned(),
@@ -342,7 +344,17 @@ impl LspResolver {
    file_referrer: Option<&ModuleSpecifier>,
  ) -> WorkerCliNpmGraphResolver {
    let resolver = self.get_scope_resolver(file_referrer);
-    resolver.resolver.create_graph_npm_resolver()
+    resolver
+      .resolver
+      .create_graph_npm_resolver(crate::graph_util::NpmCachingStrategy::Eager)
+  }
+
+  pub fn as_is_cjs_resolver(
+    &self,
+    file_referrer: Option<&ModuleSpecifier>,
+  ) -> &IsCjsResolver {
+    let resolver = self.get_scope_resolver(file_referrer);
+    resolver.is_cjs_resolver.as_ref()
  }

  pub fn as_config_data(
@@ -369,6 +381,14 @@ impl LspResolver {
    resolver.npm_resolver.as_ref().and_then(|r| r.as_managed())
  }

+  pub fn pkg_json_resolver(
+    &self,
+    referrer: &ModuleSpecifier,
+  ) -> &Arc<PackageJsonResolver> {
+    let resolver = self.get_scope_resolver(Some(referrer));
+    &resolver.pkg_json_resolver
+  }
+
  pub fn graph_imports_by_referrer(
    &self,
    file_referrer: &ModuleSpecifier,
@@ -424,7 +444,7 @@ impl LspResolver {
    &self,
    req_ref: &NpmPackageReqReference,
    referrer: &ModuleSpecifier,
-    referrer_kind: NodeModuleKind,
+    resolution_mode: ResolutionMode,
    file_referrer: Option<&ModuleSpecifier>,
  ) -> Option<(ModuleSpecifier, MediaType)> {
    let resolver = self.get_scope_resolver(file_referrer);
@@ -434,8 +454,8 @@ impl LspResolver {
        .resolve_req_reference(
          req_ref,
          referrer,
-          referrer_kind,
-          NodeResolutionMode::Types,
+          resolution_mode,
+          NodeResolutionKind::Types,
        )
        .ok()?,
    )))
@@ -492,7 +512,7 @@ impl LspResolver {
    &self,
    specifier_text: &str,
    referrer: &ModuleSpecifier,
-    referrer_kind: NodeModuleKind,
+    resolution_mode: ResolutionMode,
  ) -> bool {
    let resolver = self.get_scope_resolver(Some(referrer));
    let Some(npm_pkg_req_resolver) = resolver.npm_pkg_req_resolver.as_ref()
@@ -503,24 +523,14 @@ impl LspResolver {
      .resolve_if_for_npm_pkg(
        specifier_text,
        referrer,
-        referrer_kind,
-        NodeResolutionMode::Types,
+        resolution_mode,
+        NodeResolutionKind::Types,
      )
      .ok()
      .flatten()
      .is_some()
  }

-  pub fn get_closest_package_json(
-    &self,
-    referrer: &ModuleSpecifier,
-  ) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
-    let resolver = self.get_scope_resolver(Some(referrer));
-    resolver
-      .pkg_json_resolver
-      .get_closest_package_json(referrer)
-  }
-
  pub fn resolve_redirects(
    &self,
    specifier: &ModuleSpecifier,
@@ -581,6 +591,7 @@ pub struct ScopeDepInfo {
 struct ResolverFactoryServices {
   cli_resolver: Deferred<Arc<CliResolver>>,
   in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
+  is_cjs_resolver: Deferred<Arc<IsCjsResolver>>,
   node_resolver: Deferred<Option<Arc<NodeResolver>>>,
   npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
   npm_resolver: Option<Arc<dyn CliNpmResolver>>,
@@ -744,6 +755,23 @@ impl<'a> ResolverFactory<'a> {
     })
   }
 
+  pub fn is_cjs_resolver(&self) -> &Arc<IsCjsResolver> {
+    self.services.is_cjs_resolver.get_or_init(|| {
+      Arc::new(IsCjsResolver::new(
+        self.in_npm_pkg_checker().clone(),
+        self.pkg_json_resolver().clone(),
+        if self
+          .config_data
+          .is_some_and(|d| d.unstable.contains("detect-cjs"))
+        {
+          IsCjsResolutionMode::ImplicitTypeCommonJs
+        } else {
+          IsCjsResolutionMode::ExplicitTypeCommonJs
+        },
+      ))
+    })
+  }
+
   pub fn node_resolver(&self) -> Option<&Arc<NodeResolver>> {
     self
       .services
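Note: the new `is_cjs_resolver()` accessor above builds the resolver lazily, on first request, through the factory's `Deferred::get_or_init`, and then hands out the same shared `Arc`. As an illustration only (the CLI's `Deferred` helper is internal; this sketch uses `std::sync::OnceLock` and an invented `CjsResolver`/`Factory` pair instead), the same lazy-init-and-share pattern looks like this:

use std::sync::{Arc, OnceLock};

// Hypothetical stand-in for an expensive-to-build service.
#[derive(Debug)]
struct CjsResolver {
  implicit_type_commonjs: bool,
}

#[derive(Default)]
struct Factory {
  detect_cjs: bool,
  cjs_resolver: OnceLock<Arc<CjsResolver>>,
}

impl Factory {
  // Built once on first use; later calls return the same Arc.
  fn cjs_resolver(&self) -> &Arc<CjsResolver> {
    self.cjs_resolver.get_or_init(|| {
      Arc::new(CjsResolver {
        implicit_type_commonjs: self.detect_cjs,
      })
    })
  }
}

fn main() {
  let factory = Factory { detect_cjs: true, ..Default::default() };
  let a = factory.cjs_resolver().clone();
  let b = factory.cjs_resolver().clone();
  assert!(Arc::ptr_eq(&a, &b)); // same shared instance
}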
@@ -803,99 +831,10 @@ impl std::fmt::Debug for RedirectResolver {
   }
 }
 
-#[derive(Debug)]
-pub struct LspIsCjsResolver {
-  inner: IsCjsResolver,
-}
-
-impl Default for LspIsCjsResolver {
-  fn default() -> Self {
-    LspIsCjsResolver::new(&Default::default())
-  }
-}
-
-impl LspIsCjsResolver {
-  pub fn new(cache: &LspCache) -> Self {
-    #[derive(Debug)]
-    struct LspInNpmPackageChecker {
-      global_cache_dir: ModuleSpecifier,
-    }
-
-    impl LspInNpmPackageChecker {
-      pub fn new(cache: &LspCache) -> Self {
-        let npm_folder_path = cache.deno_dir().npm_folder_path();
-        Self {
-          global_cache_dir: url_from_directory_path(
-            &canonicalize_path_maybe_not_exists(&npm_folder_path)
-              .unwrap_or(npm_folder_path),
-          )
-          .unwrap_or_else(|_| {
-            ModuleSpecifier::parse("file:///invalid/").unwrap()
-          }),
-        }
-      }
-    }
-
-    impl InNpmPackageChecker for LspInNpmPackageChecker {
-      fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
-        if specifier.scheme() != "file" {
-          return false;
-        }
-        if specifier
-          .as_str()
-          .starts_with(self.global_cache_dir.as_str())
-        {
-          return true;
-        }
-        specifier.as_str().contains("/node_modules/")
-      }
-    }
-
-    let fs = Arc::new(deno_fs::RealFs);
-    let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
-      deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
-    ));
-
-    LspIsCjsResolver {
-      inner: IsCjsResolver::new(
-        Arc::new(LspInNpmPackageChecker::new(cache)),
-        pkg_json_resolver,
-        crate::resolver::IsCjsResolverOptions {
-          detect_cjs: true,
-          is_node_main: false,
-        },
-      ),
-    }
-  }
-
-  pub fn get_maybe_doc_module_kind(
-    &self,
-    specifier: &ModuleSpecifier,
-    maybe_document: Option<&Document>,
-  ) -> NodeModuleKind {
-    self.get_lsp_referrer_kind(
-      specifier,
-      maybe_document.and_then(|d| d.is_script()),
-    )
-  }
-
-  pub fn get_doc_module_kind(&self, document: &Document) -> NodeModuleKind {
-    self.get_lsp_referrer_kind(document.specifier(), document.is_script())
-  }
-
-  pub fn get_lsp_referrer_kind(
-    &self,
-    specifier: &ModuleSpecifier,
-    is_script: Option<bool>,
-  ) -> NodeModuleKind {
-    self.inner.get_lsp_referrer_kind(specifier, is_script)
-  }
-}
-
 #[derive(Debug)]
 pub struct SingleReferrerGraphResolver<'a> {
   pub valid_referrer: &'a ModuleSpecifier,
-  pub referrer_kind: NodeModuleKind,
+  pub module_resolution_mode: ResolutionMode,
   pub cli_resolver: &'a CliResolver,
   pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>,
 }
@@ -924,16 +863,20 @@ impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> {
     &self,
     specifier_text: &str,
     referrer_range: &Range,
-    mode: ResolutionMode,
+    resolution_kind: deno_graph::source::ResolutionKind,
   ) -> Result<ModuleSpecifier, deno_graph::source::ResolveError> {
     // this resolver assumes it will only be used with a single referrer
     // with the provided referrer kind
     debug_assert_eq!(referrer_range.specifier, *self.valid_referrer);
     self.cli_resolver.resolve(
       specifier_text,
-      referrer_range,
-      self.referrer_kind,
-      mode,
+      &referrer_range.specifier,
+      referrer_range.range.start,
+      referrer_range
+        .resolution_mode
+        .map(to_node_resolution_mode)
+        .unwrap_or(self.module_resolution_mode),
+      to_node_resolution_kind(resolution_kind),
     )
   }
 }
@@ -1001,9 +944,7 @@ impl RedirectResolver {
       if chain.len() > 10 {
         break None;
       }
-      let Ok(target) =
-        deno_core::resolve_import(location, specifier.as_str())
-      else {
+      let Ok(target) = specifier.join(location) else {
        break None;
      };
      chain.push((
@@ -67,7 +67,9 @@ pub mod tests {
       &self,
       nv: &PackageNv,
     ) -> Result<Arc<Vec<String>>, AnyError> {
-      let Some(exports_by_version) = self.package_versions.get(&nv.name) else {
+      let Some(exports_by_version) =
+        self.package_versions.get(nv.name.as_str())
+      else {
         return Err(anyhow!("Package not found."));
       };
       let Some(exports) = exports_by_version.get(&nv.version) else {
110  cli/lsp/tsc.rs
@@ -64,13 +64,14 @@ use deno_core::OpState;
 use deno_core::PollEventLoopOptions;
 use deno_core::RuntimeOptions;
 use deno_path_util::url_to_file_path;
+use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
 use deno_runtime::inspector_server::InspectorServer;
 use deno_runtime::tokio_util::create_basic_runtime;
 use indexmap::IndexMap;
 use indexmap::IndexSet;
 use lazy_regex::lazy_regex;
 use log::error;
-use node_resolver::NodeModuleKind;
+use node_resolver::ResolutionMode;
 use once_cell::sync::Lazy;
 use regex::Captures;
 use regex::Regex;
@@ -1297,16 +1298,10 @@ impl TsServer {
     {
       // When an LSP request is cancelled by the client, the future this is being
       // executed under and any local variables here will be dropped at the next
-      // await point. To pass on that cancellation to the TS thread, we make this
-      // wrapper which cancels the request's token on drop.
-      struct DroppableToken(CancellationToken);
-      impl Drop for DroppableToken {
-        fn drop(&mut self) {
-          self.0.cancel();
-        }
-      }
+      // await point. To pass on that cancellation to the TS thread, we use drop_guard
+      // which cancels the request's token on drop.
       let token = token.child_token();
-      let droppable_token = DroppableToken(token.clone());
+      let droppable_token = token.clone().drop_guard();
       let (tx, mut rx) = oneshot::channel::<Result<String, AnyError>>();
       let change = self.pending_change.lock().take();
 
@@ -1320,7 +1315,7 @@ impl TsServer {
     tokio::select! {
       value = &mut rx => {
         let value = value??;
-        drop(droppable_token);
+        droppable_token.disarm();
         Ok(serde_json::from_str(&value)?)
       }
       _ = token.cancelled() => {
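Note: the two hunks above replace the hand-rolled `DroppableToken` wrapper with the drop-guard helper on the cancellation token, so the token is cancelled automatically if the request future is dropped, unless the guard is explicitly disarmed after a successful response. The names below (`CancelFlag`, `DropGuard`) are invented for illustration; this is only a sketch of the drop-guard pattern in plain std Rust, not the tokio-util API the actual code relies on.

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

// Hypothetical cancellation flag shared with a worker.
#[derive(Clone, Default)]
struct CancelFlag(Arc<AtomicBool>);

impl CancelFlag {
  fn cancel(&self) {
    self.0.store(true, Ordering::SeqCst);
  }
  fn is_cancelled(&self) -> bool {
    self.0.load(Ordering::SeqCst)
  }
  // Cancel automatically when the guard is dropped (e.g. the request future
  // is dropped at an await point), unless `disarm` is called first.
  fn drop_guard(self) -> DropGuard {
    DropGuard { flag: Some(self) }
  }
}

struct DropGuard {
  flag: Option<CancelFlag>,
}

impl DropGuard {
  fn disarm(mut self) {
    self.flag = None; // the flag stays un-cancelled
  }
}

impl Drop for DropGuard {
  fn drop(&mut self) {
    if let Some(flag) = self.flag.take() {
      flag.cancel();
    }
  }
}

fn main() {
  let flag = CancelFlag::default();
  {
    let _guard = flag.clone().drop_guard();
    // guard dropped here without disarm -> flag gets cancelled
  }
  assert!(flag.is_cancelled());
}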
@@ -3417,15 +3412,23 @@ fn parse_code_actions(
     additional_text_edits.extend(change.text_changes.iter().map(|tc| {
       let mut text_edit = tc.as_text_edit(asset_or_doc.line_index());
       if let Some(specifier_rewrite) = &data.specifier_rewrite {
-        text_edit.new_text = text_edit.new_text.replace(
-          &specifier_rewrite.old_specifier,
-          &specifier_rewrite.new_specifier,
-        );
+        let specifier_index = text_edit
+          .new_text
+          .char_indices()
+          .find_map(|(b, c)| (c == '\'' || c == '"').then_some(b));
+        if let Some(i) = specifier_index {
+          let mut specifier_part = text_edit.new_text.split_off(i);
+          specifier_part = specifier_part.replace(
+            &specifier_rewrite.old_specifier,
+            &specifier_rewrite.new_specifier,
+          );
+          text_edit.new_text.push_str(&specifier_part);
+        }
         if let Some(deno_types_specifier) =
           &specifier_rewrite.new_deno_types_specifier
         {
           text_edit.new_text = format!(
-            "// @deno-types=\"{}\"\n{}",
+            "// @ts-types=\"{}\"\n{}",
             deno_types_specifier, &text_edit.new_text
           );
         }
@@ -3593,17 +3596,22 @@ impl CompletionEntryDetails {
           &mut insert_replace_edit.new_text
         }
       };
-      *new_text = new_text.replace(
-        &specifier_rewrite.old_specifier,
-        &specifier_rewrite.new_specifier,
-      );
+      let specifier_index = new_text
+        .char_indices()
+        .find_map(|(b, c)| (c == '\'' || c == '"').then_some(b));
+      if let Some(i) = specifier_index {
+        let mut specifier_part = new_text.split_off(i);
+        specifier_part = specifier_part.replace(
+          &specifier_rewrite.old_specifier,
+          &specifier_rewrite.new_specifier,
+        );
+        new_text.push_str(&specifier_part);
+      }
       if let Some(deno_types_specifier) =
         &specifier_rewrite.new_deno_types_specifier
       {
-        *new_text = format!(
-          "// @deno-types=\"{}\"\n{}",
-          deno_types_specifier, new_text
-        );
+        *new_text =
+          format!("// @ts-types=\"{}\"\n{}", deno_types_specifier, new_text);
      }
    }
  }
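Note: both hunks above stop running the old-to-new specifier replacement over the whole edit text and instead split the string at the first quote character, so only the quoted import specifier is rewritten and any matching text before it (such as an identifier in the import clause) is left untouched. A standalone sketch of that splitting logic, with a hypothetical helper name and made-up inputs purely for illustration:

// Rewrites `old` to `new` only inside the quoted part of an import statement.
fn rewrite_specifier(text: &str, old: &str, new: &str) -> String {
  let mut text = text.to_string();
  let specifier_index = text
    .char_indices()
    .find_map(|(b, c)| (c == '\'' || c == '"').then_some(b));
  if let Some(i) = specifier_index {
    let mut specifier_part = text.split_off(i);
    specifier_part = specifier_part.replace(old, new);
    text.push_str(&specifier_part);
  }
  text
}

fn main() {
  // Only the quoted specifier is rewritten, not the imported name.
  let edit =
    rewrite_specifier("import { mod } from 'mod';", "mod", "node:mod");
  assert_eq!(edit, "import { mod } from 'node:mod';");
}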
@ -3737,7 +3745,7 @@ pub struct CompletionItemData {
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
struct CompletionEntryDataAutoImport {
|
struct CompletionEntryDataAutoImport {
|
||||||
module_specifier: String,
|
module_specifier: String,
|
||||||
file_name: String,
|
file_name: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
@ -3794,9 +3802,20 @@ impl CompletionEntry {
|
||||||
else {
|
else {
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
if let Ok(normalized) = specifier_map.normalize(&raw.file_name) {
|
if let Some(file_name) = &raw.file_name {
|
||||||
self.auto_import_data =
|
if let Ok(normalized) = specifier_map.normalize(file_name) {
|
||||||
Some(CompletionNormalizedAutoImportData { raw, normalized });
|
self.auto_import_data =
|
||||||
|
Some(CompletionNormalizedAutoImportData { raw, normalized });
|
||||||
|
}
|
||||||
|
} else if SUPPORTED_BUILTIN_NODE_MODULES
|
||||||
|
.contains(&raw.module_specifier.as_str())
|
||||||
|
{
|
||||||
|
if let Ok(normalized) =
|
||||||
|
resolve_url(&format!("node:{}", &raw.module_specifier))
|
||||||
|
{
|
||||||
|
self.auto_import_data =
|
||||||
|
Some(CompletionNormalizedAutoImportData { raw, normalized });
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4449,9 +4468,9 @@ fn op_load<'s>(
|
||||||
version: state.script_version(&specifier),
|
version: state.script_version(&specifier),
|
||||||
is_cjs: doc
|
is_cjs: doc
|
||||||
.document()
|
.document()
|
||||||
.map(|d| state.state_snapshot.is_cjs_resolver.get_doc_module_kind(d))
|
.map(|d| d.resolution_mode())
|
||||||
.unwrap_or(NodeModuleKind::Esm)
|
.unwrap_or(ResolutionMode::Import)
|
||||||
== NodeModuleKind::Cjs,
|
== ResolutionMode::Require,
|
||||||
})
|
})
|
||||||
};
|
};
|
||||||
let serialized = serde_v8::to_v8(scope, maybe_load_response)?;
|
let serialized = serde_v8::to_v8(scope, maybe_load_response)?;
|
||||||
|
@ -4479,17 +4498,9 @@ fn op_release(
|
||||||
fn op_resolve(
|
fn op_resolve(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] base: String,
|
#[string] base: String,
|
||||||
is_base_cjs: bool,
|
#[serde] specifiers: Vec<(bool, String)>,
|
||||||
#[serde] specifiers: Vec<String>,
|
|
||||||
) -> Result<Vec<Option<(String, String)>>, AnyError> {
|
) -> Result<Vec<Option<(String, String)>>, AnyError> {
|
||||||
op_resolve_inner(
|
op_resolve_inner(state, ResolveArgs { base, specifiers })
|
||||||
state,
|
|
||||||
ResolveArgs {
|
|
||||||
base,
|
|
||||||
is_base_cjs,
|
|
||||||
specifiers,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
struct TscRequestArray {
|
struct TscRequestArray {
|
||||||
|
@ -4512,6 +4523,7 @@ impl<'a> ToV8<'a> for TscRequestArray {
|
||||||
|
|
||||||
let method_name = deno_core::FastString::from_static(method_name)
|
let method_name = deno_core::FastString::from_static(method_name)
|
||||||
.v8_string(scope)
|
.v8_string(scope)
|
||||||
|
.unwrap()
|
||||||
.into();
|
.into();
|
||||||
let args = args.unwrap_or_else(|| v8::Array::new(scope, 0).into());
|
let args = args.unwrap_or_else(|| v8::Array::new(scope, 0).into());
|
||||||
let scope_url = serde_v8::to_v8(scope, self.scope)
|
let scope_url = serde_v8::to_v8(scope, self.scope)
|
||||||
|
@ -4692,10 +4704,7 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
|
||||||
let (types, _) = documents.resolve_dependency(
|
let (types, _) = documents.resolve_dependency(
|
||||||
types,
|
types,
|
||||||
specifier,
|
specifier,
|
||||||
state
|
doc.resolution_mode(),
|
||||||
.state_snapshot
|
|
||||||
.is_cjs_resolver
|
|
||||||
.get_doc_module_kind(doc),
|
|
||||||
doc.file_referrer(),
|
doc.file_referrer(),
|
||||||
)?;
|
)?;
|
||||||
let types_doc = documents.get_or_load(&types, doc.file_referrer())?;
|
let types_doc = documents.get_or_load(&types, doc.file_referrer())?;
|
||||||
|
@ -5534,7 +5543,6 @@ impl TscRequest {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::cache::HttpCache;
|
use crate::cache::HttpCache;
|
||||||
use crate::http_util::HeadersMap;
|
|
||||||
use crate::lsp::cache::LspCache;
|
use crate::lsp::cache::LspCache;
|
||||||
use crate::lsp::config::Config;
|
use crate::lsp::config::Config;
|
||||||
use crate::lsp::config::WorkspaceSettings;
|
use crate::lsp::config::WorkspaceSettings;
|
||||||
|
@@ -5579,7 +5587,6 @@ mod tests {
       documents: Arc::new(documents),
       assets: Default::default(),
       config: Arc::new(config),
-      is_cjs_resolver: Default::default(),
       resolver,
     });
     let performance = Arc::new(Performance::default());
@@ -5765,6 +5772,7 @@ mod tests {
         "sourceLine": " import { A } from \".\";",
         "category": 2,
         "code": 6133,
+        "reportsUnnecessary": true,
       }]
     })
   );
@@ -5847,6 +5855,7 @@ mod tests {
         "sourceLine": " import {",
         "category": 2,
         "code": 6192,
+        "reportsUnnecessary": true,
       }, {
         "start": {
           "line": 8,
@@ -5970,7 +5979,7 @@ mod tests {
     .global()
     .set(
       &specifier_dep,
-      HeadersMap::default(),
+      Default::default(),
       b"export const b = \"b\";\n",
     )
     .unwrap();
@@ -6009,7 +6018,7 @@ mod tests {
     .global()
     .set(
       &specifier_dep,
-      HeadersMap::default(),
+      Default::default(),
      b"export const b = \"b\";\n\nexport const a = \"b\";\n",
     )
     .unwrap();
@@ -6430,8 +6439,7 @@ mod tests {
       &mut state,
       ResolveArgs {
         base: temp_dir.url().join("a.ts").unwrap().to_string(),
-        is_base_cjs: false,
-        specifiers: vec!["./b.ts".to_string()],
+        specifiers: vec![(false, "./b.ts".to_string())],
       },
     )
     .unwrap();
11
cli/main.rs
@@ -1,7 +1,6 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 mod args;
-mod auth_tokens;
 mod cache;
 mod cdp;
 mod emit;
@@ -437,20 +436,18 @@ fn resolve_flags_and_init(
       if err.kind() == clap::error::ErrorKind::DisplayVersion =>
     {
       // Ignore results to avoid BrokenPipe errors.
-      util::logger::init(None);
+      util::logger::init(None, None);
       let _ = err.print();
       deno_runtime::exit(0);
     }
     Err(err) => {
-      util::logger::init(None);
+      util::logger::init(None, None);
       exit_for_error(AnyError::from(err))
     }
   };
 
-  if let Some(otel_config) = flags.otel_config() {
-    deno_runtime::ops::otel::init(otel_config)?;
-  }
-  util::logger::init(flags.log_level);
+  deno_telemetry::init(crate::args::otel_runtime_config())?;
+  util::logger::init(flags.log_level, Some(flags.otel_config()));
 
   // TODO(bartlomieju): remove in Deno v2.5 and hard error then.
   if flags.unstable_config.legacy_flag_enabled {
@@ -8,7 +8,6 @@
 mod standalone;
 
 mod args;
-mod auth_tokens;
 mod cache;
 mod emit;
 mod errors;
@@ -87,17 +86,18 @@ fn main() {
   let future = async move {
     match standalone {
       Ok(Some(data)) => {
-        if let Some(otel_config) = data.metadata.otel_config.clone() {
-          deno_runtime::ops::otel::init(otel_config)?;
-        }
-        util::logger::init(data.metadata.log_level);
+        deno_telemetry::init(crate::args::otel_runtime_config())?;
+        util::logger::init(
+          data.metadata.log_level,
+          Some(data.metadata.otel_config.clone()),
+        );
         load_env_vars(&data.metadata.env_vars_from_env_file);
         let exit_code = standalone::run(data).await?;
         deno_runtime::exit(exit_code);
       }
       Ok(None) => Ok(()),
       Err(err) => {
-        util::logger::init(None);
+        util::logger::init(None, None);
         Err(err)
       }
     }
@@ -7,6 +7,8 @@ use std::path::PathBuf;
 use std::pin::Pin;
 use std::rc::Rc;
 use std::str;
+use std::sync::atomic::AtomicU16;
+use std::sync::atomic::Ordering;
 use std::sync::Arc;
 
 use crate::args::jsr_url;
@@ -49,6 +51,7 @@ use deno_core::error::generic_error;
 use deno_core::error::AnyError;
 use deno_core::futures::future::FutureExt;
 use deno_core::futures::Future;
+use deno_core::parking_lot::Mutex;
 use deno_core::resolve_url;
 use deno_core::ModuleCodeString;
 use deno_core::ModuleLoader;
@@ -57,9 +60,7 @@ use deno_core::ModuleSourceCode;
 use deno_core::ModuleSpecifier;
 use deno_core::ModuleType;
 use deno_core::RequestedModuleType;
-use deno_core::ResolutionKind;
 use deno_core::SourceCodeCacheInfo;
-use deno_graph::source::ResolutionMode;
 use deno_graph::GraphKind;
 use deno_graph::JsModule;
 use deno_graph::JsonModule;
@@ -76,7 +77,8 @@ use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_semver::npm::NpmPackageReqReference;
 use node_resolver::errors::ClosestPkgJsonError;
 use node_resolver::InNpmPackageChecker;
-use node_resolver::NodeResolutionMode;
+use node_resolver::NodeResolutionKind;
+use node_resolver::ResolutionMode;
 
 pub struct ModuleLoadPreparer {
   options: Arc<CliOptions>,
@@ -154,6 +156,7 @@ impl ModuleLoadPreparer {
         graph_kind: graph.graph_kind(),
         roots: roots.to_vec(),
         loader: Some(&mut cache),
+        npm_caching: self.options.default_npm_caching_strategy(),
       },
     )
     .await?;
@@ -223,6 +226,42 @@ struct SharedCliModuleLoaderState {
   npm_module_loader: NpmModuleLoader,
   parsed_source_cache: Arc<ParsedSourceCache>,
   resolver: Arc<CliResolver>,
+  in_flight_loads_tracker: InFlightModuleLoadsTracker,
+}
+
+struct InFlightModuleLoadsTracker {
+  loads_number: Arc<AtomicU16>,
+  cleanup_task_timeout: u64,
+  cleanup_task_handle: Arc<Mutex<Option<tokio::task::JoinHandle<()>>>>,
+}
+
+impl InFlightModuleLoadsTracker {
+  pub fn increase(&self) {
+    self.loads_number.fetch_add(1, Ordering::Relaxed);
+    if let Some(task) = self.cleanup_task_handle.lock().take() {
+      task.abort();
+    }
+  }
+
+  pub fn decrease(&self, parsed_source_cache: &Arc<ParsedSourceCache>) {
+    let prev = self.loads_number.fetch_sub(1, Ordering::Relaxed);
+    if prev == 1 {
+      let parsed_source_cache = parsed_source_cache.clone();
+      let timeout = self.cleanup_task_timeout;
+      let task_handle = tokio::spawn(async move {
+        // We use a timeout here, which is defined to 10s,
+        // so that in situations when dynamic imports are loaded after the startup,
+        // we don't need to recompute and analyze multiple modules.
+        tokio::time::sleep(std::time::Duration::from_millis(timeout)).await;
+        parsed_source_cache.free_all();
+      });
+      let maybe_prev_task =
+        self.cleanup_task_handle.lock().replace(task_handle);
+      if let Some(prev_task) = maybe_prev_task {
+        prev_task.abort();
+      }
+    }
+  }
 }
 
 pub struct CliModuleLoaderFactory {
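The tracker introduced in this hunk is easiest to read as a reference count over in-flight module loads: every load bumps the counter, and only the transition back to zero schedules the deferred purge of the parsed-source cache, so a burst of dynamic imports shortly after startup does not keep throwing parsed modules away and re-analyzing them. A minimal, self-contained sketch of that pattern follows (simplified stand-in types, not the CLI's actual code, and without the tokio cleanup task):

use std::sync::atomic::{AtomicU16, Ordering};

// Simplified stand-in for InFlightModuleLoadsTracker: it counts in-flight loads
// and reports when the *last* one finishes, which is the only point at which
// scheduling the delayed ParsedSourceCache::free_all() makes sense.
struct LoadCounter {
  in_flight: AtomicU16,
}

impl LoadCounter {
  fn start(&self) {
    // A newly arriving load would also abort any pending cleanup task here.
    self.in_flight.fetch_add(1, Ordering::Relaxed);
  }

  // Returns true when the caller should schedule the delayed cache purge.
  fn finish(&self) -> bool {
    self.in_flight.fetch_sub(1, Ordering::Relaxed) == 1
  }
}

fn main() {
  let counter = LoadCounter { in_flight: AtomicU16::new(0) };
  counter.start();
  counter.start();
  assert!(!counter.finish()); // one load still in flight: keep the cache warm
  assert!(counter.finish()); // last load out: schedule the purge after a timeout
}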
@@ -273,6 +312,11 @@ impl CliModuleLoaderFactory {
         npm_module_loader,
         parsed_source_cache,
         resolver,
+        in_flight_loads_tracker: InFlightModuleLoadsTracker {
+          loads_number: Arc::new(AtomicU16::new(0)),
+          cleanup_task_timeout: 10_000,
+          cleanup_task_handle: Arc::new(Mutex::new(None)),
+        },
       }),
     }
   }
@@ -498,13 +542,11 @@ impl<TGraphContainer: ModuleGraphContainer>
       }
       Resolution::None => Cow::Owned(self.shared.resolver.resolve(
         raw_specifier,
-        &deno_graph::Range {
-          specifier: referrer.clone(),
-          start: deno_graph::Position::zeroed(),
-          end: deno_graph::Position::zeroed(),
-        },
-        self.shared.cjs_tracker.get_referrer_kind(referrer),
-        ResolutionMode::Execution,
+        referrer,
+        deno_graph::Position::zeroed(),
+        // if we're here, that means it's resolving a dynamic import
+        ResolutionMode::Import,
+        NodeResolutionKind::Execution,
       )?),
     };
 
@@ -517,8 +559,8 @@ impl<TGraphContainer: ModuleGraphContainer>
         .resolve_req_reference(
           &reference,
           referrer,
-          self.shared.cjs_tracker.get_referrer_kind(referrer),
-          NodeResolutionMode::Execution,
+          ResolutionMode::Import,
+          NodeResolutionKind::Execution,
         )
         .map_err(AnyError::from);
     }
@@ -539,8 +581,8 @@ impl<TGraphContainer: ModuleGraphContainer>
           &package_folder,
           module.nv_reference.sub_path(),
           Some(referrer),
-          self.shared.cjs_tracker.get_referrer_kind(referrer),
-          NodeResolutionMode::Execution,
+          ResolutionMode::Import,
+          NodeResolutionKind::Execution,
         )
         .with_context(|| {
           format!("Could not resolve '{}'.", module.nv_reference)
@@ -806,7 +848,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
     &self,
     specifier: &str,
     referrer: &str,
-    _kind: ResolutionKind,
+    _kind: deno_core::ResolutionKind,
   ) -> Result<ModuleSpecifier, AnyError> {
     fn ensure_not_jsr_non_jsr_remote_import(
       specifier: &ModuleSpecifier,
@@ -870,6 +912,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
     _maybe_referrer: Option<String>,
     is_dynamic: bool,
   ) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
+    self.0.shared.in_flight_loads_tracker.increase();
     if self.0.shared.in_npm_pkg_checker.in_npm_package(specifier) {
       return Box::pin(deno_core::futures::future::ready(Ok(())));
     }
@@ -924,6 +967,14 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
       .boxed_local()
   }
 
+  fn finish_load(&self) {
+    self
+      .0
+      .shared
+      .in_flight_loads_tracker
+      .decrease(&self.0.shared.parsed_source_cache);
+  }
+
   fn code_cache_ready(
     &self,
     specifier: ModuleSpecifier,
@@ -945,7 +996,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
     std::future::ready(()).boxed_local()
   }
 
-  fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
+  fn get_source_map(&self, file_name: &str) -> Option<Cow<[u8]>> {
     let specifier = resolve_url(file_name).ok()?;
     match specifier.scheme() {
       // we should only be looking for emits for schemes that denote external
@@ -957,7 +1008,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
           .0
           .load_prepared_module_for_source_map_sync(&specifier)
           .ok()??;
-        source_map_from_code(source.code.as_bytes())
+        source_map_from_code(source.code.as_bytes()).map(Cow::Owned)
       }
 
   fn get_source_mapped_source_line(
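The get_source_map change in this hunk swaps Option<Vec<u8>> for Option<Cow<[u8]>>, so a caller that already holds the bytes can hand out a borrow while freshly generated maps stay owned, avoiding a copy in the common case. A small illustration of that signature choice (a hypothetical helper, not the loader's real code):

use std::borrow::Cow;

// Return a borrowed slice when a cached source map is available, and only
// allocate (Cow::Owned) when the map has to be produced on the fly.
fn source_map<'a>(
  cached: Option<&'a [u8]>,
  generate: impl FnOnce() -> Option<Vec<u8>>,
) -> Option<Cow<'a, [u8]>> {
  match cached {
    Some(bytes) => Some(Cow::Borrowed(bytes)),
    None => generate().map(Cow::Owned),
  }
}

fn main() {
  let cached = b"{\"version\":3}".as_slice();
  assert!(matches!(source_map(Some(cached), || None), Some(Cow::Borrowed(_))));
  assert!(matches!(source_map(None, || Some(vec![1, 2, 3])), Some(Cow::Owned(_))));
}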
@ -1063,7 +1114,10 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
|
||||||
self.npm_resolver.ensure_read_permission(permissions, path)
|
self.npm_resolver.ensure_read_permission(permissions, path)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError> {
|
fn load_text_file_lossy(
|
||||||
|
&self,
|
||||||
|
path: &Path,
|
||||||
|
) -> Result<Cow<'static, str>, AnyError> {
|
||||||
// todo(dsherret): use the preloaded module from the graph if available?
|
// todo(dsherret): use the preloaded module from the graph if available?
|
||||||
let media_type = MediaType::from_path(path);
|
let media_type = MediaType::from_path(path);
|
||||||
let text = self.fs.read_text_file_lossy_sync(path, None)?;
|
let text = self.fs.read_text_file_lossy_sync(path, None)?;
|
||||||
|
@ -1078,15 +1132,18 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
self.emitter.emit_parsed_source_sync(
|
self
|
||||||
&specifier,
|
.emitter
|
||||||
media_type,
|
.emit_parsed_source_sync(
|
||||||
// this is probably not super accurate due to require esm, but probably ok.
|
&specifier,
|
||||||
// If we find this causes a lot of churn in the emit cache then we should
|
media_type,
|
||||||
// investigate how we can make this better
|
// this is probably not super accurate due to require esm, but probably ok.
|
||||||
ModuleKind::Cjs,
|
// If we find this causes a lot of churn in the emit cache then we should
|
||||||
&text.into(),
|
// investigate how we can make this better
|
||||||
)
|
ModuleKind::Cjs,
|
||||||
|
&text.into(),
|
||||||
|
)
|
||||||
|
.map(Cow::Owned)
|
||||||
} else {
|
} else {
|
||||||
Ok(text)
|
Ok(text)
|
||||||
}
|
}
|
||||||
|
@ -1100,3 +1157,44 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
|
||||||
self.cjs_tracker.is_maybe_cjs(specifier, media_type)
|
self.cjs_tracker.is_maybe_cjs(specifier, media_type)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use deno_graph::ParsedSourceStore;
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_inflight_module_loads_tracker() {
|
||||||
|
let tracker = InFlightModuleLoadsTracker {
|
||||||
|
loads_number: Default::default(),
|
||||||
|
cleanup_task_timeout: 10,
|
||||||
|
cleanup_task_handle: Default::default(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let specifier = ModuleSpecifier::parse("file:///a.js").unwrap();
|
||||||
|
let source = "const a = 'hello';";
|
||||||
|
let parsed_source_cache = Arc::new(ParsedSourceCache::default());
|
||||||
|
let parsed_source = parsed_source_cache
|
||||||
|
.remove_or_parse_module(&specifier, source.into(), MediaType::JavaScript)
|
||||||
|
.unwrap();
|
||||||
|
parsed_source_cache.set_parsed_source(specifier, parsed_source);
|
||||||
|
|
||||||
|
assert_eq!(parsed_source_cache.len(), 1);
|
||||||
|
assert!(tracker.cleanup_task_handle.lock().is_none());
|
||||||
|
tracker.increase();
|
||||||
|
tracker.increase();
|
||||||
|
assert!(tracker.cleanup_task_handle.lock().is_none());
|
||||||
|
tracker.decrease(&parsed_source_cache);
|
||||||
|
assert!(tracker.cleanup_task_handle.lock().is_none());
|
||||||
|
tracker.decrease(&parsed_source_cache);
|
||||||
|
assert!(tracker.cleanup_task_handle.lock().is_some());
|
||||||
|
assert_eq!(parsed_source_cache.len(), 1);
|
||||||
|
tracker.increase();
|
||||||
|
assert!(tracker.cleanup_task_handle.lock().is_none());
|
||||||
|
assert_eq!(parsed_source_cache.len(), 1);
|
||||||
|
tracker.decrease(&parsed_source_cache);
|
||||||
|
// Rather long timeout, but to make sure CI is not flaking on it.
|
||||||
|
tokio::time::sleep(std::time::Duration::from_millis(500)).await;
|
||||||
|
assert_eq!(parsed_source_cache.len(), 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -160,7 +160,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
|
||||||
if let Ok(source_from_file) =
|
if let Ok(source_from_file) =
|
||||||
self.fs.read_text_file_lossy_async(path, None).await
|
self.fs.read_text_file_lossy_async(path, None).await
|
||||||
{
|
{
|
||||||
Cow::Owned(source_from_file)
|
source_from_file
|
||||||
} else {
|
} else {
|
||||||
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
|
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
|
||||||
exports: vec![],
|
exports: vec![],
|
||||||
|
|
|
@ -5,8 +5,6 @@ use std::path::Path;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use cache::RegistryInfoDownloader;
|
|
||||||
use cache::TarballCache;
|
|
||||||
use deno_ast::ModuleSpecifier;
|
use deno_ast::ModuleSpecifier;
|
||||||
use deno_cache_dir::npm::NpmCacheDir;
|
use deno_cache_dir::npm::NpmCacheDir;
|
||||||
use deno_core::anyhow::Context;
|
use deno_core::anyhow::Context;
|
||||||
|
@ -22,6 +20,7 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||||
use deno_npm::NpmPackageId;
|
use deno_npm::NpmPackageId;
|
||||||
use deno_npm::NpmResolutionPackage;
|
use deno_npm::NpmResolutionPackage;
|
||||||
use deno_npm::NpmSystemInfo;
|
use deno_npm::NpmSystemInfo;
|
||||||
|
use deno_npm_cache::NpmCacheSetting;
|
||||||
use deno_resolver::npm::CliNpmReqResolver;
|
use deno_resolver::npm::CliNpmReqResolver;
|
||||||
use deno_runtime::colors;
|
use deno_runtime::colors;
|
||||||
use deno_runtime::deno_fs::FileSystem;
|
use deno_runtime::deno_fs::FileSystem;
|
||||||
|
@ -42,23 +41,22 @@ use crate::args::NpmProcessState;
|
||||||
use crate::args::NpmProcessStateKind;
|
use crate::args::NpmProcessStateKind;
|
||||||
use crate::args::PackageJsonDepValueParseWithLocationError;
|
use crate::args::PackageJsonDepValueParseWithLocationError;
|
||||||
use crate::cache::FastInsecureHasher;
|
use crate::cache::FastInsecureHasher;
|
||||||
use crate::http_util::HttpClientProvider;
|
|
||||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||||
use crate::util::progress_bar::ProgressBar;
|
use crate::util::progress_bar::ProgressBar;
|
||||||
use crate::util::sync::AtomicFlag;
|
use crate::util::sync::AtomicFlag;
|
||||||
|
|
||||||
use self::cache::NpmCache;
|
|
||||||
use self::registry::CliNpmRegistryApi;
|
|
||||||
use self::resolution::NpmResolution;
|
use self::resolution::NpmResolution;
|
||||||
use self::resolvers::create_npm_fs_resolver;
|
use self::resolvers::create_npm_fs_resolver;
|
||||||
use self::resolvers::NpmPackageFsResolver;
|
use self::resolvers::NpmPackageFsResolver;
|
||||||
|
|
||||||
|
use super::CliNpmCache;
|
||||||
|
use super::CliNpmCacheEnv;
|
||||||
|
use super::CliNpmRegistryInfoProvider;
|
||||||
use super::CliNpmResolver;
|
use super::CliNpmResolver;
|
||||||
|
use super::CliNpmTarballCache;
|
||||||
use super::InnerCliNpmResolverRef;
|
use super::InnerCliNpmResolverRef;
|
||||||
use super::ResolvePkgFolderFromDenoReqError;
|
use super::ResolvePkgFolderFromDenoReqError;
|
||||||
|
|
||||||
pub mod cache;
|
|
||||||
mod registry;
|
|
||||||
mod resolution;
|
mod resolution;
|
||||||
mod resolvers;
|
mod resolvers;
|
||||||
|
|
||||||
|
@ -73,7 +71,7 @@ pub struct CliManagedNpmResolverCreateOptions {
|
||||||
pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
|
pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
|
||||||
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
|
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
|
||||||
pub npm_cache_dir: Arc<NpmCacheDir>,
|
pub npm_cache_dir: Arc<NpmCacheDir>,
|
||||||
pub cache_setting: crate::args::CacheSetting,
|
pub cache_setting: deno_cache_dir::file_fetcher::CacheSetting,
|
||||||
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
||||||
pub maybe_node_modules_path: Option<PathBuf>,
|
pub maybe_node_modules_path: Option<PathBuf>,
|
||||||
pub npm_system_info: NpmSystemInfo,
|
pub npm_system_info: NpmSystemInfo,
|
||||||
|
@ -85,8 +83,9 @@ pub struct CliManagedNpmResolverCreateOptions {
|
||||||
pub async fn create_managed_npm_resolver_for_lsp(
|
pub async fn create_managed_npm_resolver_for_lsp(
|
||||||
options: CliManagedNpmResolverCreateOptions,
|
options: CliManagedNpmResolverCreateOptions,
|
||||||
) -> Arc<dyn CliNpmResolver> {
|
) -> Arc<dyn CliNpmResolver> {
|
||||||
let npm_cache = create_cache(&options);
|
let cache_env = create_cache_env(&options);
|
||||||
let npm_api = create_api(&options, npm_cache.clone());
|
let npm_cache = create_cache(cache_env.clone(), &options);
|
||||||
|
let npm_api = create_api(npm_cache.clone(), cache_env.clone(), &options);
|
||||||
// spawn due to the lsp's `Send` requirement
|
// spawn due to the lsp's `Send` requirement
|
||||||
deno_core::unsync::spawn(async move {
|
deno_core::unsync::spawn(async move {
|
||||||
let snapshot = match resolve_snapshot(&npm_api, options.snapshot).await {
|
let snapshot = match resolve_snapshot(&npm_api, options.snapshot).await {
|
||||||
|
@ -97,8 +96,8 @@ pub async fn create_managed_npm_resolver_for_lsp(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
create_inner(
|
create_inner(
|
||||||
|
cache_env,
|
||||||
options.fs,
|
options.fs,
|
||||||
options.http_client_provider,
|
|
||||||
options.maybe_lockfile,
|
options.maybe_lockfile,
|
||||||
npm_api,
|
npm_api,
|
||||||
npm_cache,
|
npm_cache,
|
||||||
|
@ -118,14 +117,15 @@ pub async fn create_managed_npm_resolver_for_lsp(
|
||||||
pub async fn create_managed_npm_resolver(
|
pub async fn create_managed_npm_resolver(
|
||||||
options: CliManagedNpmResolverCreateOptions,
|
options: CliManagedNpmResolverCreateOptions,
|
||||||
) -> Result<Arc<dyn CliNpmResolver>, AnyError> {
|
) -> Result<Arc<dyn CliNpmResolver>, AnyError> {
|
||||||
let npm_cache = create_cache(&options);
|
let npm_cache_env = create_cache_env(&options);
|
||||||
let npm_api = create_api(&options, npm_cache.clone());
|
let npm_cache = create_cache(npm_cache_env.clone(), &options);
|
||||||
let snapshot = resolve_snapshot(&npm_api, options.snapshot).await?;
|
let api = create_api(npm_cache.clone(), npm_cache_env.clone(), &options);
|
||||||
|
let snapshot = resolve_snapshot(&api, options.snapshot).await?;
|
||||||
Ok(create_inner(
|
Ok(create_inner(
|
||||||
|
npm_cache_env,
|
||||||
options.fs,
|
options.fs,
|
||||||
options.http_client_provider,
|
|
||||||
options.maybe_lockfile,
|
options.maybe_lockfile,
|
||||||
npm_api,
|
api,
|
||||||
npm_cache,
|
npm_cache,
|
||||||
options.npmrc,
|
options.npmrc,
|
||||||
options.npm_install_deps_provider,
|
options.npm_install_deps_provider,
|
||||||
|
@ -139,11 +139,11 @@ pub async fn create_managed_npm_resolver(
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
fn create_inner(
|
fn create_inner(
|
||||||
|
env: Arc<CliNpmCacheEnv>,
|
||||||
fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
|
fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
|
||||||
http_client_provider: Arc<HttpClientProvider>,
|
|
||||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||||
npm_api: Arc<CliNpmRegistryApi>,
|
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
|
||||||
npm_cache: Arc<NpmCache>,
|
npm_cache: Arc<CliNpmCache>,
|
||||||
npm_rc: Arc<ResolvedNpmRc>,
|
npm_rc: Arc<ResolvedNpmRc>,
|
||||||
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||||
text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
||||||
|
@ -153,16 +153,14 @@ fn create_inner(
|
||||||
lifecycle_scripts: LifecycleScriptsConfig,
|
lifecycle_scripts: LifecycleScriptsConfig,
|
||||||
) -> Arc<dyn CliNpmResolver> {
|
) -> Arc<dyn CliNpmResolver> {
|
||||||
let resolution = Arc::new(NpmResolution::from_serialized(
|
let resolution = Arc::new(NpmResolution::from_serialized(
|
||||||
npm_api.clone(),
|
registry_info_provider.clone(),
|
||||||
snapshot,
|
snapshot,
|
||||||
maybe_lockfile.clone(),
|
maybe_lockfile.clone(),
|
||||||
));
|
));
|
||||||
let tarball_cache = Arc::new(TarballCache::new(
|
let tarball_cache = Arc::new(CliNpmTarballCache::new(
|
||||||
npm_cache.clone(),
|
npm_cache.clone(),
|
||||||
fs.clone(),
|
env,
|
||||||
http_client_provider.clone(),
|
|
||||||
npm_rc.clone(),
|
npm_rc.clone(),
|
||||||
text_only_progress_bar.clone(),
|
|
||||||
));
|
));
|
||||||
let fs_resolver = create_npm_fs_resolver(
|
let fs_resolver = create_npm_fs_resolver(
|
||||||
fs.clone(),
|
fs.clone(),
|
||||||
|
@ -179,7 +177,7 @@ fn create_inner(
|
||||||
fs,
|
fs,
|
||||||
fs_resolver,
|
fs_resolver,
|
||||||
maybe_lockfile,
|
maybe_lockfile,
|
||||||
npm_api,
|
registry_info_provider,
|
||||||
npm_cache,
|
npm_cache,
|
||||||
npm_install_deps_provider,
|
npm_install_deps_provider,
|
||||||
resolution,
|
resolution,
|
||||||
|
@ -190,41 +188,55 @@ fn create_inner(
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_cache(options: &CliManagedNpmResolverCreateOptions) -> Arc<NpmCache> {
|
fn create_cache_env(
|
||||||
Arc::new(NpmCache::new(
|
options: &CliManagedNpmResolverCreateOptions,
|
||||||
|
) -> Arc<CliNpmCacheEnv> {
|
||||||
|
Arc::new(CliNpmCacheEnv::new(
|
||||||
|
options.fs.clone(),
|
||||||
|
options.http_client_provider.clone(),
|
||||||
|
options.text_only_progress_bar.clone(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_cache(
|
||||||
|
env: Arc<CliNpmCacheEnv>,
|
||||||
|
options: &CliManagedNpmResolverCreateOptions,
|
||||||
|
) -> Arc<CliNpmCache> {
|
||||||
|
Arc::new(CliNpmCache::new(
|
||||||
options.npm_cache_dir.clone(),
|
options.npm_cache_dir.clone(),
|
||||||
options.cache_setting.clone(),
|
NpmCacheSetting::from_cache_setting(&options.cache_setting),
|
||||||
|
env,
|
||||||
options.npmrc.clone(),
|
options.npmrc.clone(),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_api(
|
fn create_api(
|
||||||
|
cache: Arc<CliNpmCache>,
|
||||||
|
env: Arc<CliNpmCacheEnv>,
|
||||||
options: &CliManagedNpmResolverCreateOptions,
|
options: &CliManagedNpmResolverCreateOptions,
|
||||||
npm_cache: Arc<NpmCache>,
|
) -> Arc<CliNpmRegistryInfoProvider> {
|
||||||
) -> Arc<CliNpmRegistryApi> {
|
Arc::new(CliNpmRegistryInfoProvider::new(
|
||||||
Arc::new(CliNpmRegistryApi::new(
|
cache,
|
||||||
npm_cache.clone(),
|
env,
|
||||||
Arc::new(RegistryInfoDownloader::new(
|
options.npmrc.clone(),
|
||||||
npm_cache,
|
|
||||||
options.http_client_provider.clone(),
|
|
||||||
options.npmrc.clone(),
|
|
||||||
options.text_only_progress_bar.clone(),
|
|
||||||
)),
|
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn resolve_snapshot(
|
async fn resolve_snapshot(
|
||||||
api: &CliNpmRegistryApi,
|
registry_info_provider: &Arc<CliNpmRegistryInfoProvider>,
|
||||||
snapshot: CliNpmResolverManagedSnapshotOption,
|
snapshot: CliNpmResolverManagedSnapshotOption,
|
||||||
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, AnyError> {
|
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, AnyError> {
|
||||||
match snapshot {
|
match snapshot {
|
||||||
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
|
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
|
||||||
if !lockfile.overwrite() {
|
if !lockfile.overwrite() {
|
||||||
let snapshot = snapshot_from_lockfile(lockfile.clone(), api)
|
let snapshot = snapshot_from_lockfile(
|
||||||
.await
|
lockfile.clone(),
|
||||||
.with_context(|| {
|
®istry_info_provider.as_npm_registry_api(),
|
||||||
format!("failed reading lockfile '{}'", lockfile.filename.display())
|
)
|
||||||
})?;
|
.await
|
||||||
|
.with_context(|| {
|
||||||
|
format!("failed reading lockfile '{}'", lockfile.filename.display())
|
||||||
|
})?;
|
||||||
Ok(Some(snapshot))
|
Ok(Some(snapshot))
|
||||||
} else {
|
} else {
|
||||||
Ok(None)
|
Ok(None)
|
||||||
|
@ -285,17 +297,23 @@ pub fn create_managed_in_npm_pkg_checker(
|
||||||
Arc::new(ManagedInNpmPackageChecker { root_dir })
|
Arc::new(ManagedInNpmPackageChecker { root_dir })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
pub enum PackageCaching<'a> {
|
||||||
|
Only(Cow<'a, [PackageReq]>),
|
||||||
|
All,
|
||||||
|
}
|
||||||
|
|
||||||
/// An npm resolver where the resolution is managed by Deno rather than
|
/// An npm resolver where the resolution is managed by Deno rather than
|
||||||
/// the user bringing their own node_modules (BYONM) on the file system.
|
/// the user bringing their own node_modules (BYONM) on the file system.
|
||||||
pub struct ManagedCliNpmResolver {
|
pub struct ManagedCliNpmResolver {
|
||||||
fs: Arc<dyn FileSystem>,
|
fs: Arc<dyn FileSystem>,
|
||||||
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
||||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||||
npm_api: Arc<CliNpmRegistryApi>,
|
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
|
||||||
npm_cache: Arc<NpmCache>,
|
npm_cache: Arc<CliNpmCache>,
|
||||||
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||||
resolution: Arc<NpmResolution>,
|
resolution: Arc<NpmResolution>,
|
||||||
tarball_cache: Arc<TarballCache>,
|
tarball_cache: Arc<CliNpmTarballCache>,
|
||||||
text_only_progress_bar: ProgressBar,
|
text_only_progress_bar: ProgressBar,
|
||||||
npm_system_info: NpmSystemInfo,
|
npm_system_info: NpmSystemInfo,
|
||||||
top_level_install_flag: AtomicFlag,
|
top_level_install_flag: AtomicFlag,
|
||||||
|
@ -316,11 +334,11 @@ impl ManagedCliNpmResolver {
|
||||||
fs: Arc<dyn FileSystem>,
|
fs: Arc<dyn FileSystem>,
|
||||||
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
||||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||||
npm_api: Arc<CliNpmRegistryApi>,
|
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
|
||||||
npm_cache: Arc<NpmCache>,
|
npm_cache: Arc<CliNpmCache>,
|
||||||
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||||
resolution: Arc<NpmResolution>,
|
resolution: Arc<NpmResolution>,
|
||||||
tarball_cache: Arc<TarballCache>,
|
tarball_cache: Arc<CliNpmTarballCache>,
|
||||||
text_only_progress_bar: ProgressBar,
|
text_only_progress_bar: ProgressBar,
|
||||||
npm_system_info: NpmSystemInfo,
|
npm_system_info: NpmSystemInfo,
|
||||||
lifecycle_scripts: LifecycleScriptsConfig,
|
lifecycle_scripts: LifecycleScriptsConfig,
|
||||||
|
@ -329,7 +347,7 @@ impl ManagedCliNpmResolver {
|
||||||
fs,
|
fs,
|
||||||
fs_resolver,
|
fs_resolver,
|
||||||
maybe_lockfile,
|
maybe_lockfile,
|
||||||
npm_api,
|
registry_info_provider,
|
||||||
npm_cache,
|
npm_cache,
|
||||||
npm_install_deps_provider,
|
npm_install_deps_provider,
|
||||||
text_only_progress_bar,
|
text_only_progress_bar,
|
||||||
|
@ -409,19 +427,44 @@ impl ManagedCliNpmResolver {
|
||||||
|
|
||||||
/// Adds package requirements to the resolver and ensures everything is setup.
|
/// Adds package requirements to the resolver and ensures everything is setup.
|
||||||
/// This includes setting up the `node_modules` directory, if applicable.
|
/// This includes setting up the `node_modules` directory, if applicable.
|
||||||
pub async fn add_package_reqs(
|
pub async fn add_and_cache_package_reqs(
|
||||||
&self,
|
&self,
|
||||||
packages: &[PackageReq],
|
packages: &[PackageReq],
|
||||||
) -> Result<(), AnyError> {
|
) -> Result<(), AnyError> {
|
||||||
self
|
self
|
||||||
.add_package_reqs_raw(packages)
|
.add_package_reqs_raw(
|
||||||
|
packages,
|
||||||
|
Some(PackageCaching::Only(packages.into())),
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.dependencies_result
|
.dependencies_result
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn add_package_reqs_raw(
|
pub async fn add_package_reqs_no_cache(
|
||||||
&self,
|
&self,
|
||||||
packages: &[PackageReq],
|
packages: &[PackageReq],
|
||||||
|
) -> Result<(), AnyError> {
|
||||||
|
self
|
||||||
|
.add_package_reqs_raw(packages, None)
|
||||||
|
.await
|
||||||
|
.dependencies_result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn add_package_reqs(
|
||||||
|
&self,
|
||||||
|
packages: &[PackageReq],
|
||||||
|
caching: PackageCaching<'_>,
|
||||||
|
) -> Result<(), AnyError> {
|
||||||
|
self
|
||||||
|
.add_package_reqs_raw(packages, Some(caching))
|
||||||
|
.await
|
||||||
|
.dependencies_result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn add_package_reqs_raw<'a>(
|
||||||
|
&self,
|
||||||
|
packages: &[PackageReq],
|
||||||
|
caching: Option<PackageCaching<'a>>,
|
||||||
) -> AddPkgReqsResult {
|
) -> AddPkgReqsResult {
|
||||||
if packages.is_empty() {
|
if packages.is_empty() {
|
||||||
return AddPkgReqsResult {
|
return AddPkgReqsResult {
|
||||||
|
@ -438,7 +481,9 @@ impl ManagedCliNpmResolver {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if result.dependencies_result.is_ok() {
|
if result.dependencies_result.is_ok() {
|
||||||
result.dependencies_result = self.cache_packages().await;
|
if let Some(caching) = caching {
|
||||||
|
result.dependencies_result = self.cache_packages(caching).await;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
result
|
result
|
||||||
|
@ -480,16 +525,20 @@ impl ManagedCliNpmResolver {
|
||||||
pub async fn inject_synthetic_types_node_package(
|
pub async fn inject_synthetic_types_node_package(
|
||||||
&self,
|
&self,
|
||||||
) -> Result<(), AnyError> {
|
) -> Result<(), AnyError> {
|
||||||
|
let reqs = &[PackageReq::from_str("@types/node").unwrap()];
|
||||||
// add and ensure this isn't added to the lockfile
|
// add and ensure this isn't added to the lockfile
|
||||||
self
|
self
|
||||||
.add_package_reqs(&[PackageReq::from_str("@types/node").unwrap()])
|
.add_package_reqs(reqs, PackageCaching::Only(reqs.into()))
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn cache_packages(&self) -> Result<(), AnyError> {
|
pub async fn cache_packages(
|
||||||
self.fs_resolver.cache_packages().await
|
&self,
|
||||||
|
caching: PackageCaching<'_>,
|
||||||
|
) -> Result<(), AnyError> {
|
||||||
|
self.fs_resolver.cache_packages(caching).await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_pkg_folder_from_deno_module(
|
pub fn resolve_pkg_folder_from_deno_module(
|
||||||
|
@ -511,11 +560,11 @@ impl ManagedCliNpmResolver {
|
||||||
&self,
|
&self,
|
||||||
) -> Result<(), Box<PackageJsonDepValueParseWithLocationError>> {
|
) -> Result<(), Box<PackageJsonDepValueParseWithLocationError>> {
|
||||||
for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
|
for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
|
||||||
match &err.source {
|
match err.source.as_kind() {
|
||||||
deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => {
|
deno_package_json::PackageJsonDepValueParseErrorKind::VersionReq(_) => {
|
||||||
return Err(Box::new(err.clone()));
|
return Err(Box::new(err.clone()));
|
||||||
}
|
}
|
||||||
deno_package_json::PackageJsonDepValueParseError::Unsupported {
|
deno_package_json::PackageJsonDepValueParseErrorKind::Unsupported {
|
||||||
..
|
..
|
||||||
} => {
|
} => {
|
||||||
// only warn for this one
|
// only warn for this one
|
||||||
|
@@ -534,18 +583,18 @@ impl ManagedCliNpmResolver {
   /// Ensures that the top level `package.json` dependencies are installed.
   /// This may set up the `node_modules` directory.
   ///
-  /// Returns `true` if any changes (such as caching packages) were made.
-  /// If this returns `false`, `node_modules` has _not_ been set up.
+  /// Returns `true` if the top level packages are already installed. A
+  /// return value of `false` means that new packages were added to the NPM resolution.
   pub async fn ensure_top_level_package_json_install(
     &self,
   ) -> Result<bool, AnyError> {
     if !self.top_level_install_flag.raise() {
-      return Ok(false); // already did this
+      return Ok(true); // already did this
     }
 
     let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs();
     if pkg_json_remote_pkgs.is_empty() {
-      return Ok(false);
+      return Ok(true);
     }
 
     // check if something needs resolving before bothering to load all
|
@ -559,14 +608,16 @@ impl ManagedCliNpmResolver {
|
||||||
log::debug!(
|
log::debug!(
|
||||||
"All package.json deps resolvable. Skipping top level install."
|
"All package.json deps resolvable. Skipping top level install."
|
||||||
);
|
);
|
||||||
return Ok(false); // everything is already resolvable
|
return Ok(true); // everything is already resolvable
|
||||||
}
|
}
|
||||||
|
|
||||||
let pkg_reqs = pkg_json_remote_pkgs
|
let pkg_reqs = pkg_json_remote_pkgs
|
||||||
.iter()
|
.iter()
|
||||||
.map(|pkg| pkg.req.clone())
|
.map(|pkg| pkg.req.clone())
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
self.add_package_reqs(&pkg_reqs).await.map(|_| true)
|
self.add_package_reqs_no_cache(&pkg_reqs).await?;
|
||||||
|
|
||||||
|
Ok(false)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn cache_package_info(
|
pub async fn cache_package_info(
|
||||||
|
@ -575,7 +626,7 @@ impl ManagedCliNpmResolver {
|
||||||
) -> Result<Arc<NpmPackageInfo>, AnyError> {
|
) -> Result<Arc<NpmPackageInfo>, AnyError> {
|
||||||
// this will internally cache the package information
|
// this will internally cache the package information
|
||||||
self
|
self
|
||||||
.npm_api
|
.registry_info_provider
|
||||||
.package_info(package_name)
|
.package_info(package_name)
|
||||||
.await
|
.await
|
||||||
.map_err(|err| err.into())
|
.map_err(|err| err.into())
|
||||||
|
@ -671,7 +722,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
||||||
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
|
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
|
||||||
// create a new snapshotted npm resolution and resolver
|
// create a new snapshotted npm resolution and resolver
|
||||||
let npm_resolution = Arc::new(NpmResolution::new(
|
let npm_resolution = Arc::new(NpmResolution::new(
|
||||||
self.npm_api.clone(),
|
self.registry_info_provider.clone(),
|
||||||
self.resolution.snapshot(),
|
self.resolution.snapshot(),
|
||||||
self.maybe_lockfile.clone(),
|
self.maybe_lockfile.clone(),
|
||||||
));
|
));
|
||||||
|
@ -690,7 +741,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
||||||
self.lifecycle_scripts.clone(),
|
self.lifecycle_scripts.clone(),
|
||||||
),
|
),
|
||||||
self.maybe_lockfile.clone(),
|
self.maybe_lockfile.clone(),
|
||||||
self.npm_api.clone(),
|
self.registry_info_provider.clone(),
|
||||||
self.npm_cache.clone(),
|
self.npm_cache.clone(),
|
||||||
self.npm_install_deps_provider.clone(),
|
self.npm_install_deps_provider.clone(),
|
||||||
npm_resolution,
|
npm_resolution,
|
||||||
|
|
|
@ -1,200 +0,0 @@
|
||||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
|
||||||
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::collections::HashSet;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use async_trait::async_trait;
|
|
||||||
use deno_core::anyhow::anyhow;
|
|
||||||
use deno_core::error::AnyError;
|
|
||||||
use deno_core::futures::future::BoxFuture;
|
|
||||||
use deno_core::futures::future::Shared;
|
|
||||||
use deno_core::futures::FutureExt;
|
|
||||||
use deno_core::parking_lot::Mutex;
|
|
||||||
use deno_npm::registry::NpmPackageInfo;
|
|
||||||
use deno_npm::registry::NpmRegistryApi;
|
|
||||||
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
|
|
||||||
|
|
||||||
use crate::args::CacheSetting;
|
|
||||||
use crate::util::sync::AtomicFlag;
|
|
||||||
|
|
||||||
use super::cache::NpmCache;
|
|
||||||
use super::cache::RegistryInfoDownloader;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct CliNpmRegistryApi(Option<Arc<CliNpmRegistryApiInner>>);
|
|
||||||
|
|
||||||
impl CliNpmRegistryApi {
|
|
||||||
pub fn new(
|
|
||||||
cache: Arc<NpmCache>,
|
|
||||||
registry_info_downloader: Arc<RegistryInfoDownloader>,
|
|
||||||
) -> Self {
|
|
||||||
Self(Some(Arc::new(CliNpmRegistryApiInner {
|
|
||||||
cache,
|
|
||||||
force_reload_flag: Default::default(),
|
|
||||||
mem_cache: Default::default(),
|
|
||||||
previously_reloaded_packages: Default::default(),
|
|
||||||
registry_info_downloader,
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Clears the internal memory cache.
|
|
||||||
pub fn clear_memory_cache(&self) {
|
|
||||||
self.inner().clear_memory_cache();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn inner(&self) -> &Arc<CliNpmRegistryApiInner> {
|
|
||||||
// this panicking indicates a bug in the code where this
|
|
||||||
// wasn't initialized
|
|
||||||
self.0.as_ref().unwrap()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait(?Send)]
|
|
||||||
impl NpmRegistryApi for CliNpmRegistryApi {
|
|
||||||
async fn package_info(
|
|
||||||
&self,
|
|
||||||
name: &str,
|
|
||||||
) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
|
|
||||||
match self.inner().maybe_package_info(name).await {
|
|
||||||
Ok(Some(info)) => Ok(info),
|
|
||||||
Ok(None) => Err(NpmRegistryPackageInfoLoadError::PackageNotExists {
|
|
||||||
package_name: name.to_string(),
|
|
||||||
}),
|
|
||||||
Err(err) => {
|
|
||||||
Err(NpmRegistryPackageInfoLoadError::LoadError(Arc::new(err)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn mark_force_reload(&self) -> bool {
|
|
||||||
self.inner().mark_force_reload()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type CacheItemPendingResult =
|
|
||||||
Result<Option<Arc<NpmPackageInfo>>, Arc<AnyError>>;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum CacheItem {
|
|
||||||
Pending(Shared<BoxFuture<'static, CacheItemPendingResult>>),
|
|
||||||
Resolved(Option<Arc<NpmPackageInfo>>),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct CliNpmRegistryApiInner {
|
|
||||||
cache: Arc<NpmCache>,
|
|
||||||
force_reload_flag: AtomicFlag,
|
|
||||||
mem_cache: Mutex<HashMap<String, CacheItem>>,
|
|
||||||
previously_reloaded_packages: Mutex<HashSet<String>>,
|
|
||||||
registry_info_downloader: Arc<RegistryInfoDownloader>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CliNpmRegistryApiInner {
|
|
||||||
pub async fn maybe_package_info(
|
|
||||||
self: &Arc<Self>,
|
|
||||||
name: &str,
|
|
||||||
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
|
|
||||||
let (created, future) = {
|
|
||||||
let mut mem_cache = self.mem_cache.lock();
|
|
||||||
match mem_cache.get(name) {
|
|
||||||
Some(CacheItem::Resolved(maybe_info)) => {
|
|
||||||
return Ok(maybe_info.clone());
|
|
||||||
}
|
|
||||||
Some(CacheItem::Pending(future)) => (false, future.clone()),
|
|
||||||
None => {
|
|
||||||
let future = {
|
|
||||||
let api = self.clone();
|
|
||||||
let name = name.to_string();
|
|
||||||
async move {
|
|
||||||
if (api.cache.cache_setting().should_use_for_npm_package(&name) && !api.force_reload_flag.is_raised())
|
|
||||||
// if this has been previously reloaded, then try loading from the
|
|
||||||
// file system cache
|
|
||||||
|| !api.previously_reloaded_packages.lock().insert(name.to_string())
|
|
||||||
{
|
|
||||||
// attempt to load from the file cache
|
|
||||||
if let Some(info) = api.load_file_cached_package_info(&name).await {
|
|
||||||
let result = Some(Arc::new(info));
|
|
||||||
return Ok(result);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
api.registry_info_downloader
|
|
||||||
.load_package_info(&name)
|
|
||||||
.await
|
|
||||||
.map_err(Arc::new)
|
|
||||||
}
|
|
||||||
.boxed()
|
|
||||||
.shared()
|
|
||||||
};
|
|
||||||
mem_cache
|
|
||||||
.insert(name.to_string(), CacheItem::Pending(future.clone()));
|
|
||||||
(true, future)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if created {
|
|
||||||
match future.await {
|
|
||||||
Ok(maybe_info) => {
|
|
||||||
// replace the cache item to say it's resolved now
|
|
||||||
self
|
|
||||||
.mem_cache
|
|
||||||
.lock()
|
|
||||||
.insert(name.to_string(), CacheItem::Resolved(maybe_info.clone()));
|
|
||||||
Ok(maybe_info)
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
// purge the item from the cache so it loads next time
|
|
||||||
self.mem_cache.lock().remove(name);
|
|
||||||
Err(anyhow!("{:#}", err))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ok(future.await.map_err(|err| anyhow!("{:#}", err))?)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn mark_force_reload(&self) -> bool {
|
|
||||||
// never force reload the registry information if reloading
|
|
||||||
// is disabled or if we're already reloading
|
|
||||||
if matches!(
|
|
||||||
self.cache.cache_setting(),
|
|
||||||
CacheSetting::Only | CacheSetting::ReloadAll
|
|
||||||
) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if self.force_reload_flag.raise() {
|
|
||||||
self.clear_memory_cache();
|
|
||||||
true
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn load_file_cached_package_info(
|
|
||||||
&self,
|
|
||||||
name: &str,
|
|
||||||
) -> Option<NpmPackageInfo> {
|
|
||||||
let result = deno_core::unsync::spawn_blocking({
|
|
||||||
let cache = self.cache.clone();
|
|
||||||
let name = name.to_string();
|
|
||||||
move || cache.load_package_info(&name)
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
match result {
|
|
||||||
Ok(value) => value,
|
|
||||||
Err(err) => {
|
|
||||||
if cfg!(debug_assertions) {
|
|
||||||
panic!("error loading cached npm package info for {name}: {err:#}");
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn clear_memory_cache(&self) {
|
|
||||||
self.mem_cache.lock().clear();
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -4,15 +4,15 @@ use std::collections::HashMap;
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use capacity_builder::StringBuilder;
|
||||||
use deno_core::error::AnyError;
|
use deno_core::error::AnyError;
|
||||||
use deno_lockfile::NpmPackageDependencyLockfileInfo;
|
use deno_lockfile::NpmPackageDependencyLockfileInfo;
|
||||||
use deno_lockfile::NpmPackageLockfileInfo;
|
use deno_lockfile::NpmPackageLockfileInfo;
|
||||||
use deno_npm::registry::NpmRegistryApi;
|
use deno_npm::registry::NpmRegistryApi;
|
||||||
|
use deno_npm::resolution::AddPkgReqsOptions;
|
||||||
use deno_npm::resolution::NpmPackagesPartitioned;
|
use deno_npm::resolution::NpmPackagesPartitioned;
|
||||||
use deno_npm::resolution::NpmResolutionError;
|
use deno_npm::resolution::NpmResolutionError;
|
||||||
use deno_npm::resolution::NpmResolutionSnapshot;
|
use deno_npm::resolution::NpmResolutionSnapshot;
|
||||||
use deno_npm::resolution::NpmResolutionSnapshotPendingResolver;
|
|
||||||
use deno_npm::resolution::NpmResolutionSnapshotPendingResolverOptions;
|
|
||||||
use deno_npm::resolution::PackageCacheFolderIdNotFoundError;
|
use deno_npm::resolution::PackageCacheFolderIdNotFoundError;
|
||||||
use deno_npm::resolution::PackageNotFoundFromReferrerError;
|
use deno_npm::resolution::PackageNotFoundFromReferrerError;
|
||||||
use deno_npm::resolution::PackageNvNotFoundError;
|
use deno_npm::resolution::PackageNvNotFoundError;
|
||||||
|
@ -25,13 +25,13 @@ use deno_npm::NpmSystemInfo;
|
||||||
use deno_semver::jsr::JsrDepPackageReq;
|
use deno_semver::jsr::JsrDepPackageReq;
|
||||||
use deno_semver::package::PackageNv;
|
use deno_semver::package::PackageNv;
|
||||||
use deno_semver::package::PackageReq;
|
use deno_semver::package::PackageReq;
|
||||||
|
use deno_semver::SmallStackString;
|
||||||
use deno_semver::VersionReq;
|
use deno_semver::VersionReq;
|
||||||
|
|
||||||
use crate::args::CliLockfile;
|
use crate::args::CliLockfile;
|
||||||
|
use crate::npm::CliNpmRegistryInfoProvider;
|
||||||
use crate::util::sync::SyncReadAsyncWriteLock;
|
use crate::util::sync::SyncReadAsyncWriteLock;
|
||||||
|
|
||||||
use super::CliNpmRegistryApi;
|
|
||||||
|
|
||||||
pub struct AddPkgReqsResult {
|
pub struct AddPkgReqsResult {
|
||||||
/// Results from adding the individual packages.
|
/// Results from adding the individual packages.
|
||||||
///
|
///
|
||||||
|
@ -48,7 +48,7 @@ pub struct AddPkgReqsResult {
|
||||||
///
|
///
|
||||||
/// This does not interact with the file system.
|
/// This does not interact with the file system.
|
||||||
pub struct NpmResolution {
|
pub struct NpmResolution {
|
||||||
api: Arc<CliNpmRegistryApi>,
|
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
|
||||||
snapshot: SyncReadAsyncWriteLock<NpmResolutionSnapshot>,
|
snapshot: SyncReadAsyncWriteLock<NpmResolutionSnapshot>,
|
||||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||||
}
|
}
|
||||||
|
@ -64,22 +64,22 @@ impl std::fmt::Debug for NpmResolution {
|
||||||
|
|
||||||
impl NpmResolution {
|
impl NpmResolution {
|
||||||
pub fn from_serialized(
|
pub fn from_serialized(
|
||||||
api: Arc<CliNpmRegistryApi>,
|
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
|
||||||
initial_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
initial_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let snapshot =
|
let snapshot =
|
||||||
NpmResolutionSnapshot::new(initial_snapshot.unwrap_or_default());
|
NpmResolutionSnapshot::new(initial_snapshot.unwrap_or_default());
|
||||||
Self::new(api, snapshot, maybe_lockfile)
|
Self::new(registry_info_provider, snapshot, maybe_lockfile)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(
|
pub fn new(
|
||||||
api: Arc<CliNpmRegistryApi>,
|
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
|
||||||
initial_snapshot: NpmResolutionSnapshot,
|
initial_snapshot: NpmResolutionSnapshot,
|
||||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
api,
|
registry_info_provider,
|
||||||
snapshot: SyncReadAsyncWriteLock::new(initial_snapshot),
|
snapshot: SyncReadAsyncWriteLock::new(initial_snapshot),
|
||||||
maybe_lockfile,
|
maybe_lockfile,
|
||||||
}
|
}
|
||||||
|
@ -92,7 +92,7 @@ impl NpmResolution {
|
||||||
// only allow one thread in here at a time
|
// only allow one thread in here at a time
|
||||||
let snapshot_lock = self.snapshot.acquire().await;
|
let snapshot_lock = self.snapshot.acquire().await;
|
||||||
let result = add_package_reqs_to_snapshot(
|
let result = add_package_reqs_to_snapshot(
|
||||||
&self.api,
|
&self.registry_info_provider,
|
||||||
package_reqs,
|
package_reqs,
|
||||||
self.maybe_lockfile.clone(),
|
self.maybe_lockfile.clone(),
|
||||||
|| snapshot_lock.read().clone(),
|
|| snapshot_lock.read().clone(),
|
||||||
|
@ -120,7 +120,7 @@ impl NpmResolution {
|
||||||
|
|
||||||
let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
|
let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
|
||||||
let snapshot = add_package_reqs_to_snapshot(
|
let snapshot = add_package_reqs_to_snapshot(
|
||||||
&self.api,
|
&self.registry_info_provider,
|
||||||
package_reqs,
|
package_reqs,
|
||||||
self.maybe_lockfile.clone(),
|
self.maybe_lockfile.clone(),
|
||||||
|| {
|
|| {
|
||||||
|
@ -257,10 +257,14 @@ impl NpmResolution {
|
||||||
.read()
|
.read()
|
||||||
.as_valid_serialized_for_system(system_info)
|
.as_valid_serialized_for_system(system_info)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn subset(&self, package_reqs: &[PackageReq]) -> NpmResolutionSnapshot {
|
||||||
|
self.snapshot.read().subset(package_reqs)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn add_package_reqs_to_snapshot(
|
async fn add_package_reqs_to_snapshot(
|
||||||
api: &CliNpmRegistryApi,
|
registry_info_provider: &Arc<CliNpmRegistryInfoProvider>,
|
||||||
package_reqs: &[PackageReq],
|
package_reqs: &[PackageReq],
|
||||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||||
get_new_snapshot: impl Fn() -> NpmResolutionSnapshot,
|
get_new_snapshot: impl Fn() -> NpmResolutionSnapshot,
|
||||||
|
@@ -283,23 +287,28 @@ async fn add_package_reqs_to_snapshot(
     /* this string is used in tests */
     "Running npm resolution."
   );
-  let pending_resolver = get_npm_pending_resolver(api);
-  let result = pending_resolver.add_pkg_reqs(snapshot, package_reqs).await;
-  api.clear_memory_cache();
+  let npm_registry_api = registry_info_provider.as_npm_registry_api();
+  let result = snapshot
+    .add_pkg_reqs(&npm_registry_api, get_add_pkg_reqs_options(package_reqs))
+    .await;
   let result = match &result.dep_graph_result {
-    Err(NpmResolutionError::Resolution(err)) if api.mark_force_reload() => {
+    Err(NpmResolutionError::Resolution(err))
+      if npm_registry_api.mark_force_reload() =>
+    {
       log::debug!("{err:#}");
       log::debug!("npm resolution failed. Trying again...");
 
-      // try again
+      // try again with forced reloading
       let snapshot = get_new_snapshot();
-      let result = pending_resolver.add_pkg_reqs(snapshot, package_reqs).await;
-      api.clear_memory_cache();
-      result
+      snapshot
+        .add_pkg_reqs(&npm_registry_api, get_add_pkg_reqs_options(package_reqs))
+        .await
     }
     _ => result,
   };
 
+  registry_info_provider.clear_memory_cache();
+
   if let Ok(snapshot) = &result.dep_graph_result {
     if let Some(lockfile) = maybe_lockfile {
       populate_lockfile_from_snapshot(&lockfile, snapshot);
@ -309,19 +318,15 @@ async fn add_package_reqs_to_snapshot(
|
||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_npm_pending_resolver(
|
fn get_add_pkg_reqs_options(package_reqs: &[PackageReq]) -> AddPkgReqsOptions {
|
||||||
api: &CliNpmRegistryApi,
|
AddPkgReqsOptions {
|
||||||
) -> NpmResolutionSnapshotPendingResolver<CliNpmRegistryApi> {
|
package_reqs,
|
||||||
NpmResolutionSnapshotPendingResolver::new(
|
// WARNING: When bumping this version, check if anything needs to be
|
||||||
NpmResolutionSnapshotPendingResolverOptions {
|
// updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
|
||||||
api,
|
types_node_version_req: Some(
|
||||||
// WARNING: When bumping this version, check if anything needs to be
|
VersionReq::parse_from_npm("22.0.0 - 22.5.4").unwrap(),
|
||||||
// updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
|
),
|
||||||
types_node_version_req: Some(
|
}
|
||||||
VersionReq::parse_from_npm("22.0.0 - 22.5.4").unwrap(),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn populate_lockfile_from_snapshot(
|
fn populate_lockfile_from_snapshot(
|
||||||
|
@ -333,7 +338,13 @@ fn populate_lockfile_from_snapshot(
|
||||||
let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
|
let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
|
||||||
lockfile.insert_package_specifier(
|
lockfile.insert_package_specifier(
|
||||||
JsrDepPackageReq::npm(package_req.clone()),
|
JsrDepPackageReq::npm(package_req.clone()),
|
||||||
format!("{}{}", id.nv.version, id.peer_deps_serialized()),
|
{
|
||||||
|
StringBuilder::<SmallStackString>::build(|builder| {
|
||||||
|
builder.append(&id.nv.version);
|
||||||
|
builder.append(&id.peer_dependencies);
|
||||||
|
})
|
||||||
|
.unwrap()
|
||||||
|
},
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
for package in snapshot.all_packages_for_every_system() {
|
for package in snapshot.all_packages_for_every_system() {
|
||||||
|
|
|
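The lockfile hunk above replaces a `format!` call with a string builder so the version-plus-peer-dependencies value is assembled in one pass. This is not part of the commit, just a minimal sketch of the value being built, using plain `String` instead of the builder types from the diff:

```rust
// Illustrative only: builds the same "<version><serialized peer deps>" value
// stored in the lockfile entry, with std::String rather than
// StringBuilder/SmallStackString from the actual change.
fn lockfile_version_value(version: &str, peer_deps_serialized: &str) -> String {
  let mut out = String::with_capacity(version.len() + peer_deps_serialized.len());
  out.push_str(version);
  out.push_str(peer_deps_serialized);
  out
}
```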
@@ -11,6 +11,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;

+ use super::super::PackageCaching;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::Context;

@@ -24,7 +25,7 @@ use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use node_resolver::errors::PackageFolderResolveError;

- use crate::npm::managed::cache::TarballCache;
+ use crate::npm::CliNpmTarballCache;

/// Part of the resolution that interacts with the file system.
#[async_trait(?Send)]

@@ -57,7 +58,10 @@ pub trait NpmPackageFsResolver: Send + Sync {
specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageCacheFolderId>, AnyError>;

- async fn cache_packages(&self) -> Result<(), AnyError>;
+ async fn cache_packages<'a>(
+   &self,
+   caching: PackageCaching<'a>,
+ ) -> Result<(), AnyError>;

#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn ensure_read_permission<'a>(

@@ -140,7 +144,7 @@ impl RegistryReadPermissionChecker {
/// Caches all the packages in parallel.
pub async fn cache_packages(
packages: &[NpmResolutionPackage],
- tarball_cache: &Arc<TarballCache>,
+ tarball_cache: &Arc<CliNpmTarballCache>,
) -> Result<(), AnyError> {
let mut futures_unordered = futures::stream::FuturesUnordered::new();
for package in packages {
@@ -28,8 +28,10 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
.id
.nv
.name
+ .as_str()
.rsplit_once('/')
- .map_or(package.id.nv.name.as_str(), |(_, name)| name)
+ .map(|(_, name)| name)
+ .unwrap_or(package.id.nv.name.as_str())
}

pub fn warn_missing_entrypoint(
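The change above defaults a bin name to the part of the package name after the last `/`. Not part of the commit; a standalone sketch of that logic as a hypothetical free function:

```rust
// e.g. default_bin_name("@denotest/say-hello") == "say-hello"
//      default_bin_name("preact") == "preact"
fn default_bin_name(package_name: &str) -> &str {
  package_name
    .rsplit_once('/')
    .map(|(_, name)| name)
    .unwrap_or(package_name)
}
```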
@@ -9,6 +9,7 @@ use deno_npm::resolution::NpmResolutionSnapshot;
use deno_runtime::deno_io::FromRawIoHandle;
use deno_semver::package::PackageNv;
use deno_semver::Version;
+ use deno_task_shell::KillSignal;
use std::borrow::Cow;
use std::collections::HashSet;
use std::rc::Rc;

@@ -155,6 +156,29 @@ impl<'a> LifecycleScripts<'a> {
packages: &[NpmResolutionPackage],
root_node_modules_dir_path: &Path,
progress_bar: &ProgressBar,
+ ) -> Result<(), AnyError> {
+   let kill_signal = KillSignal::default();
+   let _drop_signal = kill_signal.clone().drop_guard();
+   // we don't run with signals forwarded because once signals
+   // are setup then they're process wide.
+   self
+     .finish_with_cancellation(
+       snapshot,
+       packages,
+       root_node_modules_dir_path,
+       progress_bar,
+       kill_signal,
+     )
+     .await
+ }
+
+ async fn finish_with_cancellation(
+   self,
+   snapshot: &NpmResolutionSnapshot,
+   packages: &[NpmResolutionPackage],
+   root_node_modules_dir_path: &Path,
+   progress_bar: &ProgressBar,
+   kill_signal: KillSignal,
) -> Result<(), AnyError> {
self.warn_not_run_scripts()?;
let get_package_path =

@@ -182,6 +206,12 @@ impl<'a> LifecycleScripts<'a> {
);

let mut env_vars = crate::task_runner::real_env_vars();
+ // so the subprocess can detect that it is running as part of a lifecycle script,
+ // and avoid trying to set up node_modules again
+ env_vars.insert(
+   LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR.to_string(),
+   "1".to_string(),
+ );
// we want to pass the current state of npm resolution down to the deno subprocess
// (that may be running as part of the script). we do this with an inherited temp file
//

@@ -240,6 +270,7 @@ impl<'a> LifecycleScripts<'a> {
stderr: TaskStdio::piped(),
stdout: TaskStdio::piped(),
}),
+ kill_signal: kill_signal.clone(),
},
)
.await?;

@@ -303,6 +334,13 @@ impl<'a> LifecycleScripts<'a> {
}
}

+ const LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR: &str =
+   "DENO_INTERNAL_IS_LIFECYCLE_SCRIPT";
+
+ pub fn is_running_lifecycle_script() -> bool {
+   std::env::var(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR).is_ok()
+ }

// take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner
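The new `LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR` / `is_running_lifecycle_script` pair above is a simple env-var guard: the parent sets the variable on the subprocess, and a nested `deno` invocation checks it to skip node_modules setup. A minimal, self-contained sketch of the same pattern (the spawn helper below is hypothetical, not from the diff):

```rust
use std::process::{Child, Command};

const LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR: &str = "DENO_INTERNAL_IS_LIFECYCLE_SCRIPT";

// Checked by the child: true when spawned as part of a lifecycle script.
fn is_running_lifecycle_script() -> bool {
  std::env::var(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR).is_ok()
}

// Set by the parent before spawning the script so the child can detect it.
fn spawn_lifecycle_script(mut cmd: Command) -> std::io::Result<Child> {
  cmd.env(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR, "1");
  cmd.spawn()
}
```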
@@ -8,6 +8,9 @@ use std::path::PathBuf;
use std::sync::Arc;

use crate::colors;
+ use crate::npm::managed::PackageCaching;
+ use crate::npm::CliNpmCache;
+ use crate::npm::CliNpmTarballCache;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;

@@ -24,8 +27,6 @@ use node_resolver::errors::ReferrerNotFoundError;
use crate::args::LifecycleScriptsConfig;
use crate::cache::FastInsecureHasher;

- use super::super::cache::NpmCache;
- use super::super::cache::TarballCache;
use super::super::resolution::NpmResolution;
use super::common::cache_packages;
use super::common::lifecycle_scripts::LifecycleScriptsStrategy;

@@ -35,8 +36,8 @@ use super::common::RegistryReadPermissionChecker;
/// Resolves packages from the global npm cache.
#[derive(Debug)]
pub struct GlobalNpmPackageResolver {
- cache: Arc<NpmCache>,
- tarball_cache: Arc<TarballCache>,
+ cache: Arc<CliNpmCache>,
+ tarball_cache: Arc<CliNpmTarballCache>,
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
registry_read_permission_checker: RegistryReadPermissionChecker,

@@ -45,9 +46,9 @@ pub struct GlobalNpmPackageResolver {

impl GlobalNpmPackageResolver {
pub fn new(
- cache: Arc<NpmCache>,
+ cache: Arc<CliNpmCache>,
fs: Arc<dyn FileSystem>,
- tarball_cache: Arc<TarballCache>,
+ tarball_cache: Arc<CliNpmTarballCache>,
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,

@@ -150,10 +151,19 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
)
}

- async fn cache_packages(&self) -> Result<(), AnyError> {
-   let package_partitions = self
-     .resolution
-     .all_system_packages_partitioned(&self.system_info);
+ async fn cache_packages<'a>(
+   &self,
+   caching: PackageCaching<'a>,
+ ) -> Result<(), AnyError> {
+   let package_partitions = match caching {
+     PackageCaching::All => self
+       .resolution
+       .all_system_packages_partitioned(&self.system_info),
+     PackageCaching::Only(reqs) => self
+       .resolution
+       .subset(&reqs)
+       .all_system_packages_partitioned(&self.system_info),
+   };
cache_packages(&package_partitions.packages, &self.tarball_cache).await?;

// create the copy package folders
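The new `cache_packages` signature above takes a `PackageCaching` value so callers can cache either everything in the resolution or only the packages needed for a given set of requirements. Below is a simplified stand-in for that enum and the branching it enables; the types are reduced to plain strings and the filter is only a placeholder heuristic, not the real resolution logic:

```rust
use std::borrow::Cow;

// Simplified stand-in for the PackageCaching type used in the diff.
enum PackageCaching<'a> {
  All,
  Only(Cow<'a, [String]>), // package requirements, reduced to plain strings
}

// Pick which packages to download based on the caching mode.
fn packages_to_cache<'a>(
  all_packages: &[String],
  caching: &PackageCaching<'a>,
) -> Vec<String> {
  match caching {
    PackageCaching::All => all_packages.to_vec(),
    PackageCaching::Only(reqs) => all_packages
      .iter()
      .filter(|pkg| reqs.iter().any(|req| pkg.starts_with(req.as_str())))
      .cloned()
      .collect(),
  }
}
```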
@@ -17,6 +17,9 @@ use std::sync::Arc;

use crate::args::LifecycleScriptsConfig;
use crate::colors;
+ use crate::npm::managed::PackageCaching;
+ use crate::npm::CliNpmCache;
+ use crate::npm::CliNpmTarballCache;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::mixed_case_package_name_decode;

@@ -35,6 +38,7 @@ use deno_resolver::npm::normalize_pkg_name_for_node_modules_deno_folder;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodePermissions;
use deno_semver::package::PackageNv;
+ use deno_semver::StackString;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;

@@ -52,8 +56,6 @@ use crate::util::fs::LaxSingleProcessFsFlag;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressMessagePrompt;

- use super::super::cache::NpmCache;
- use super::super::cache::TarballCache;
use super::super::resolution::NpmResolution;
use super::common::bin_entries;
use super::common::NpmPackageFsResolver;

@@ -63,12 +65,12 @@ use super::common::RegistryReadPermissionChecker;
/// and resolves packages from it.
#[derive(Debug)]
pub struct LocalNpmPackageResolver {
- cache: Arc<NpmCache>,
+ cache: Arc<CliNpmCache>,
fs: Arc<dyn deno_fs::FileSystem>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
- tarball_cache: Arc<TarballCache>,
+ tarball_cache: Arc<CliNpmTarballCache>,
root_node_modules_path: PathBuf,
root_node_modules_url: Url,
system_info: NpmSystemInfo,

@@ -79,12 +81,12 @@ pub struct LocalNpmPackageResolver {
impl LocalNpmPackageResolver {
#[allow(clippy::too_many_arguments)]
pub fn new(
- cache: Arc<NpmCache>,
+ cache: Arc<CliNpmCache>,
fs: Arc<dyn deno_fs::FileSystem>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
- tarball_cache: Arc<TarballCache>,
+ tarball_cache: Arc<CliNpmTarballCache>,
node_modules_folder: PathBuf,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,

@@ -236,13 +238,33 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
else {
return Ok(None);
};
- let folder_name = folder_path.parent().unwrap().to_string_lossy();
- Ok(get_package_folder_id_from_folder_name(&folder_name))
+ // ex. project/node_modules/.deno/preact@10.24.3/node_modules/preact/
+ let Some(node_modules_ancestor) = folder_path
+   .ancestors()
+   .find(|ancestor| ancestor.ends_with("node_modules"))
+ else {
+   return Ok(None);
+ };
+ let Some(folder_name) =
+   node_modules_ancestor.parent().and_then(|p| p.file_name())
+ else {
+   return Ok(None);
+ };
+ Ok(get_package_folder_id_from_folder_name(
+   &folder_name.to_string_lossy(),
+ ))
}

- async fn cache_packages(&self) -> Result<(), AnyError> {
+ async fn cache_packages<'a>(
+   &self,
+   caching: PackageCaching<'a>,
+ ) -> Result<(), AnyError> {
+   let snapshot = match caching {
+     PackageCaching::All => self.resolution.snapshot(),
+     PackageCaching::Only(reqs) => self.resolution.subset(&reqs),
+   };
sync_resolution_with_fs(
- &self.resolution.snapshot(),
+ &snapshot,
&self.cache,
&self.npm_install_deps_provider,
&self.progress_bar,

@@ -284,10 +306,10 @@ fn local_node_modules_package_contents_path(
#[allow(clippy::too_many_arguments)]
async fn sync_resolution_with_fs(
snapshot: &NpmResolutionSnapshot,
- cache: &Arc<NpmCache>,
+ cache: &Arc<CliNpmCache>,
npm_install_deps_provider: &NpmInstallDepsProvider,
progress_bar: &ProgressBar,
- tarball_cache: &Arc<TarballCache>,
+ tarball_cache: &Arc<CliNpmTarballCache>,
root_node_modules_dir_path: &Path,
system_info: &NpmSystemInfo,
lifecycle_scripts: &LifecycleScriptsConfig,

@@ -298,6 +320,12 @@ async fn sync_resolution_with_fs(
return Ok(()); // don't create the directory
}

+ // don't set up node_modules (and more importantly try to acquire the file lock)
+ // if we're running as part of a lifecycle script
+ if super::common::lifecycle_scripts::is_running_lifecycle_script() {
+   return Ok(());
+ }
+
let deno_local_registry_dir = root_node_modules_dir_path.join(".deno");
let deno_node_modules_dir = deno_local_registry_dir.join("node_modules");
fs::create_dir_all(&deno_node_modules_dir).with_context(|| {

@@ -328,8 +356,10 @@ async fn sync_resolution_with_fs(
let package_partitions =
snapshot.all_system_packages_partitioned(system_info);
let mut cache_futures = FuturesUnordered::new();
- let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
-   HashMap::with_capacity(package_partitions.packages.len());
+ let mut newest_packages_by_name: HashMap<
+   &StackString,
+   &NpmResolutionPackage,
+ > = HashMap::with_capacity(package_partitions.packages.len());
let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
let mut lifecycle_scripts =
super::common::lifecycle_scripts::LifecycleScripts::new(

@@ -509,7 +539,7 @@ async fn sync_resolution_with_fs(
}
}

- let mut found_names: HashMap<&String, &PackageNv> = HashMap::new();
+ let mut found_names: HashMap<&StackString, &PackageNv> = HashMap::new();

// set of node_modules in workspace packages that we've already ensured exist
let mut existing_child_node_modules_dirs: HashSet<PathBuf> = HashSet::new();

@@ -985,10 +1015,10 @@ fn get_package_folder_id_from_folder_name(
) -> Option<NpmPackageCacheFolderId> {
let folder_name = folder_name.replace('+', "/");
let (name, ending) = folder_name.rsplit_once('@')?;
- let name = if let Some(encoded_name) = name.strip_prefix('_') {
-   mixed_case_package_name_decode(encoded_name)?
+ let name: StackString = if let Some(encoded_name) = name.strip_prefix('_') {
+   StackString::from_string(mixed_case_package_name_decode(encoded_name)?)
} else {
- name.to_string()
+ name.into()
};
let (raw_version, copy_index) = match ending.split_once('_') {
Some((raw_version, copy_index)) => {
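The `get_package_folder_id_from_folder_name` change above only swaps `String` for `StackString`; the decoding scheme for `.deno` folder names stays the same. A hedged sketch of that scheme with std types only (the mixed-case `_`-prefix encoding handled by the real function is left out):

```rust
// Decodes names like "preact@10.24.3", "@scope+pkg@1.0.0" or "preact@10.24.3_1":
// '+' encodes '/', and an optional "_N" suffix is a copy index.
fn parse_deno_folder_name(folder_name: &str) -> Option<(String, String, u8)> {
  let folder_name = folder_name.replace('+', "/");
  let (name, ending) = folder_name.rsplit_once('@')?;
  let (version, copy_index) = match ending.split_once('_') {
    Some((version, copy_index)) => (version, copy_index.parse().ok()?),
    None => (ending, 0),
  };
  Some((name.to_string(), version.to_string(), copy_index))
}
```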
@@ -12,6 +12,8 @@ use deno_runtime::deno_fs::FileSystem;

use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
+ use crate::npm::CliNpmCache;
+ use crate::npm::CliNpmTarballCache;
use crate::util::progress_bar::ProgressBar;

pub use self::common::NpmPackageFsResolver;

@@ -19,18 +21,16 @@ pub use self::common::NpmPackageFsResolver;
use self::global::GlobalNpmPackageResolver;
use self::local::LocalNpmPackageResolver;

- use super::cache::NpmCache;
- use super::cache::TarballCache;
use super::resolution::NpmResolution;

#[allow(clippy::too_many_arguments)]
pub fn create_npm_fs_resolver(
fs: Arc<dyn FileSystem>,
- npm_cache: Arc<NpmCache>,
+ npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: &Arc<NpmInstallDepsProvider>,
progress_bar: &ProgressBar,
resolution: Arc<NpmResolution>,
- tarball_cache: Arc<TarballCache>,
+ tarball_cache: Arc<CliNpmTarballCache>,
maybe_node_modules_path: Option<PathBuf>,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
118  cli/npm/mod.rs

@@ -1,33 +1,39 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

mod byonm;
- mod common;
mod managed;

use std::borrow::Cow;
use std::path::Path;
use std::sync::Arc;

- use common::maybe_auth_header_for_npm_registry;
use dashmap::DashMap;
use deno_core::error::AnyError;
use deno_core::serde_json;
+ use deno_core::url::Url;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmInNpmPackageChecker;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::CliNpmReqResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
+ use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
- use managed::cache::registry_info::get_package_url;
+ use http::HeaderName;
+ use http::HeaderValue;
use managed::create_managed_in_npm_pkg_checker;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;

- use crate::file_fetcher::FileFetcher;
+ use crate::file_fetcher::CliFileFetcher;
+ use crate::http_util::HttpClientProvider;
+ use crate::util::fs::atomic_write_file_with_retries_and_fs;
+ use crate::util::fs::hard_link_dir_recursive;
+ use crate::util::fs::AtomicWriteFileFsAdapter;
+ use crate::util::progress_bar::ProgressBar;

pub use self::byonm::CliByonmNpmResolver;
pub use self::byonm::CliByonmNpmResolverCreateOptions;

@@ -35,6 +41,100 @@ pub use self::managed::CliManagedInNpmPkgCheckerCreateOptions;
pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver;
+ pub use self::managed::PackageCaching;
+
+ pub type CliNpmTarballCache = deno_npm_cache::TarballCache<CliNpmCacheEnv>;
+ pub type CliNpmCache = deno_npm_cache::NpmCache<CliNpmCacheEnv>;
+ pub type CliNpmRegistryInfoProvider =
+   deno_npm_cache::RegistryInfoProvider<CliNpmCacheEnv>;
+
+ #[derive(Debug)]
+ pub struct CliNpmCacheEnv {
+   fs: Arc<dyn FileSystem>,
+   http_client_provider: Arc<HttpClientProvider>,
+   progress_bar: ProgressBar,
+ }
+
+ impl CliNpmCacheEnv {
+   pub fn new(
+     fs: Arc<dyn FileSystem>,
+     http_client_provider: Arc<HttpClientProvider>,
+     progress_bar: ProgressBar,
+   ) -> Self {
+     Self {
+       fs,
+       http_client_provider,
+       progress_bar,
+     }
+   }
+ }
+
+ #[async_trait::async_trait(?Send)]
+ impl deno_npm_cache::NpmCacheEnv for CliNpmCacheEnv {
+   fn exists(&self, path: &Path) -> bool {
+     self.fs.exists_sync(path)
+   }
+
+   fn hard_link_dir_recursive(
+     &self,
+     from: &Path,
+     to: &Path,
+   ) -> Result<(), AnyError> {
+     // todo(dsherret): use self.fs here instead
+     hard_link_dir_recursive(from, to)
+   }
+
+   fn atomic_write_file_with_retries(
+     &self,
+     file_path: &Path,
+     data: &[u8],
+   ) -> std::io::Result<()> {
+     atomic_write_file_with_retries_and_fs(
+       &AtomicWriteFileFsAdapter {
+         fs: self.fs.as_ref(),
+         write_mode: crate::cache::CACHE_PERM,
+       },
+       file_path,
+       data,
+     )
+   }
+
+   async fn download_with_retries_on_any_tokio_runtime(
+     &self,
+     url: Url,
+     maybe_auth_header: Option<(HeaderName, HeaderValue)>,
+   ) -> Result<Option<Vec<u8>>, deno_npm_cache::DownloadError> {
+     let guard = self.progress_bar.update(url.as_str());
+     let client = self.http_client_provider.get_or_create().map_err(|err| {
+       deno_npm_cache::DownloadError {
+         status_code: None,
+         error: err,
+       }
+     })?;
+     client
+       .download_with_progress_and_retries(url, maybe_auth_header, &guard)
+       .await
+       .map_err(|err| {
+         use crate::http_util::DownloadErrorKind::*;
+         let status_code = match err.as_kind() {
+           Fetch { .. }
+           | UrlParse { .. }
+           | HttpParse { .. }
+           | Json { .. }
+           | ToStr { .. }
+           | RedirectHeaderParse { .. }
+           | TooManyRedirects => None,
+           BadResponse(bad_response_error) => {
+             Some(bad_response_error.status_code)
+           }
+         };
+         deno_npm_cache::DownloadError {
+           status_code,
+           error: err.into(),
+         }
+       })
+   }
+ }

pub enum CliNpmResolverCreateOptions {
Managed(CliManagedNpmResolverCreateOptions),

@@ -132,13 +232,13 @@ pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
pub struct NpmFetchResolver {
nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
- file_fetcher: Arc<FileFetcher>,
+ file_fetcher: Arc<CliFileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
}

impl NpmFetchResolver {
pub fn new(
- file_fetcher: Arc<FileFetcher>,
+ file_fetcher: Arc<CliFileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
) -> Self {
Self {

@@ -179,13 +279,15 @@ impl NpmFetchResolver {
if let Some(info) = self.info_by_name.get(name) {
return info.value().clone();
}
+ // todo(#27198): use RegistryInfoProvider instead
let fetch_package_info = || async {
- let info_url = get_package_url(&self.npmrc, name);
+ let info_url = deno_npm_cache::get_package_url(&self.npmrc, name);
let file_fetcher = self.file_fetcher.clone();
let registry_config = self.npmrc.get_registry_config(name);
// TODO(bartlomieju): this should error out, not use `.ok()`.
let maybe_auth_header =
- maybe_auth_header_for_npm_registry(registry_config).ok()?;
+ deno_npm_cache::maybe_auth_header_for_npm_registry(registry_config)
+   .ok()?;
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move {
file_fetcher
34  cli/ops/lint.rs

@@ -0,0 +1,34 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::op2;

use crate::tools::lint;

deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],);

#[op2]
#[buffer]
fn op_lint_create_serialized_ast(
  #[string] file_name: &str,
  #[string] source: String,
) -> Result<Vec<u8>, AnyError> {
  let file_text = deno_ast::strip_bom(source);
  let path = std::env::current_dir()?.join(file_name);
  let specifier = ModuleSpecifier::from_file_path(&path).map_err(|_| {
    generic_error(format!("Failed to parse path as URL: {}", path.display()))
  })?;
  let media_type = MediaType::from_specifier(&specifier);
  let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
    specifier,
    text: file_text.into(),
    media_type,
    capture_tokens: false,
    scope_analysis: false,
    maybe_syntax: None,
  })?;
  Ok(lint::serialize_ast_to_buffer(&parsed_source))
}
@@ -2,4 +2,5 @@

pub mod bench;
pub mod jupyter;
+ pub mod lint;
pub mod testing;
@@ -12,7 +12,6 @@ use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
- use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::NpmLoadError;

@@ -25,25 +24,25 @@ use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_semver::package::PackageReq;
- use node_resolver::NodeModuleKind;
- use node_resolver::NodeResolutionMode;
+ use node_resolver::NodeResolutionKind;
+ use node_resolver::ResolutionMode;
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;

+ use crate::args::NpmCachingStrategy;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::util::sync::AtomicFlag;
- use crate::util::text_encoding::from_utf8_lossy_owned;
+ use crate::util::text_encoding::from_utf8_lossy_cow;

pub type CjsTracker = deno_resolver::cjs::CjsTracker<DenoFsNodeResolverEnv>;
pub type IsCjsResolver =
  deno_resolver::cjs::IsCjsResolver<DenoFsNodeResolverEnv>;
- pub type IsCjsResolverOptions = deno_resolver::cjs::IsCjsResolverOptions;
pub type CliSloppyImportsResolver =
  SloppyImportsResolver<SloppyImportsCachedFs>;
pub type CliDenoResolver = deno_resolver::DenoResolver<

@@ -64,7 +63,10 @@ pub struct ModuleCodeStringSource {
pub struct CliDenoResolverFs(pub Arc<dyn FileSystem>);

impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
- fn read_to_string_lossy(&self, path: &Path) -> std::io::Result<String> {
+ fn read_to_string_lossy(
+   &self,
+   path: &Path,
+ ) -> std::io::Result<Cow<'static, str>> {
self
.0
.read_text_file_lossy_sync(path, None)

@@ -184,18 +186,21 @@ impl NpmModuleLoader {

let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
// translate cjs to esm if it's cjs and inject node globals
- let code = from_utf8_lossy_owned(code);
+ let code = from_utf8_lossy_cow(code);
ModuleSourceCode::String(
self
.node_code_translator
- .translate_cjs_to_esm(specifier, Some(Cow::Owned(code)))
+ .translate_cjs_to_esm(specifier, Some(code))
.await?
.into_owned()
.into(),
)
} else {
// esm and json code is untouched
- ModuleSourceCode::Bytes(code.into_boxed_slice().into())
+ ModuleSourceCode::Bytes(match code {
+   Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
+   Cow::Borrowed(bytes) => bytes.into(),
+ })
};

Ok(ModuleCodeStringSource {

@@ -236,36 +241,29 @@ impl CliResolver {

// todo(dsherret): move this off CliResolver as CliResolver is acting
// like a factory by doing this (it's beyond its responsibility)
- pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver {
+ pub fn create_graph_npm_resolver(
+   &self,
+   npm_caching: NpmCachingStrategy,
+ ) -> WorkerCliNpmGraphResolver {
WorkerCliNpmGraphResolver {
npm_resolver: self.npm_resolver.as_ref(),
found_package_json_dep_flag: &self.found_package_json_dep_flag,
bare_node_builtins_enabled: self.bare_node_builtins_enabled,
+ npm_caching,
}
}

pub fn resolve(
&self,
raw_specifier: &str,
- referrer_range: &deno_graph::Range,
- referrer_kind: NodeModuleKind,
- mode: ResolutionMode,
+ referrer: &ModuleSpecifier,
+ referrer_range_start: deno_graph::Position,
+ resolution_mode: ResolutionMode,
+ resolution_kind: NodeResolutionKind,
) -> Result<ModuleSpecifier, ResolveError> {
- fn to_node_mode(mode: ResolutionMode) -> NodeResolutionMode {
-   match mode {
-     ResolutionMode::Execution => NodeResolutionMode::Execution,
-     ResolutionMode::Types => NodeResolutionMode::Types,
-   }
- }
-
let resolution = self
.deno_resolver
- .resolve(
-   raw_specifier,
-   &referrer_range.specifier,
-   referrer_kind,
-   to_node_mode(mode),
- )
+ .resolve(raw_specifier, referrer, resolution_mode, resolution_kind)
.map_err(|err| match err.into_kind() {
deno_resolver::DenoResolveErrorKind::MappedResolution(
mapped_resolution_error,

@@ -291,10 +289,11 @@ impl CliResolver {
} => {
if self.warned_pkgs.insert(reference.req().clone()) {
log::warn!(
- "{} {}\n at {}",
+ "{} {}\n at {}:{}",
colors::yellow("Warning"),
diagnostic,
- referrer_range
+ referrer,
+ referrer_range_start,
);
}
}

@@ -310,6 +309,7 @@ pub struct WorkerCliNpmGraphResolver<'a> {
npm_resolver: Option<&'a Arc<dyn CliNpmResolver>>,
found_package_json_dep_flag: &'a AtomicFlag,
bare_node_builtins_enabled: bool,
+ npm_caching: NpmCachingStrategy,
}

#[async_trait(?Send)]

@@ -335,13 +335,10 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
module_name: &str,
range: &deno_graph::Range,
) {
- let deno_graph::Range {
-   start, specifier, ..
- } = range;
- let line = start.line + 1;
- let column = start.character + 1;
+ let start = range.range.start;
+ let specifier = &range.specifier;
if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS {
- log::warn!("{} Resolving \"{module_name}\" as \"node:{module_name}\" at {specifier}:{line}:{column}. If you want to use a built-in Node module, add a \"node:\" prefix.", colors::yellow("Warning"))
+ log::warn!("{} Resolving \"{module_name}\" as \"node:{module_name}\" at {specifier}:{start}. If you want to use a built-in Node module, add a \"node:\" prefix.", colors::yellow("Warning"))
}
}

@@ -382,7 +379,20 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
Ok(())
};

- let result = npm_resolver.add_package_reqs_raw(package_reqs).await;
+ let result = npm_resolver
+   .add_package_reqs_raw(
+     package_reqs,
+     match self.npm_caching {
+       NpmCachingStrategy::Eager => {
+         Some(crate::npm::PackageCaching::All)
+       }
+       NpmCachingStrategy::Lazy => {
+         Some(crate::npm::PackageCaching::Only(package_reqs.into()))
+       }
+       NpmCachingStrategy::Manual => None,
+     },
+   )
+   .await;

NpmResolvePkgReqsResult {
results: result
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.",
|
"description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.",
|
||||||
"items": {
|
"items": {
|
||||||
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/tags.v1.json"
|
"$ref": "lint-tags.v1.json"
|
||||||
},
|
},
|
||||||
"minItems": 0,
|
"minItems": 0,
|
||||||
"uniqueItems": true
|
"uniqueItems": true
|
||||||
|
@ -300,7 +300,7 @@
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.",
|
"description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.",
|
||||||
"items": {
|
"items": {
|
||||||
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
|
"$ref": "lint-rules.v1.json"
|
||||||
},
|
},
|
||||||
"minItems": 0,
|
"minItems": 0,
|
||||||
"uniqueItems": true
|
"uniqueItems": true
|
||||||
|
@ -309,7 +309,7 @@
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.",
|
"description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.",
|
||||||
"items": {
|
"items": {
|
||||||
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
|
"$ref": "lint-rules.v1.json"
|
||||||
},
|
},
|
||||||
"minItems": 0,
|
"minItems": 0,
|
||||||
"uniqueItems": true
|
"uniqueItems": true
|
||||||
|
@ -446,7 +446,6 @@
|
||||||
},
|
},
|
||||||
"command": {
|
"command": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"required": true,
|
|
||||||
"description": "The task to execute"
|
"description": "The task to execute"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
@ -554,6 +553,7 @@
|
||||||
"bare-node-builtins",
|
"bare-node-builtins",
|
||||||
"byonm",
|
"byonm",
|
||||||
"cron",
|
"cron",
|
||||||
|
"detect-cjs",
|
||||||
"ffi",
|
"ffi",
|
||||||
"fs",
|
"fs",
|
||||||
"fmt-component",
|
"fmt-component",
|
||||||
|
|
112  cli/schemas/lint-rules.v1.json

@@ -0,0 +1,112 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "enum": [
    "adjacent-overload-signatures",
    "ban-ts-comment",
    "ban-types",
    "ban-unknown-rule-code",
    "ban-untagged-ignore",
    "ban-untagged-todo",
    "ban-unused-ignore",
    "camelcase",
    "constructor-super",
    "default-param-last",
    "eqeqeq",
    "explicit-function-return-type",
    "explicit-module-boundary-types",
    "for-direction",
    "fresh-handler-export",
    "fresh-server-event-handlers",
    "getter-return",
    "guard-for-in",
    "no-array-constructor",
    "no-async-promise-executor",
    "no-await-in-loop",
    "no-await-in-sync-fn",
    "no-boolean-literal-for-arguments",
    "no-case-declarations",
    "no-class-assign",
    "no-compare-neg-zero",
    "no-cond-assign",
    "no-console",
    "no-const-assign",
    "no-constant-condition",
    "no-control-regex",
    "no-debugger",
    "no-delete-var",
    "no-deprecated-deno-api",
    "no-dupe-args",
    "no-dupe-class-members",
    "no-dupe-else-if",
    "no-dupe-keys",
    "no-duplicate-case",
    "no-empty",
    "no-empty-character-class",
    "no-empty-enum",
    "no-empty-interface",
    "no-empty-pattern",
    "no-eval",
    "no-ex-assign",
    "no-explicit-any",
    "no-external-import",
    "no-extra-boolean-cast",
    "no-extra-non-null-assertion",
    "no-fallthrough",
    "no-func-assign",
    "no-global-assign",
    "no-implicit-declare-namespace-export",
    "no-import-assertions",
    "no-import-assign",
    "no-inferrable-types",
    "no-inner-declarations",
    "no-invalid-regexp",
    "no-invalid-triple-slash-reference",
    "no-irregular-whitespace",
    "no-misused-new",
    "no-namespace",
    "no-new-symbol",
    "no-node-globals",
    "no-non-null-asserted-optional-chain",
    "no-non-null-assertion",
    "no-obj-calls",
    "no-octal",
    "no-process-globals",
    "no-prototype-builtins",
    "no-redeclare",
    "no-regex-spaces",
    "no-self-assign",
    "no-self-compare",
    "no-setter-return",
    "no-shadow-restricted-names",
    "no-sloppy-imports",
    "no-slow-types",
    "no-sparse-arrays",
    "no-sync-fn-in-async-fn",
    "no-this-alias",
    "no-this-before-super",
    "no-throw-literal",
    "no-top-level-await",
    "no-undef",
    "no-unreachable",
    "no-unsafe-finally",
    "no-unsafe-negation",
    "no-unused-labels",
    "no-unused-vars",
    "no-var",
    "no-window",
    "no-window-prefix",
    "no-with",
    "prefer-as-const",
    "prefer-ascii",
    "prefer-const",
    "prefer-namespace-keyword",
    "prefer-primordials",
    "require-await",
    "require-yield",
    "single-var-declarator",
    "triple-slash-reference",
    "use-isnan",
    "valid-typeof",
    "verbatim-module-syntax"
  ]
}
4  cli/schemas/lint-tags.v1.json

@@ -0,0 +1,4 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "enum": ["fresh", "jsr", "jsx", "react", "recommended"]
}
@ -4,6 +4,7 @@ use std::borrow::Cow;
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::collections::VecDeque;
|
use std::collections::VecDeque;
|
||||||
|
use std::env;
|
||||||
use std::env::current_exe;
|
use std::env::current_exe;
|
||||||
use std::ffi::OsString;
|
use std::ffi::OsString;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
|
@ -15,6 +16,7 @@ use std::io::Seek;
|
||||||
use std::io::SeekFrom;
|
use std::io::SeekFrom;
|
||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
|
use std::path::Component;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
@ -42,16 +44,19 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
|
||||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||||
use deno_npm::NpmPackageId;
|
use deno_npm::NpmPackageId;
|
||||||
use deno_npm::NpmSystemInfo;
|
use deno_npm::NpmSystemInfo;
|
||||||
|
use deno_path_util::url_from_directory_path;
|
||||||
|
use deno_path_util::url_from_file_path;
|
||||||
|
use deno_path_util::url_to_file_path;
|
||||||
use deno_runtime::deno_fs;
|
use deno_runtime::deno_fs;
|
||||||
use deno_runtime::deno_fs::FileSystem;
|
use deno_runtime::deno_fs::FileSystem;
|
||||||
use deno_runtime::deno_fs::RealFs;
|
use deno_runtime::deno_fs::RealFs;
|
||||||
use deno_runtime::deno_io::fs::FsError;
|
use deno_runtime::deno_io::fs::FsError;
|
||||||
use deno_runtime::deno_node::PackageJson;
|
use deno_runtime::deno_node::PackageJson;
|
||||||
use deno_runtime::ops::otel::OtelConfig;
|
|
||||||
use deno_semver::npm::NpmVersionReqParseError;
|
use deno_semver::npm::NpmVersionReqParseError;
|
||||||
use deno_semver::package::PackageReq;
|
use deno_semver::package::PackageReq;
|
||||||
use deno_semver::Version;
|
use deno_semver::Version;
|
||||||
use deno_semver::VersionReqSpecifierParseError;
|
use deno_semver::VersionReqSpecifierParseError;
|
||||||
|
use deno_telemetry::OtelConfig;
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
use log::Level;
|
use log::Level;
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
|
@ -66,7 +71,7 @@ use crate::args::UnstableConfig;
|
||||||
use crate::cache::DenoDir;
|
use crate::cache::DenoDir;
|
||||||
use crate::cache::FastInsecureHasher;
|
use crate::cache::FastInsecureHasher;
|
||||||
use crate::emit::Emitter;
|
use crate::emit::Emitter;
|
||||||
use crate::file_fetcher::FileFetcher;
|
use crate::file_fetcher::CliFileFetcher;
|
||||||
use crate::http_util::HttpClientProvider;
|
use crate::http_util::HttpClientProvider;
|
||||||
use crate::npm::CliNpmResolver;
|
use crate::npm::CliNpmResolver;
|
||||||
use crate::npm::InnerCliNpmResolverRef;
|
use crate::npm::InnerCliNpmResolverRef;
|
||||||
|
@ -74,6 +79,7 @@ use crate::resolver::CjsTracker;
|
||||||
use crate::shared::ReleaseChannel;
|
use crate::shared::ReleaseChannel;
|
||||||
use crate::standalone::virtual_fs::VfsEntry;
|
use crate::standalone::virtual_fs::VfsEntry;
|
||||||
use crate::util::archive;
|
use crate::util::archive;
|
||||||
|
use crate::util::fs::canonicalize_path;
|
||||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||||
use crate::util::progress_bar::ProgressBar;
|
use crate::util::progress_bar::ProgressBar;
|
||||||
use crate::util::progress_bar::ProgressBarStyle;
|
use crate::util::progress_bar::ProgressBarStyle;
|
@@ -85,30 +91,31 @@ use super::serialization::DenoCompileModuleData;
use super::serialization::DeserializedDataSection;
use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder;
+use super::serialization::SourceMapStore;
+use super::virtual_fs::output_vfs;
+use super::virtual_fs::BuiltVfs;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsBuilder;
+use super::virtual_fs::VfsFileSubDataKind;
use super::virtual_fs::VfsRoot;
use super::virtual_fs::VirtualDirectory;
+use super::virtual_fs::VirtualDirectoryEntries;
+use super::virtual_fs::WindowsSystemRootablePath;

+pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
+  ".deno_compile_node_modules";
+
/// A URL that can be designated as the base for relative URLs.
///
/// After creation, this URL may be used to get the key for a
/// module in the binary.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct StandaloneRelativeFileBaseUrl<'a>(&'a Url);
-
-impl<'a> From<&'a Url> for StandaloneRelativeFileBaseUrl<'a> {
-  fn from(url: &'a Url) -> Self {
-    Self(url)
-  }
+pub enum StandaloneRelativeFileBaseUrl<'a> {
+  WindowsSystemRoot,
+  Path(&'a Url),
}

impl<'a> StandaloneRelativeFileBaseUrl<'a> {
-  pub fn new(url: &'a Url) -> Self {
-    debug_assert_eq!(url.scheme(), "file");
-    Self(url)
-  }
-
  /// Gets the module map key of the provided specifier.
  ///
  /// * Descendant file specifiers will be made relative to the base.
@@ -118,22 +125,29 @@ impl<'a> StandaloneRelativeFileBaseUrl<'a> {
    if target.scheme() != "file" {
      return Cow::Borrowed(target.as_str());
    }
+    let base = match self {
+      Self::Path(base) => base,
+      Self::WindowsSystemRoot => return Cow::Borrowed(target.path()),
+    };

-    match self.0.make_relative(target) {
+    match base.make_relative(target) {
      Some(relative) => {
-        if relative.starts_with("../") {
-          Cow::Borrowed(target.as_str())
-        } else {
-          Cow::Owned(relative)
-        }
+        // This is not a great scenario to have because it means that the
+        // specifier is outside the vfs and could cause the binary to act
+        // strangely. If you encounter this, the fix is to add more paths
+        // to the vfs builder by calling `add_possible_min_root_dir`.
+        debug_assert!(
+          !relative.starts_with("../"),
+          "{} -> {} ({})",
+          base.as_str(),
+          target.as_str(),
+          relative,
+        );
+        Cow::Owned(relative)
      }
      None => Cow::Borrowed(target.as_str()),
    }
  }
-
-  pub fn inner(&self) -> &Url {
-    self.0
-  }
}
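The relative-key logic above can be reproduced outside of Deno with the url crate (the crate whose Url type deno_core re-exports): make_relative returns None for unrelated URLs and a ../-prefixed path when the target escapes the base directory. The following is a minimal sketch under those assumptions, with invented names, not the actual specifier_key implementation.

use std::borrow::Cow;
use url::Url;

// Compute a module-map key for `target` relative to a directory `base`
// (a file: URL with a trailing slash). Falls back to the absolute URL
// whenever a clean descendant-relative path cannot be produced.
fn relative_key<'a>(base: &Url, target: &'a Url) -> Cow<'a, str> {
  if target.scheme() != "file" {
    // Remote specifiers (e.g. https:) keep their full URL as the key.
    return Cow::Borrowed(target.as_str());
  }
  match base.make_relative(target) {
    Some(rel) if !rel.starts_with("../") => Cow::Owned(rel),
    _ => Cow::Borrowed(target.as_str()),
  }
}

fn main() {
  let base = Url::parse("file:///app/").unwrap();
  let inside = Url::parse("file:///app/src/main.ts").unwrap();
  let outside = Url::parse("file:///etc/hosts").unwrap();
  assert_eq!(relative_key(&base, &inside), "src/main.ts");
  assert_eq!(relative_key(&base, &outside), "file:///etc/hosts");
}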

#[derive(Deserialize, Serialize)]
@@ -188,21 +202,28 @@ pub struct Metadata {
  pub entrypoint_key: String,
  pub node_modules: Option<NodeModules>,
  pub unstable_config: UnstableConfig,
-  pub otel_config: Option<OtelConfig>, // None means disabled.
+  pub otel_config: OtelConfig,
}

+#[allow(clippy::too_many_arguments)]
fn write_binary_bytes(
  mut file_writer: File,
  original_bin: Vec<u8>,
  metadata: &Metadata,
  npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
  remote_modules: &RemoteModulesStoreBuilder,
-  vfs: VfsBuilder,
+  source_map_store: &SourceMapStore,
+  vfs: &BuiltVfs,
  compile_flags: &CompileFlags,
) -> Result<(), AnyError> {
-  let data_section_bytes =
-    serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)
-      .context("Serializing binary data section.")?;
+  let data_section_bytes = serialize_binary_data_section(
+    metadata,
+    npm_snapshot,
+    remote_modules,
+    source_map_store,
+    vfs,
+  )
+  .context("Serializing binary data section.")?;

  let target = compile_flags.resolve_target();
  if target.contains("linux") {
@@ -244,6 +265,7 @@ pub struct StandaloneData {
  pub modules: StandaloneModules,
  pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
  pub root_path: PathBuf,
+  pub source_maps: SourceMapStore,
  pub vfs: Arc<FileBackedVfs>,
}

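For context on what write_binary_bytes appends: the data section is a run of length-prefixed chunks (metadata, npm snapshot, remote modules, VFS data) bracketed by the d3n0l4nd magic bytes, per the layout comment in serialization.rs further down. Below is a self-contained sketch of just the length-prefix convention, using little-endian u64 lengths like the serializer does; it is an illustration, not the real format reader.

// Minimal sketch of the length-prefix convention used by the data section:
// each chunk is written as a little-endian u64 length followed by the bytes.
fn write_chunk(out: &mut Vec<u8>, data: &[u8]) {
  out.extend_from_slice(&(data.len() as u64).to_le_bytes());
  out.extend_from_slice(data);
}

// Reads one chunk back, returning the chunk and the remaining input.
fn read_chunk(input: &[u8]) -> Option<(&[u8], &[u8])> {
  if input.len() < 8 {
    return None;
  }
  let (len_bytes, rest) = input.split_at(8);
  let len = u64::from_le_bytes(len_bytes.try_into().ok()?) as usize;
  if rest.len() < len {
    return None;
  }
  Some(rest.split_at(len))
}

fn main() {
  let mut buf = Vec::new();
  write_chunk(&mut buf, b"{\"argv\":[]}");
  let (chunk, rest) = read_chunk(&buf).unwrap();
  assert_eq!(chunk, b"{\"argv\":[]}");
  assert!(rest.is_empty());
}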
@@ -271,20 +293,20 @@ impl StandaloneModules {
  pub fn read<'a>(
    &'a self,
    specifier: &'a ModuleSpecifier,
+    kind: VfsFileSubDataKind,
  ) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
    if specifier.scheme() == "file" {
      let path = deno_path_util::url_to_file_path(specifier)?;
      let bytes = match self.vfs.file_entry(&path) {
-        Ok(entry) => self.vfs.read_file_all(entry)?,
+        Ok(entry) => self.vfs.read_file_all(entry, kind)?,
        Err(err) if err.kind() == ErrorKind::NotFound => {
-          let bytes = match RealFs.read_file_sync(&path, None) {
+          match RealFs.read_file_sync(&path, None) {
            Ok(bytes) => bytes,
            Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
              return Ok(None)
            }
            Err(err) => return Err(err.into()),
-          };
-          Cow::Owned(bytes)
+          }
        }
        Err(err) => return Err(err.into()),
      };
@@ -294,7 +316,18 @@ impl StandaloneModules {
          data: bytes,
        }))
    } else {
-      self.remote_modules.read(specifier)
+      self.remote_modules.read(specifier).map(|maybe_entry| {
+        maybe_entry.map(|entry| DenoCompileModuleData {
+          media_type: entry.media_type,
+          specifier: entry.specifier,
+          data: match kind {
+            VfsFileSubDataKind::Raw => entry.data,
+            VfsFileSubDataKind::ModuleGraph => {
+              entry.transpiled_data.unwrap_or(entry.data)
+            }
+          },
+        })
+      })
    }
  }
}
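The Raw versus ModuleGraph split introduced here means a stored module may carry an optional transpiled copy alongside its original bytes, and the ModuleGraph view falls back to the raw bytes when nothing was transpiled (plain JS, JSON). A small standalone sketch of that fallback, with invented type names rather than the real VfsFileSubDataKind machinery:

#[derive(Clone, Copy)]
enum SubDataKind {
  Raw,
  ModuleGraph,
}

struct StoredModule {
  raw: Vec<u8>,
  // Only present when emitting actually changed the source.
  transpiled: Option<Vec<u8>>,
}

impl StoredModule {
  fn bytes(&self, kind: SubDataKind) -> &[u8] {
    match kind {
      SubDataKind::Raw => &self.raw,
      // Fall back to the raw bytes when there is no transpiled variant.
      SubDataKind::ModuleGraph => self.transpiled.as_deref().unwrap_or(&self.raw),
    }
  }
}

fn main() {
  let js = StoredModule { raw: b"export {}".to_vec(), transpiled: None };
  assert_eq!(js.bytes(SubDataKind::ModuleGraph), b"export {}".as_slice());
}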
@@ -315,7 +348,8 @@ pub fn extract_standalone(
    mut metadata,
    npm_snapshot,
    remote_modules,
-    mut vfs_dir,
+    source_maps,
+    vfs_root_entries,
    vfs_files_data,
  } = match deserialize_binary_data_section(data)? {
    Some(data_section) => data_section,
@@ -338,11 +372,12 @@ pub fn extract_standalone(
    metadata.argv.push(arg.into_string().unwrap());
  }
  let vfs = {
-    // align the name of the directory with the root dir
-    vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string();
-
    let fs_root = VfsRoot {
-      dir: vfs_dir,
+      dir: VirtualDirectory {
+        // align the name of the directory with the root dir
+        name: root_path.file_name().unwrap().to_string_lossy().to_string(),
+        entries: vfs_root_entries,
+      },
      root_path: root_path.clone(),
      start_file_offset: 0,
    };
|
@ -359,16 +394,26 @@ pub fn extract_standalone(
|
||||||
},
|
},
|
||||||
npm_snapshot,
|
npm_snapshot,
|
||||||
root_path,
|
root_path,
|
||||||
|
source_maps,
|
||||||
vfs,
|
vfs,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct WriteBinOptions<'a> {
|
||||||
|
pub writer: File,
|
||||||
|
pub display_output_filename: &'a str,
|
||||||
|
pub graph: &'a ModuleGraph,
|
||||||
|
pub entrypoint: &'a ModuleSpecifier,
|
||||||
|
pub include_files: &'a [ModuleSpecifier],
|
||||||
|
pub compile_flags: &'a CompileFlags,
|
||||||
|
}
|
||||||
|
|
||||||
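write_bin now receives a single WriteBinOptions value instead of a long positional parameter list, so call sites name every field and new fields can be threaded through without touching each caller. A toy illustration of the same pattern with made-up fields (the real struct uses Deno's File, ModuleGraph and ModuleSpecifier types):

// Sketch of the refactor pattern applied above: a bag of named options
// replaces a long positional argument list, keeping call sites readable.
struct WriteOptions<'a> {
  display_output_filename: &'a str,
  include_files: &'a [String],
  no_terminal: bool,
}

fn write_bin(options: WriteOptions<'_>) {
  println!(
    "writing {} ({} extra files, no_terminal={})",
    options.display_output_filename,
    options.include_files.len(),
    options.no_terminal
  );
}

fn main() {
  let extra = vec!["assets/data.json".to_string()];
  write_bin(WriteOptions {
    display_output_filename: "my_app",
    include_files: &extra,
    no_terminal: false,
  });
}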
pub struct DenoCompileBinaryWriter<'a> {
|
pub struct DenoCompileBinaryWriter<'a> {
|
||||||
cjs_tracker: &'a CjsTracker,
|
cjs_tracker: &'a CjsTracker,
|
||||||
cli_options: &'a CliOptions,
|
cli_options: &'a CliOptions,
|
||||||
deno_dir: &'a DenoDir,
|
deno_dir: &'a DenoDir,
|
||||||
emitter: &'a Emitter,
|
emitter: &'a Emitter,
|
||||||
file_fetcher: &'a FileFetcher,
|
file_fetcher: &'a CliFileFetcher,
|
||||||
http_client_provider: &'a HttpClientProvider,
|
http_client_provider: &'a HttpClientProvider,
|
||||||
npm_resolver: &'a dyn CliNpmResolver,
|
npm_resolver: &'a dyn CliNpmResolver,
|
||||||
workspace_resolver: &'a WorkspaceResolver,
|
workspace_resolver: &'a WorkspaceResolver,
|
||||||
|
@ -382,7 +427,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
cli_options: &'a CliOptions,
|
cli_options: &'a CliOptions,
|
||||||
deno_dir: &'a DenoDir,
|
deno_dir: &'a DenoDir,
|
||||||
emitter: &'a Emitter,
|
emitter: &'a Emitter,
|
||||||
file_fetcher: &'a FileFetcher,
|
file_fetcher: &'a CliFileFetcher,
|
||||||
http_client_provider: &'a HttpClientProvider,
|
http_client_provider: &'a HttpClientProvider,
|
||||||
npm_resolver: &'a dyn CliNpmResolver,
|
npm_resolver: &'a dyn CliNpmResolver,
|
||||||
workspace_resolver: &'a WorkspaceResolver,
|
workspace_resolver: &'a WorkspaceResolver,
|
||||||
|
@ -403,18 +448,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
|
|
||||||
pub async fn write_bin(
|
pub async fn write_bin(
|
||||||
&self,
|
&self,
|
||||||
writer: File,
|
options: WriteBinOptions<'_>,
|
||||||
graph: &ModuleGraph,
|
|
||||||
root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
|
|
||||||
entrypoint: &ModuleSpecifier,
|
|
||||||
include_files: &[ModuleSpecifier],
|
|
||||||
compile_flags: &CompileFlags,
|
|
||||||
) -> Result<(), AnyError> {
|
) -> Result<(), AnyError> {
|
||||||
// Select base binary based on target
|
// Select base binary based on target
|
||||||
let mut original_binary = self.get_base_binary(compile_flags).await?;
|
let mut original_binary =
|
||||||
|
self.get_base_binary(options.compile_flags).await?;
|
||||||
|
|
||||||
if compile_flags.no_terminal {
|
if options.compile_flags.no_terminal {
|
||||||
let target = compile_flags.resolve_target();
|
let target = options.compile_flags.resolve_target();
|
||||||
if !target.contains("windows") {
|
if !target.contains("windows") {
|
||||||
bail!(
|
bail!(
|
||||||
"The `--no-terminal` flag is only available when targeting Windows (current: {})",
|
"The `--no-terminal` flag is only available when targeting Windows (current: {})",
|
||||||
|
@ -424,8 +465,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
set_windows_binary_to_gui(&mut original_binary)
|
set_windows_binary_to_gui(&mut original_binary)
|
||||||
.context("Setting windows binary to GUI.")?;
|
.context("Setting windows binary to GUI.")?;
|
||||||
}
|
}
|
||||||
if compile_flags.icon.is_some() {
|
if options.compile_flags.icon.is_some() {
|
||||||
let target = compile_flags.resolve_target();
|
let target = options.compile_flags.resolve_target();
|
||||||
if !target.contains("windows") {
|
if !target.contains("windows") {
|
||||||
bail!(
|
bail!(
|
||||||
"The `--icon` flag is only available when targeting Windows (current: {})",
|
"The `--icon` flag is only available when targeting Windows (current: {})",
|
||||||
|
@ -433,17 +474,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
self
|
self.write_standalone_binary(options, original_binary)
|
||||||
.write_standalone_binary(
|
|
||||||
writer,
|
|
||||||
original_binary,
|
|
||||||
graph,
|
|
||||||
root_dir_url,
|
|
||||||
entrypoint,
|
|
||||||
include_files,
|
|
||||||
compile_flags,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_base_binary(
|
async fn get_base_binary(
|
||||||
|
@ -454,7 +485,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
//
|
//
|
||||||
// Phase 2 of the 'min sized' deno compile RFC talks
|
// Phase 2 of the 'min sized' deno compile RFC talks
|
||||||
// about adding this as a flag.
|
// about adding this as a flag.
|
||||||
if let Some(path) = std::env::var_os("DENORT_BIN") {
|
if let Some(path) = get_dev_binary_path() {
|
||||||
return std::fs::read(&path).with_context(|| {
|
return std::fs::read(&path).with_context(|| {
|
||||||
format!("Could not find denort at '{}'", path.to_string_lossy())
|
format!("Could not find denort at '{}'", path.to_string_lossy())
|
||||||
});
|
});
|
||||||
|
@ -546,16 +577,19 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
/// This functions creates a standalone deno binary by appending a bundle
|
/// This functions creates a standalone deno binary by appending a bundle
|
||||||
/// and magic trailer to the currently executing binary.
|
/// and magic trailer to the currently executing binary.
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
async fn write_standalone_binary(
|
fn write_standalone_binary(
|
||||||
&self,
|
&self,
|
||||||
writer: File,
|
options: WriteBinOptions<'_>,
|
||||||
original_bin: Vec<u8>,
|
original_bin: Vec<u8>,
|
||||||
graph: &ModuleGraph,
|
|
||||||
root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
|
|
||||||
entrypoint: &ModuleSpecifier,
|
|
||||||
include_files: &[ModuleSpecifier],
|
|
||||||
compile_flags: &CompileFlags,
|
|
||||||
) -> Result<(), AnyError> {
|
) -> Result<(), AnyError> {
|
||||||
|
let WriteBinOptions {
|
||||||
|
writer,
|
||||||
|
display_output_filename,
|
||||||
|
graph,
|
||||||
|
entrypoint,
|
||||||
|
include_files,
|
||||||
|
compile_flags,
|
||||||
|
} = options;
|
||||||
let ca_data = match self.cli_options.ca_data() {
|
let ca_data = match self.cli_options.ca_data() {
|
||||||
Some(CaData::File(ca_file)) => Some(
|
Some(CaData::File(ca_file)) => Some(
|
||||||
std::fs::read(ca_file).with_context(|| format!("Reading {ca_file}"))?,
|
std::fs::read(ca_file).with_context(|| format!("Reading {ca_file}"))?,
|
||||||
|
@ -563,144 +597,187 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
|
Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
|
||||||
None => None,
|
None => None,
|
||||||
};
|
};
|
||||||
let root_path = root_dir_url.inner().to_file_path().unwrap();
|
let mut vfs = VfsBuilder::new();
|
||||||
let (maybe_npm_vfs, node_modules, npm_snapshot) =
|
let npm_snapshot = match self.npm_resolver.as_inner() {
|
||||||
match self.npm_resolver.as_inner() {
|
InnerCliNpmResolverRef::Managed(managed) => {
|
||||||
InnerCliNpmResolverRef::Managed(managed) => {
|
let snapshot =
|
||||||
let snapshot =
|
managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
|
||||||
managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
|
if !snapshot.as_serialized().packages.is_empty() {
|
||||||
if !snapshot.as_serialized().packages.is_empty() {
|
self.fill_npm_vfs(&mut vfs).context("Building npm vfs.")?;
|
||||||
let npm_vfs_builder = self
|
Some(snapshot)
|
||||||
.build_npm_vfs(&root_path)
|
} else {
|
||||||
.context("Building npm vfs.")?;
|
None
|
||||||
(
|
|
||||||
Some(npm_vfs_builder),
|
|
||||||
Some(NodeModules::Managed {
|
|
||||||
node_modules_dir: self
|
|
||||||
.npm_resolver
|
|
||||||
.root_node_modules_path()
|
|
||||||
.map(|path| {
|
|
||||||
root_dir_url
|
|
||||||
.specifier_key(
|
|
||||||
&ModuleSpecifier::from_directory_path(path).unwrap(),
|
|
||||||
)
|
|
||||||
.into_owned()
|
|
||||||
}),
|
|
||||||
}),
|
|
||||||
Some(snapshot),
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
(None, None, None)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
InnerCliNpmResolverRef::Byonm(resolver) => {
|
}
|
||||||
let npm_vfs_builder = self.build_npm_vfs(&root_path)?;
|
InnerCliNpmResolverRef::Byonm(_) => {
|
||||||
(
|
self.fill_npm_vfs(&mut vfs)?;
|
||||||
Some(npm_vfs_builder),
|
None
|
||||||
Some(NodeModules::Byonm {
|
}
|
||||||
root_node_modules_dir: resolver.root_node_modules_path().map(
|
|
||||||
|node_modules_dir| {
|
|
||||||
root_dir_url
|
|
||||||
.specifier_key(
|
|
||||||
&ModuleSpecifier::from_directory_path(node_modules_dir)
|
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
.into_owned()
|
|
||||||
},
|
|
||||||
),
|
|
||||||
}),
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs {
|
|
||||||
npm_vfs
|
|
||||||
} else {
|
|
||||||
VfsBuilder::new(root_path.clone())?
|
|
||||||
};
|
};
|
||||||
for include_file in include_files {
|
for include_file in include_files {
|
||||||
let path = deno_path_util::url_to_file_path(include_file)?;
|
let path = deno_path_util::url_to_file_path(include_file)?;
|
||||||
if path.is_dir() {
|
vfs
|
||||||
// TODO(#26941): we should analyze if any of these are
|
.add_file_at_path(&path)
|
||||||
// modules in order to include their dependencies
|
.with_context(|| format!("Including {}", path.display()))?;
|
||||||
vfs
|
|
||||||
.add_dir_recursive(&path)
|
|
||||||
.with_context(|| format!("Including {}", path.display()))?;
|
|
||||||
} else {
|
|
||||||
vfs
|
|
||||||
.add_file_at_path(&path)
|
|
||||||
.with_context(|| format!("Including {}", path.display()))?;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
|
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
|
||||||
let mut code_cache_key_hasher = if self.cli_options.code_cache_enabled() {
|
let mut source_maps = Vec::with_capacity(graph.specifiers_count());
|
||||||
Some(FastInsecureHasher::new_deno_versioned())
|
// todo(dsherret): transpile in parallel
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
for module in graph.modules() {
|
for module in graph.modules() {
|
||||||
if module.specifier().scheme() == "data" {
|
if module.specifier().scheme() == "data" {
|
||||||
continue; // don't store data urls as an entry as they're in the code
|
continue; // don't store data urls as an entry as they're in the code
|
||||||
}
|
}
|
||||||
if let Some(hasher) = &mut code_cache_key_hasher {
|
let (maybe_original_source, maybe_transpiled, media_type) = match module {
|
||||||
if let Some(source) = module.source() {
|
|
||||||
hasher.write(module.specifier().as_str().as_bytes());
|
|
||||||
hasher.write(source.as_bytes());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let (maybe_source, media_type) = match module {
|
|
||||||
deno_graph::Module::Js(m) => {
|
deno_graph::Module::Js(m) => {
|
||||||
let source = if m.media_type.is_emittable() {
|
let original_bytes = m.source.as_bytes().to_vec();
|
||||||
|
let maybe_transpiled = if m.media_type.is_emittable() {
|
||||||
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
|
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
|
||||||
&m.specifier,
|
&m.specifier,
|
||||||
m.media_type,
|
m.media_type,
|
||||||
m.is_script,
|
m.is_script,
|
||||||
)?;
|
)?;
|
||||||
let module_kind = ModuleKind::from_is_cjs(is_cjs);
|
let module_kind = ModuleKind::from_is_cjs(is_cjs);
|
||||||
let source = self
|
let (source, source_map) =
|
||||||
.emitter
|
self.emitter.emit_parsed_source_for_deno_compile(
|
||||||
.emit_parsed_source(
|
|
||||||
&m.specifier,
|
&m.specifier,
|
||||||
m.media_type,
|
m.media_type,
|
||||||
module_kind,
|
module_kind,
|
||||||
&m.source,
|
&m.source,
|
||||||
)
|
)?;
|
||||||
.await?;
|
if source != m.source.as_ref() {
|
||||||
source.into_bytes()
|
source_maps.push((&m.specifier, source_map));
|
||||||
|
Some(source.into_bytes())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
m.source.as_bytes().to_vec()
|
None
|
||||||
};
|
};
|
||||||
(Some(source), m.media_type)
|
(Some(original_bytes), maybe_transpiled, m.media_type)
|
||||||
}
|
}
|
||||||
deno_graph::Module::Json(m) => {
|
deno_graph::Module::Json(m) => {
|
||||||
(Some(m.source.as_bytes().to_vec()), m.media_type)
|
(Some(m.source.as_bytes().to_vec()), None, m.media_type)
|
||||||
}
|
}
|
||||||
deno_graph::Module::Wasm(m) => {
|
deno_graph::Module::Wasm(m) => {
|
||||||
(Some(m.source.to_vec()), MediaType::Wasm)
|
(Some(m.source.to_vec()), None, MediaType::Wasm)
|
||||||
}
|
}
|
||||||
deno_graph::Module::Npm(_)
|
deno_graph::Module::Npm(_)
|
||||||
| deno_graph::Module::Node(_)
|
| deno_graph::Module::Node(_)
|
||||||
| deno_graph::Module::External(_) => (None, MediaType::Unknown),
|
| deno_graph::Module::External(_) => (None, None, MediaType::Unknown),
|
||||||
};
|
};
|
||||||
if module.specifier().scheme() == "file" {
|
if let Some(original_source) = maybe_original_source {
|
||||||
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
|
if module.specifier().scheme() == "file" {
|
||||||
vfs
|
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
|
||||||
.add_file_with_data(
|
vfs
|
||||||
&file_path,
|
.add_file_with_data(
|
||||||
match maybe_source {
|
&file_path,
|
||||||
Some(source) => source,
|
original_source,
|
||||||
None => RealFs.read_file_sync(&file_path, None)?,
|
VfsFileSubDataKind::Raw,
|
||||||
},
|
)
|
||||||
)
|
.with_context(|| {
|
||||||
.with_context(|| {
|
format!("Failed adding '{}'", file_path.display())
|
||||||
format!("Failed adding '{}'", file_path.display())
|
})?;
|
||||||
})?;
|
if let Some(transpiled_source) = maybe_transpiled {
|
||||||
} else if let Some(source) = maybe_source {
|
vfs
|
||||||
remote_modules_store.add(module.specifier(), media_type, source);
|
.add_file_with_data(
|
||||||
|
&file_path,
|
||||||
|
transpiled_source,
|
||||||
|
VfsFileSubDataKind::ModuleGraph,
|
||||||
|
)
|
||||||
|
.with_context(|| {
|
||||||
|
format!("Failed adding '{}'", file_path.display())
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
remote_modules_store.add(
|
||||||
|
module.specifier(),
|
||||||
|
media_type,
|
||||||
|
original_source,
|
||||||
|
maybe_transpiled,
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
remote_modules_store.add_redirects(&graph.redirects);
|
remote_modules_store.add_redirects(&graph.redirects);
|
||||||
|
|
||||||
|
if let Some(import_map) = self.workspace_resolver.maybe_import_map() {
|
||||||
|
if let Ok(file_path) = url_to_file_path(import_map.base_url()) {
|
||||||
|
if let Some(import_map_parent_dir) = file_path.parent() {
|
||||||
|
// tell the vfs about the import map's parent directory in case it
|
||||||
|
// falls outside what the root of where the VFS will be based
|
||||||
|
vfs.add_possible_min_root_dir(import_map_parent_dir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(node_modules_dir) = self.npm_resolver.root_node_modules_path() {
|
||||||
|
// ensure the vfs doesn't go below the node_modules directory's parent
|
||||||
|
if let Some(parent) = node_modules_dir.parent() {
|
||||||
|
vfs.add_possible_min_root_dir(parent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let vfs = self.build_vfs_consolidating_global_npm_cache(vfs);
|
||||||
|
let root_dir_url = match &vfs.root_path {
|
||||||
|
WindowsSystemRootablePath::Path(dir) => {
|
||||||
|
Some(url_from_directory_path(dir)?)
|
||||||
|
}
|
||||||
|
WindowsSystemRootablePath::WindowSystemRoot => None,
|
||||||
|
};
|
||||||
|
let root_dir_url = match &root_dir_url {
|
||||||
|
Some(url) => StandaloneRelativeFileBaseUrl::Path(url),
|
||||||
|
None => StandaloneRelativeFileBaseUrl::WindowsSystemRoot,
|
||||||
|
};
|
||||||
|
|
||||||
|
let code_cache_key = if self.cli_options.code_cache_enabled() {
|
||||||
|
let mut hasher = FastInsecureHasher::new_deno_versioned();
|
||||||
|
for module in graph.modules() {
|
||||||
|
if let Some(source) = module.source() {
|
||||||
|
hasher
|
||||||
|
.write(root_dir_url.specifier_key(module.specifier()).as_bytes());
|
||||||
|
hasher.write(source.as_bytes());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Some(hasher.finish())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut source_map_store = SourceMapStore::with_capacity(source_maps.len());
|
||||||
|
for (specifier, source_map) in source_maps {
|
||||||
|
source_map_store.add(
|
||||||
|
Cow::Owned(root_dir_url.specifier_key(specifier).into_owned()),
|
||||||
|
Cow::Owned(source_map.into_bytes()),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let node_modules = match self.npm_resolver.as_inner() {
|
||||||
|
InnerCliNpmResolverRef::Managed(_) => {
|
||||||
|
npm_snapshot.as_ref().map(|_| NodeModules::Managed {
|
||||||
|
node_modules_dir: self.npm_resolver.root_node_modules_path().map(
|
||||||
|
|path| {
|
||||||
|
root_dir_url
|
||||||
|
.specifier_key(
|
||||||
|
&ModuleSpecifier::from_directory_path(path).unwrap(),
|
||||||
|
)
|
||||||
|
.into_owned()
|
||||||
|
},
|
||||||
|
),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
InnerCliNpmResolverRef::Byonm(resolver) => Some(NodeModules::Byonm {
|
||||||
|
root_node_modules_dir: resolver.root_node_modules_path().map(
|
||||||
|
|node_modules_dir| {
|
||||||
|
root_dir_url
|
||||||
|
.specifier_key(
|
||||||
|
&ModuleSpecifier::from_directory_path(node_modules_dir)
|
||||||
|
.unwrap(),
|
||||||
|
)
|
||||||
|
.into_owned()
|
||||||
|
},
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
let env_vars_from_env_file = match self.cli_options.env_file_name() {
|
let env_vars_from_env_file = match self.cli_options.env_file_name() {
|
||||||
Some(env_filenames) => {
|
Some(env_filenames) => {
|
||||||
let mut aggregated_env_vars = IndexMap::new();
|
let mut aggregated_env_vars = IndexMap::new();
|
||||||
|
@ -715,10 +792,12 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
None => Default::default(),
|
None => Default::default(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
output_vfs(&vfs, display_output_filename);
|
||||||
|
|
||||||
let metadata = Metadata {
|
let metadata = Metadata {
|
||||||
argv: compile_flags.args.clone(),
|
argv: compile_flags.args.clone(),
|
||||||
seed: self.cli_options.seed(),
|
seed: self.cli_options.seed(),
|
||||||
code_cache_key: code_cache_key_hasher.map(|h| h.finish()),
|
code_cache_key,
|
||||||
location: self.cli_options.location_flag().clone(),
|
location: self.cli_options.location_flag().clone(),
|
||||||
permissions: self.cli_options.permission_flags().clone(),
|
permissions: self.cli_options.permission_flags().clone(),
|
||||||
v8_flags: self.cli_options.v8_flags().clone(),
|
v8_flags: self.cli_options.v8_flags().clone(),
|
||||||
|
@ -771,8 +850,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
unstable_config: UnstableConfig {
|
unstable_config: UnstableConfig {
|
||||||
legacy_flag_enabled: false,
|
legacy_flag_enabled: false,
|
||||||
bare_node_builtins: self.cli_options.unstable_bare_node_builtins(),
|
bare_node_builtins: self.cli_options.unstable_bare_node_builtins(),
|
||||||
|
detect_cjs: self.cli_options.unstable_detect_cjs(),
|
||||||
sloppy_imports: self.cli_options.unstable_sloppy_imports(),
|
sloppy_imports: self.cli_options.unstable_sloppy_imports(),
|
||||||
features: self.cli_options.unstable_features(),
|
features: self.cli_options.unstable_features(),
|
||||||
|
npm_lazy_caching: self.cli_options.unstable_npm_lazy_caching(),
|
||||||
},
|
},
|
||||||
otel_config: self.cli_options.otel_config(),
|
otel_config: self.cli_options.otel_config(),
|
||||||
};
|
};
|
||||||
|
@ -783,13 +864,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
&metadata,
|
&metadata,
|
||||||
npm_snapshot.map(|s| s.into_serialized()),
|
npm_snapshot.map(|s| s.into_serialized()),
|
||||||
&remote_modules_store,
|
&remote_modules_store,
|
||||||
vfs,
|
&source_map_store,
|
||||||
|
&vfs,
|
||||||
compile_flags,
|
compile_flags,
|
||||||
)
|
)
|
||||||
.context("Writing binary bytes")
|
.context("Writing binary bytes")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_npm_vfs(&self, root_path: &Path) -> Result<VfsBuilder, AnyError> {
|
fn fill_npm_vfs(&self, builder: &mut VfsBuilder) -> Result<(), AnyError> {
|
||||||
fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
|
fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
|
||||||
if system_info != &NpmSystemInfo::default() {
|
if system_info != &NpmSystemInfo::default() {
|
||||||
log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));
|
log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));
|
||||||
|
@ -800,15 +882,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
InnerCliNpmResolverRef::Managed(npm_resolver) => {
|
InnerCliNpmResolverRef::Managed(npm_resolver) => {
|
||||||
if let Some(node_modules_path) = npm_resolver.root_node_modules_path() {
|
if let Some(node_modules_path) = npm_resolver.root_node_modules_path() {
|
||||||
maybe_warn_different_system(&self.npm_system_info);
|
maybe_warn_different_system(&self.npm_system_info);
|
||||||
let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
|
|
||||||
builder.add_dir_recursive(node_modules_path)?;
|
builder.add_dir_recursive(node_modules_path)?;
|
||||||
Ok(builder)
|
Ok(())
|
||||||
} else {
|
} else {
|
||||||
// DO NOT include the user's registry url as it may contain credentials,
|
// we'll flatten to remove any custom registries later
|
||||||
// but also don't make this dependent on the registry url
|
|
||||||
let global_cache_root_path = npm_resolver.global_cache_root_path();
|
|
||||||
let mut builder =
|
|
||||||
VfsBuilder::new(global_cache_root_path.to_path_buf())?;
|
|
||||||
let mut packages =
|
let mut packages =
|
||||||
npm_resolver.all_system_packages(&self.npm_system_info);
|
npm_resolver.all_system_packages(&self.npm_system_info);
|
||||||
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
|
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
|
||||||
|
@ -817,55 +894,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)?;
|
npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)?;
|
||||||
builder.add_dir_recursive(&folder)?;
|
builder.add_dir_recursive(&folder)?;
|
||||||
}
|
}
|
||||||
|
Ok(())
|
||||||
// Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
|
|
||||||
// that will be used by denort when loading the npm cache. This avoids us exposing
|
|
||||||
// the user's private registry information and means we don't have to bother
|
|
||||||
// serializing all the different registry config into the binary.
|
|
||||||
builder.with_root_dir(|root_dir| {
|
|
||||||
root_dir.name = ".deno_compile_node_modules".to_string();
|
|
||||||
let mut new_entries = Vec::with_capacity(root_dir.entries.len());
|
|
||||||
let mut localhost_entries = IndexMap::new();
|
|
||||||
for entry in std::mem::take(&mut root_dir.entries) {
|
|
||||||
match entry {
|
|
||||||
VfsEntry::Dir(dir) => {
|
|
||||||
for entry in dir.entries {
|
|
||||||
log::debug!(
|
|
||||||
"Flattening {} into node_modules",
|
|
||||||
entry.name()
|
|
||||||
);
|
|
||||||
if let Some(existing) =
|
|
||||||
localhost_entries.insert(entry.name().to_string(), entry)
|
|
||||||
{
|
|
||||||
panic!(
|
|
||||||
"Unhandled scenario where a duplicate entry was found: {:?}",
|
|
||||||
existing
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
VfsEntry::File(_) | VfsEntry::Symlink(_) => {
|
|
||||||
new_entries.push(entry);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
new_entries.push(VfsEntry::Dir(VirtualDirectory {
|
|
||||||
name: "localhost".to_string(),
|
|
||||||
entries: localhost_entries.into_iter().map(|(_, v)| v).collect(),
|
|
||||||
}));
|
|
||||||
// needs to be sorted by name
|
|
||||||
new_entries.sort_by(|a, b| a.name().cmp(b.name()));
|
|
||||||
root_dir.entries = new_entries;
|
|
||||||
});
|
|
||||||
|
|
||||||
builder.set_new_root_path(root_path.to_path_buf())?;
|
|
||||||
|
|
||||||
Ok(builder)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
InnerCliNpmResolverRef::Byonm(_) => {
|
InnerCliNpmResolverRef::Byonm(_) => {
|
||||||
maybe_warn_different_system(&self.npm_system_info);
|
maybe_warn_different_system(&self.npm_system_info);
|
||||||
let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
|
|
||||||
for pkg_json in self.cli_options.workspace().package_jsons() {
|
for pkg_json in self.cli_options.workspace().package_jsons() {
|
||||||
builder.add_file_at_path(&pkg_json.path)?;
|
builder.add_file_at_path(&pkg_json.path)?;
|
||||||
}
|
}
|
||||||
|
@ -898,10 +931,120 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(builder)
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn build_vfs_consolidating_global_npm_cache(
|
||||||
|
&self,
|
||||||
|
mut vfs: VfsBuilder,
|
||||||
|
) -> BuiltVfs {
|
||||||
|
match self.npm_resolver.as_inner() {
|
||||||
|
InnerCliNpmResolverRef::Managed(npm_resolver) => {
|
||||||
|
if npm_resolver.root_node_modules_path().is_some() {
|
||||||
|
return vfs.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
let global_cache_root_path = npm_resolver.global_cache_root_path();
|
||||||
|
|
||||||
|
// Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
|
||||||
|
// that will be used by denort when loading the npm cache. This avoids us exposing
|
||||||
|
// the user's private registry information and means we don't have to bother
|
||||||
|
// serializing all the different registry config into the binary.
|
||||||
|
let Some(root_dir) = vfs.get_dir_mut(global_cache_root_path) else {
|
||||||
|
return vfs.build();
|
||||||
|
};
|
||||||
|
|
||||||
|
root_dir.name = DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME.to_string();
|
||||||
|
let mut new_entries = Vec::with_capacity(root_dir.entries.len());
|
||||||
|
let mut localhost_entries = IndexMap::new();
|
||||||
|
for entry in root_dir.entries.take_inner() {
|
||||||
|
match entry {
|
||||||
|
VfsEntry::Dir(mut dir) => {
|
||||||
|
for entry in dir.entries.take_inner() {
|
||||||
|
log::debug!("Flattening {} into node_modules", entry.name());
|
||||||
|
if let Some(existing) =
|
||||||
|
localhost_entries.insert(entry.name().to_string(), entry)
|
||||||
|
{
|
||||||
|
panic!(
|
||||||
|
"Unhandled scenario where a duplicate entry was found: {:?}",
|
||||||
|
existing
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
VfsEntry::File(_) | VfsEntry::Symlink(_) => {
|
||||||
|
new_entries.push(entry);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
new_entries.push(VfsEntry::Dir(VirtualDirectory {
|
||||||
|
name: "localhost".to_string(),
|
||||||
|
entries: VirtualDirectoryEntries::new(
|
||||||
|
localhost_entries.into_iter().map(|(_, v)| v).collect(),
|
||||||
|
),
|
||||||
|
}));
|
||||||
|
root_dir.entries = VirtualDirectoryEntries::new(new_entries);
|
||||||
|
|
||||||
|
// it's better to not expose the user's cache directory, so take it out
|
||||||
|
// of there
|
||||||
|
let parent = global_cache_root_path.parent().unwrap();
|
||||||
|
let parent_dir = vfs.get_dir_mut(parent).unwrap();
|
||||||
|
let index = parent_dir
|
||||||
|
.entries
|
||||||
|
.binary_search(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME)
|
||||||
|
.unwrap();
|
||||||
|
let npm_global_cache_dir_entry = parent_dir.entries.remove(index);
|
||||||
|
|
||||||
|
// go up from the ancestors removing empty directories...
|
||||||
|
// this is not as optimized as it could be
|
||||||
|
let mut last_name =
|
||||||
|
Cow::Borrowed(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME);
|
||||||
|
for ancestor in parent.ancestors() {
|
||||||
|
let dir = vfs.get_dir_mut(ancestor).unwrap();
|
||||||
|
if let Ok(index) = dir.entries.binary_search(&last_name) {
|
||||||
|
dir.entries.remove(index);
|
||||||
|
}
|
||||||
|
last_name = Cow::Owned(dir.name.clone());
|
||||||
|
if !dir.entries.is_empty() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// now build the vfs and add the global cache dir entry there
|
||||||
|
let mut built_vfs = vfs.build();
|
||||||
|
built_vfs.entries.insert(npm_global_cache_dir_entry);
|
||||||
|
built_vfs
|
||||||
|
}
|
||||||
|
InnerCliNpmResolverRef::Byonm(_) => vfs.build(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_denort_path(deno_exe: PathBuf) -> Option<OsString> {
|
||||||
|
let mut denort = deno_exe;
|
||||||
|
denort.set_file_name(if cfg!(windows) {
|
||||||
|
"denort.exe"
|
||||||
|
} else {
|
||||||
|
"denort"
|
||||||
|
});
|
||||||
|
denort.exists().then(|| denort.into_os_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_dev_binary_path() -> Option<OsString> {
|
||||||
|
env::var_os("DENORT_BIN").or_else(|| {
|
||||||
|
env::current_exe().ok().and_then(|exec_path| {
|
||||||
|
if exec_path
|
||||||
|
.components()
|
||||||
|
.any(|component| component == Component::Normal("target".as_ref()))
|
||||||
|
{
|
||||||
|
get_denort_path(exec_path)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This function returns the environment variables specified
|
/// This function returns the environment variables specified
|
||||||
|
|
|
@ -17,6 +17,7 @@ use deno_runtime::deno_io::fs::FsResult;
|
||||||
use deno_runtime::deno_io::fs::FsStat;
|
use deno_runtime::deno_io::fs::FsStat;
|
||||||
|
|
||||||
use super::virtual_fs::FileBackedVfs;
|
use super::virtual_fs::FileBackedVfs;
|
||||||
|
use super::virtual_fs::VfsFileSubDataKind;
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
|
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
|
||||||
|
@ -36,7 +37,8 @@ impl DenoCompileFileSystem {
|
||||||
|
|
||||||
fn copy_to_real_path(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
fn copy_to_real_path(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
||||||
let old_file = self.0.file_entry(oldpath)?;
|
let old_file = self.0.file_entry(oldpath)?;
|
||||||
let old_file_bytes = self.0.read_file_all(old_file)?;
|
let old_file_bytes =
|
||||||
|
self.0.read_file_all(old_file, VfsFileSubDataKind::Raw)?;
|
||||||
RealFs.write_file_sync(
|
RealFs.write_file_sync(
|
||||||
newpath,
|
newpath,
|
||||||
OpenOptions {
|
OpenOptions {
|
||||||
|
|
|
@ -9,6 +9,7 @@ use binary::StandaloneData;
|
||||||
use binary::StandaloneModules;
|
use binary::StandaloneModules;
|
||||||
use code_cache::DenoCompileCodeCache;
|
use code_cache::DenoCompileCodeCache;
|
||||||
use deno_ast::MediaType;
|
use deno_ast::MediaType;
|
||||||
|
use deno_cache_dir::file_fetcher::CacheSetting;
|
||||||
use deno_cache_dir::npm::NpmCacheDir;
|
use deno_cache_dir::npm::NpmCacheDir;
|
||||||
use deno_config::workspace::MappedResolution;
|
use deno_config::workspace::MappedResolution;
|
||||||
use deno_config::workspace::MappedResolutionError;
|
use deno_config::workspace::MappedResolutionError;
|
||||||
|
@ -32,6 +33,7 @@ use deno_core::ResolutionKind;
|
||||||
use deno_core::SourceCodeCacheInfo;
|
use deno_core::SourceCodeCacheInfo;
|
||||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||||
use deno_package_json::PackageJsonDepValue;
|
use deno_package_json::PackageJsonDepValue;
|
||||||
|
use deno_resolver::cjs::IsCjsResolutionMode;
|
||||||
use deno_resolver::npm::NpmReqResolverOptions;
|
use deno_resolver::npm::NpmReqResolverOptions;
|
||||||
use deno_runtime::deno_fs;
|
use deno_runtime::deno_fs;
|
||||||
use deno_runtime::deno_node::create_host_defined_options;
|
use deno_runtime::deno_node::create_host_defined_options;
|
||||||
|
@ -50,18 +52,20 @@ use deno_semver::npm::NpmPackageReqReference;
|
||||||
use import_map::parse_from_json;
|
use import_map::parse_from_json;
|
||||||
use node_resolver::analyze::NodeCodeTranslator;
|
use node_resolver::analyze::NodeCodeTranslator;
|
||||||
use node_resolver::errors::ClosestPkgJsonError;
|
use node_resolver::errors::ClosestPkgJsonError;
|
||||||
use node_resolver::NodeModuleKind;
|
use node_resolver::NodeResolutionKind;
|
||||||
use node_resolver::NodeResolutionMode;
|
use node_resolver::ResolutionMode;
|
||||||
use serialization::DenoCompileModuleSource;
|
use serialization::DenoCompileModuleSource;
|
||||||
|
use serialization::SourceMapStore;
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
use virtual_fs::FileBackedVfs;
|
||||||
|
use virtual_fs::VfsFileSubDataKind;
|
||||||
|
|
||||||
use crate::args::create_default_npmrc;
|
use crate::args::create_default_npmrc;
|
||||||
use crate::args::get_root_cert_store;
|
use crate::args::get_root_cert_store;
|
||||||
use crate::args::npm_pkg_req_ref_to_binary_command;
|
use crate::args::npm_pkg_req_ref_to_binary_command;
|
||||||
use crate::args::CaData;
|
use crate::args::CaData;
|
||||||
use crate::args::CacheSetting;
|
|
||||||
use crate::args::NpmInstallDepsProvider;
|
use crate::args::NpmInstallDepsProvider;
|
||||||
use crate::args::StorageKeyResolver;
|
use crate::args::StorageKeyResolver;
|
||||||
use crate::cache::Caches;
|
use crate::cache::Caches;
|
||||||
|
@ -85,10 +89,10 @@ use crate::npm::CreateInNpmPkgCheckerOptions;
|
||||||
use crate::resolver::CjsTracker;
|
use crate::resolver::CjsTracker;
|
||||||
use crate::resolver::CliDenoResolverFs;
|
use crate::resolver::CliDenoResolverFs;
|
||||||
use crate::resolver::CliNpmReqResolver;
|
use crate::resolver::CliNpmReqResolver;
|
||||||
use crate::resolver::IsCjsResolverOptions;
|
|
||||||
use crate::resolver::NpmModuleLoader;
|
use crate::resolver::NpmModuleLoader;
|
||||||
use crate::util::progress_bar::ProgressBar;
|
use crate::util::progress_bar::ProgressBar;
|
||||||
use crate::util::progress_bar::ProgressBarStyle;
|
use crate::util::progress_bar::ProgressBarStyle;
|
||||||
|
use crate::util::text_encoding::from_utf8_lossy_cow;
|
||||||
use crate::util::v8::construct_v8_flags;
|
use crate::util::v8::construct_v8_flags;
|
||||||
use crate::worker::CliCodeCache;
|
use crate::worker::CliCodeCache;
|
||||||
use crate::worker::CliMainWorkerFactory;
|
use crate::worker::CliMainWorkerFactory;
|
||||||
|
@ -111,6 +115,7 @@ use self::file_system::DenoCompileFileSystem;
|
||||||
|
|
||||||
struct SharedModuleLoaderState {
|
struct SharedModuleLoaderState {
|
||||||
cjs_tracker: Arc<CjsTracker>,
|
cjs_tracker: Arc<CjsTracker>,
|
||||||
|
code_cache: Option<Arc<dyn CliCodeCache>>,
|
||||||
fs: Arc<dyn deno_fs::FileSystem>,
|
fs: Arc<dyn deno_fs::FileSystem>,
|
||||||
modules: StandaloneModules,
|
modules: StandaloneModules,
|
||||||
node_code_translator: Arc<CliNodeCodeTranslator>,
|
node_code_translator: Arc<CliNodeCodeTranslator>,
|
||||||
|
@ -118,8 +123,9 @@ struct SharedModuleLoaderState {
|
||||||
npm_module_loader: Arc<NpmModuleLoader>,
|
npm_module_loader: Arc<NpmModuleLoader>,
|
||||||
npm_req_resolver: Arc<CliNpmReqResolver>,
|
npm_req_resolver: Arc<CliNpmReqResolver>,
|
||||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||||
|
source_maps: SourceMapStore,
|
||||||
|
vfs: Arc<FileBackedVfs>,
|
||||||
workspace_resolver: WorkspaceResolver,
|
workspace_resolver: WorkspaceResolver,
|
||||||
code_cache: Option<Arc<dyn CliCodeCache>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SharedModuleLoaderState {
|
impl SharedModuleLoaderState {
|
||||||
|
@ -190,9 +196,9 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
||||||
.cjs_tracker
|
.cjs_tracker
|
||||||
.is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))?
|
.is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))?
|
||||||
{
|
{
|
||||||
NodeModuleKind::Cjs
|
ResolutionMode::Require
|
||||||
} else {
|
} else {
|
||||||
NodeModuleKind::Esm
|
ResolutionMode::Import
|
||||||
};
|
};
|
||||||
|
|
||||||
if self.shared.node_resolver.in_npm_package(&referrer) {
|
if self.shared.node_resolver.in_npm_package(&referrer) {
|
||||||
|
@ -204,7 +210,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
||||||
raw_specifier,
|
raw_specifier,
|
||||||
&referrer,
|
&referrer,
|
||||||
referrer_kind,
|
referrer_kind,
|
||||||
NodeResolutionMode::Execution,
|
NodeResolutionKind::Execution,
|
||||||
)?
|
)?
|
||||||
.into_url(),
|
.into_url(),
|
||||||
);
|
);
|
||||||
|
@ -232,7 +238,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
||||||
sub_path.as_deref(),
|
sub_path.as_deref(),
|
||||||
Some(&referrer),
|
Some(&referrer),
|
||||||
referrer_kind,
|
referrer_kind,
|
||||||
NodeResolutionMode::Execution,
|
NodeResolutionKind::Execution,
|
||||||
)?,
|
)?,
|
||||||
),
|
),
|
||||||
Ok(MappedResolution::PackageJson {
|
Ok(MappedResolution::PackageJson {
|
||||||
|
@ -249,7 +255,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
||||||
sub_path.as_deref(),
|
sub_path.as_deref(),
|
||||||
&referrer,
|
&referrer,
|
||||||
referrer_kind,
|
referrer_kind,
|
||||||
NodeResolutionMode::Execution,
|
NodeResolutionKind::Execution,
|
||||||
)
|
)
|
||||||
.map_err(AnyError::from),
|
.map_err(AnyError::from),
|
||||||
PackageJsonDepValue::Workspace(version_req) => {
|
PackageJsonDepValue::Workspace(version_req) => {
|
||||||
|
@ -269,7 +275,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
||||||
sub_path.as_deref(),
|
sub_path.as_deref(),
|
||||||
Some(&referrer),
|
Some(&referrer),
|
||||||
referrer_kind,
|
referrer_kind,
|
||||||
NodeResolutionMode::Execution,
|
NodeResolutionKind::Execution,
|
||||||
)?,
|
)?,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
@ -283,7 +289,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
||||||
&reference,
|
&reference,
|
||||||
&referrer,
|
&referrer,
|
||||||
referrer_kind,
|
referrer_kind,
|
||||||
NodeResolutionMode::Execution,
|
NodeResolutionKind::Execution,
|
||||||
)?);
|
)?);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -310,7 +316,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
||||||
raw_specifier,
|
raw_specifier,
|
||||||
&referrer,
|
&referrer,
|
||||||
referrer_kind,
|
referrer_kind,
|
||||||
NodeResolutionMode::Execution,
|
NodeResolutionKind::Execution,
|
||||||
)?;
|
)?;
|
||||||
if let Some(res) = maybe_res {
|
if let Some(res) = maybe_res {
|
||||||
return Ok(res.into_url());
|
return Ok(res.into_url());
|
||||||
|
@ -392,7 +398,11 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
-    match self.shared.modules.read(original_specifier) {
+    match self
+      .shared
+      .modules
+      .read(original_specifier, VfsFileSubDataKind::ModuleGraph)
+    {
      Ok(Some(module)) => {
        let media_type = module.media_type;
        let (module_specifier, module_type, module_source) =
@@ -491,6 +501,45 @@ impl ModuleLoader for EmbeddedModuleLoader {
    }
    std::future::ready(()).boxed_local()
  }
+
+  fn get_source_map(&self, file_name: &str) -> Option<Cow<[u8]>> {
+    if file_name.starts_with("file:///") {
+      let url =
+        deno_path_util::url_from_directory_path(self.shared.vfs.root()).ok()?;
+      let file_url = ModuleSpecifier::parse(file_name).ok()?;
+      let relative_path = url.make_relative(&file_url)?;
+      self.shared.source_maps.get(&relative_path)
+    } else {
+      self.shared.source_maps.get(file_name)
+    }
+    .map(Cow::Borrowed)
+  }
+
+  fn get_source_mapped_source_line(
+    &self,
+    file_name: &str,
+    line_number: usize,
+  ) -> Option<String> {
+    let specifier = ModuleSpecifier::parse(file_name).ok()?;
+    let data = self
+      .shared
+      .modules
+      .read(&specifier, VfsFileSubDataKind::Raw)
+      .ok()??;
+
+    let source = String::from_utf8_lossy(&data.data);
+    // Do NOT use .lines(): it skips the terminating empty line.
+    // (due to internally using .split_terminator() instead of .split())
+    let lines: Vec<&str> = source.split('\n').collect();
+    if line_number >= lines.len() {
+      Some(format!(
+        "{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
+        crate::colors::yellow("Warning"), line_number + 1,
+      ))
+    } else {
+      Some(lines[line_number].to_string())
+    }
+  }
}
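The comment about .lines() above is easy to miss; here is a tiny standalone illustration of the difference it calls out (plain Rust, no Deno types):

fn main() {
  let source = "line 1\nline 2\n";
  // `lines()` drops the final empty line, while `split('\n')` keeps it,
  // so only the latter keeps indexes aligned with the original file.
  let with_lines: Vec<&str> = source.lines().collect();
  let with_split: Vec<&str> = source.split('\n').collect();
  assert_eq!(with_lines, ["line 1", "line 2"]);
  assert_eq!(with_split, ["line 1", "line 2", ""]);
}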
||||||
|
|
||||||
impl NodeRequireLoader for EmbeddedModuleLoader {
|
impl NodeRequireLoader for EmbeddedModuleLoader {
|
||||||
|
@ -513,8 +562,13 @@ impl NodeRequireLoader for EmbeddedModuleLoader {
|
||||||
fn load_text_file_lossy(
|
fn load_text_file_lossy(
|
||||||
&self,
|
&self,
|
||||||
path: &std::path::Path,
|
path: &std::path::Path,
|
||||||
) -> Result<String, AnyError> {
|
) -> Result<Cow<'static, str>, AnyError> {
|
||||||
Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?)
|
let file_entry = self.shared.vfs.file_entry(path)?;
|
||||||
|
let file_bytes = self
|
||||||
|
.shared
|
||||||
|
.vfs
|
||||||
|
.read_file_all(file_entry, VfsFileSubDataKind::ModuleGraph)?;
|
||||||
|
Ok(from_utf8_lossy_cow(file_bytes))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_maybe_cjs(
|
fn is_maybe_cjs(
|
||||||
|
@ -581,6 +635,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
|
||||||
modules,
|
modules,
|
||||||
npm_snapshot,
|
npm_snapshot,
|
||||||
root_path,
|
root_path,
|
||||||
|
source_maps,
|
||||||
vfs,
|
vfs,
|
||||||
} = data;
|
} = data;
|
||||||
let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
|
let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
|
||||||
|
@ -723,9 +778,12 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
|
||||||
let cjs_tracker = Arc::new(CjsTracker::new(
|
let cjs_tracker = Arc::new(CjsTracker::new(
|
||||||
in_npm_pkg_checker.clone(),
|
in_npm_pkg_checker.clone(),
|
||||||
pkg_json_resolver.clone(),
|
pkg_json_resolver.clone(),
|
||||||
IsCjsResolverOptions {
|
if metadata.unstable_config.detect_cjs {
|
||||||
detect_cjs: !metadata.workspace_resolver.package_jsons.is_empty(),
|
IsCjsResolutionMode::ImplicitTypeCommonJs
|
||||||
is_node_main: false,
|
} else if metadata.workspace_resolver.package_jsons.is_empty() {
|
||||||
|
IsCjsResolutionMode::Disabled
|
||||||
|
} else {
|
||||||
|
IsCjsResolutionMode::ExplicitTypeCommonJs
|
||||||
},
|
},
|
||||||
));
|
));
|
||||||
let cache_db = Caches::new(deno_dir_provider.clone());
|
let cache_db = Caches::new(deno_dir_provider.clone());
|
||||||
|
@ -817,6 +875,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
|
||||||
let module_loader_factory = StandaloneModuleLoaderFactory {
|
let module_loader_factory = StandaloneModuleLoaderFactory {
|
||||||
shared: Arc::new(SharedModuleLoaderState {
|
shared: Arc::new(SharedModuleLoaderState {
|
||||||
cjs_tracker: cjs_tracker.clone(),
|
cjs_tracker: cjs_tracker.clone(),
|
||||||
|
code_cache: code_cache.clone(),
|
||||||
fs: fs.clone(),
|
fs: fs.clone(),
|
||||||
modules,
|
modules,
|
||||||
node_code_translator: node_code_translator.clone(),
|
node_code_translator: node_code_translator.clone(),
|
||||||
|
@ -826,10 +885,11 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
|
||||||
fs.clone(),
|
fs.clone(),
|
||||||
node_code_translator,
|
node_code_translator,
|
||||||
)),
|
)),
|
||||||
code_cache: code_cache.clone(),
|
|
||||||
npm_resolver: npm_resolver.clone(),
|
npm_resolver: npm_resolver.clone(),
|
||||||
workspace_resolver,
|
|
||||||
npm_req_resolver,
|
npm_req_resolver,
|
||||||
|
source_maps,
|
||||||
|
vfs,
|
||||||
|
workspace_resolver,
|
||||||
}),
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -911,6 +971,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
|
||||||
serve_host: None,
|
serve_host: None,
|
||||||
},
|
},
|
||||||
metadata.otel_config,
|
metadata.otel_config,
|
||||||
|
crate::args::NpmCachingStrategy::Lazy,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Initialize v8 once from the main thread.
|
// Initialize v8 once from the main thread.
|
||||||
|
|
|
@ -1,10 +1,13 @@
|
||||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
|
use std::cell::Cell;
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
|
|
||||||
|
use capacity_builder::BytesAppendable;
|
||||||
|
use deno_ast::swc::common::source_map;
|
||||||
use deno_ast::MediaType;
|
use deno_ast::MediaType;
|
||||||
use deno_core::anyhow::bail;
|
use deno_core::anyhow::bail;
|
||||||
use deno_core::anyhow::Context;
|
use deno_core::anyhow::Context;
|
||||||
|
@ -19,11 +22,15 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
|
||||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||||
use deno_npm::NpmPackageId;
|
use deno_npm::NpmPackageId;
|
||||||
use deno_semver::package::PackageReq;
|
use deno_semver::package::PackageReq;
|
||||||
|
use deno_semver::StackString;
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
|
||||||
use crate::standalone::virtual_fs::VirtualDirectory;
|
use crate::standalone::virtual_fs::VirtualDirectory;
|
||||||
|
|
||||||
use super::binary::Metadata;
|
use super::binary::Metadata;
|
||||||
|
use super::virtual_fs::BuiltVfs;
|
||||||
use super::virtual_fs::VfsBuilder;
|
use super::virtual_fs::VfsBuilder;
|
||||||
|
use super::virtual_fs::VirtualDirectoryEntries;
|
||||||
|
|
||||||
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
|
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
|
||||||
|
|
||||||
|
@ -31,61 +38,64 @@ const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
|
||||||
/// * d3n0l4nd
|
/// * d3n0l4nd
|
||||||
/// * <metadata_len><metadata>
|
/// * <metadata_len><metadata>
|
||||||
/// * <npm_snapshot_len><npm_snapshot>
|
/// * <npm_snapshot_len><npm_snapshot>
|
||||||
/// * <remote_modules_len><remote_modules>
|
/// * <remote_modules>
|
||||||
/// * <vfs_headers_len><vfs_headers>
|
/// * <vfs_headers_len><vfs_headers>
|
||||||
/// * <vfs_file_data_len><vfs_file_data>
|
/// * <vfs_file_data_len><vfs_file_data>
|
||||||
|
/// * <source_map_data>
|
||||||
/// * d3n0l4nd
|
/// * d3n0l4nd
|
||||||
pub fn serialize_binary_data_section(
|
pub fn serialize_binary_data_section(
|
||||||
metadata: &Metadata,
|
metadata: &Metadata,
|
||||||
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
|
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
|
||||||
remote_modules: &RemoteModulesStoreBuilder,
|
remote_modules: &RemoteModulesStoreBuilder,
|
||||||
vfs: VfsBuilder,
|
source_map_store: &SourceMapStore,
|
||||||
|
vfs: &BuiltVfs,
|
||||||
) -> Result<Vec<u8>, AnyError> {
|
) -> Result<Vec<u8>, AnyError> {
|
||||||
fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
|
let metadata = serde_json::to_string(metadata)?;
|
||||||
bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
|
let npm_snapshot =
|
||||||
bytes.extend_from_slice(data);
|
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
|
||||||
}
|
let serialized_vfs = serde_json::to_string(&vfs.entries)?;
|
||||||
|
|
||||||
let mut bytes = Vec::new();
|
let bytes = capacity_builder::BytesBuilder::build(|builder| {
|
||||||
bytes.extend_from_slice(MAGIC_BYTES);
|
builder.append(MAGIC_BYTES);
|
||||||
|
// 1. Metadata
|
||||||
// 1. Metadata
|
{
|
||||||
{
|
builder.append_le(metadata.len() as u64);
|
||||||
let metadata = serde_json::to_string(metadata)?;
|
builder.append(&metadata);
|
||||||
write_bytes_with_len(&mut bytes, metadata.as_bytes());
|
}
|
||||||
}
|
// 2. Npm snapshot
|
||||||
// 2. Npm snapshot
|
{
|
||||||
{
|
builder.append_le(npm_snapshot.len() as u64);
|
||||||
let npm_snapshot =
|
builder.append(&npm_snapshot);
|
||||||
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
|
}
|
||||||
write_bytes_with_len(&mut bytes, &npm_snapshot);
|
// 3. Remote modules
|
||||||
}
|
{
|
||||||
// 3. Remote modules
|
remote_modules.write(builder);
|
||||||
{
|
}
|
||||||
let update_index = bytes.len();
|
// 4. VFS
|
||||||
bytes.extend_from_slice(&(0_u64).to_le_bytes());
|
{
|
||||||
let start_index = bytes.len();
|
builder.append_le(serialized_vfs.len() as u64);
|
||||||
remote_modules.write(&mut bytes)?;
|
builder.append(&serialized_vfs);
|
||||||
let length = bytes.len() - start_index;
|
let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
|
||||||
let length_bytes = (length as u64).to_le_bytes();
|
builder.append_le(vfs_bytes_len);
|
||||||
bytes[update_index..update_index + length_bytes.len()]
|
for file in &vfs.files {
|
||||||
.copy_from_slice(&length_bytes);
|
builder.append(file);
|
||||||
}
|
}
|
||||||
// 4. VFS
|
}
|
||||||
{
|
// 5. Source maps
|
||||||
let (vfs, vfs_files) = vfs.into_dir_and_files();
|
{
|
||||||
let vfs = serde_json::to_string(&vfs)?;
|
builder.append_le(source_map_store.data.len() as u32);
|
||||||
write_bytes_with_len(&mut bytes, vfs.as_bytes());
|
for (specifier, source_map) in &source_map_store.data {
|
||||||
let vfs_bytes_len = vfs_files.iter().map(|f| f.len() as u64).sum::<u64>();
|
builder.append_le(specifier.len() as u32);
|
||||||
bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes());
|
builder.append(specifier);
|
||||||
for file in &vfs_files {
|
builder.append_le(source_map.len() as u32);
|
||||||
bytes.extend_from_slice(file);
|
builder.append(source_map.as_ref());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// write the magic bytes at the end so we can use it
|
// write the magic bytes at the end so we can use it
|
||||||
// to make sure we've deserialized correctly
|
// to make sure we've deserialized correctly
|
||||||
bytes.extend_from_slice(MAGIC_BYTES);
|
builder.append(MAGIC_BYTES);
|
||||||
|
})?;
|
||||||
|
|
||||||
Ok(bytes)
|
Ok(bytes)
|
||||||
}
|
}
|
||||||
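The rewrite of serialize_binary_data_section above replaces incremental Vec writes and length back-patching with capacity_builder's BytesBuilder. The sketch below only illustrates the end goal (append pieces, then produce one exact-size buffer) with plain std types; it is not the crate's actual API:

// Stand-in builder: collect parts, then allocate the output once at the
// exact total size. The real crate avoids these intermediate buffers;
// this only shows the shape of the result the code above is after.
struct PartsBuilder {
  parts: Vec<Vec<u8>>,
}

impl PartsBuilder {
  fn new() -> Self {
    Self { parts: Vec::new() }
  }
  fn append(&mut self, data: &[u8]) {
    self.parts.push(data.to_vec());
  }
  fn append_le_u64(&mut self, value: u64) {
    self.parts.push(value.to_le_bytes().to_vec());
  }
  fn build(self) -> Vec<u8> {
    let total: usize = self.parts.iter().map(|p| p.len()).sum();
    let mut out = Vec::with_capacity(total);
    for part in self.parts {
      out.extend_from_slice(&part);
    }
    out
  }
}

fn main() {
  let mut builder = PartsBuilder::new();
  builder.append(b"d3n0l4nd");
  builder.append_le_u64(5);
  builder.append(b"hello");
  let bytes = builder.build();
  assert_eq!(bytes.len(), 8 + 8 + 5);
}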
|
@ -94,19 +104,14 @@ pub struct DeserializedDataSection {
|
||||||
pub metadata: Metadata,
|
pub metadata: Metadata,
|
||||||
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||||
pub remote_modules: RemoteModulesStore,
|
pub remote_modules: RemoteModulesStore,
|
||||||
pub vfs_dir: VirtualDirectory,
|
pub source_maps: SourceMapStore,
|
||||||
|
pub vfs_root_entries: VirtualDirectoryEntries,
|
||||||
pub vfs_files_data: &'static [u8],
|
pub vfs_files_data: &'static [u8],
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn deserialize_binary_data_section(
|
pub fn deserialize_binary_data_section(
|
||||||
data: &'static [u8],
|
data: &'static [u8],
|
||||||
) -> Result<Option<DeserializedDataSection>, AnyError> {
|
) -> Result<Option<DeserializedDataSection>, AnyError> {
|
||||||
fn read_bytes_with_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
|
|
||||||
let (input, len) = read_u64(input)?;
|
|
||||||
let (input, data) = read_bytes(input, len as usize)?;
|
|
||||||
Ok((input, data))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
|
fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
|
||||||
if input.len() < MAGIC_BYTES.len() {
|
if input.len() < MAGIC_BYTES.len() {
|
||||||
bail!("Unexpected end of data. Could not find magic bytes.");
|
bail!("Unexpected end of data. Could not find magic bytes.");
|
||||||
|
@ -118,34 +123,51 @@ pub fn deserialize_binary_data_section(
|
||||||
Ok((input, true))
|
Ok((input, true))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
|
fn read_source_map_entry(
|
||||||
|
input: &[u8],
|
||||||
|
) -> Result<(&[u8], (Cow<str>, &[u8])), AnyError> {
|
||||||
|
let (input, specifier) = read_string_lossy(input)?;
|
||||||
|
let (input, source_map) = read_bytes_with_u32_len(input)?;
|
||||||
|
Ok((input, (specifier, source_map)))
|
||||||
|
}
|
||||||
|
|
||||||
let (input, found) = read_magic_bytes(data)?;
|
let (input, found) = read_magic_bytes(data)?;
|
||||||
if !found {
|
if !found {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 1. Metadata
|
// 1. Metadata
|
||||||
let (input, data) = read_bytes_with_len(input).context("reading metadata")?;
|
let (input, data) =
|
||||||
|
read_bytes_with_u64_len(input).context("reading metadata")?;
|
||||||
let metadata: Metadata =
|
let metadata: Metadata =
|
||||||
serde_json::from_slice(data).context("deserializing metadata")?;
|
serde_json::from_slice(data).context("deserializing metadata")?;
|
||||||
// 2. Npm snapshot
|
// 2. Npm snapshot
|
||||||
let (input, data) =
|
let (input, data) =
|
||||||
read_bytes_with_len(input).context("reading npm snapshot")?;
|
read_bytes_with_u64_len(input).context("reading npm snapshot")?;
|
||||||
let npm_snapshot = if data.is_empty() {
|
let npm_snapshot = if data.is_empty() {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
|
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
|
||||||
};
|
};
|
||||||
// 3. Remote modules
|
// 3. Remote modules
|
||||||
let (input, data) =
|
let (input, remote_modules) =
|
||||||
read_bytes_with_len(input).context("reading remote modules data")?;
|
RemoteModulesStore::build(input).context("deserializing remote modules")?;
|
||||||
let remote_modules =
|
|
||||||
RemoteModulesStore::build(data).context("deserializing remote modules")?;
|
|
||||||
// 4. VFS
|
// 4. VFS
|
||||||
let (input, data) = read_bytes_with_len(input).context("vfs")?;
|
let (input, data) = read_bytes_with_u64_len(input).context("vfs")?;
|
||||||
let vfs_dir: VirtualDirectory =
|
let vfs_root_entries: VirtualDirectoryEntries =
|
||||||
serde_json::from_slice(data).context("deserializing vfs data")?;
|
serde_json::from_slice(data).context("deserializing vfs data")?;
|
||||||
let (input, vfs_files_data) =
|
let (input, vfs_files_data) =
|
||||||
read_bytes_with_len(input).context("reading vfs files data")?;
|
read_bytes_with_u64_len(input).context("reading vfs files data")?;
|
||||||
|
// 5. Source maps
|
||||||
|
let (mut input, source_map_data_len) = read_u32_as_usize(input)?;
|
||||||
|
let mut source_maps = SourceMapStore::with_capacity(source_map_data_len);
|
||||||
|
for _ in 0..source_map_data_len {
|
||||||
|
let (current_input, (specifier, source_map)) =
|
||||||
|
read_source_map_entry(input)?;
|
||||||
|
input = current_input;
|
||||||
|
source_maps.add(specifier, Cow::Borrowed(source_map));
|
||||||
|
}
|
||||||
|
|
||||||
// finally ensure we read the magic bytes at the end
|
// finally ensure we read the magic bytes at the end
|
||||||
let (_input, found) = read_magic_bytes(input)?;
|
let (_input, found) = read_magic_bytes(input)?;
|
||||||
|
@ -157,7 +179,8 @@ pub fn deserialize_binary_data_section(
|
||||||
metadata,
|
metadata,
|
||||||
npm_snapshot,
|
npm_snapshot,
|
||||||
remote_modules,
|
remote_modules,
|
||||||
vfs_dir,
|
source_maps,
|
||||||
|
vfs_root_entries,
|
||||||
vfs_files_data,
|
vfs_files_data,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
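The source map section read above is count-prefixed: a u32 entry count, then per entry a u32-length-prefixed specifier and a u32-length-prefixed source map. A self-contained sketch of that shape (local helper names, not the ones in this file):

fn read_u32(input: &[u8]) -> Option<(&[u8], u32)> {
  if input.len() < 4 {
    return None;
  }
  let (n, rest) = input.split_at(4);
  Some((rest, u32::from_le_bytes(n.try_into().ok()?)))
}

fn read_len_prefixed(input: &[u8]) -> Option<(&[u8], &[u8])> {
  let (rest, len) = read_u32(input)?;
  if rest.len() < len as usize {
    return None;
  }
  let (data, rest) = rest.split_at(len as usize);
  Some((rest, data))
}

fn read_entries(mut input: &[u8]) -> Option<Vec<(String, Vec<u8>)>> {
  let (rest, count) = read_u32(input)?;
  input = rest;
  let mut out = Vec::with_capacity(count as usize);
  for _ in 0..count {
    let (rest, specifier) = read_len_prefixed(input)?;
    let (rest, source_map) = read_len_prefixed(rest)?;
    input = rest;
    out.push((
      String::from_utf8_lossy(specifier).into_owned(),
      source_map.to_vec(),
    ));
  }
  Some(out)
}

fn main() {
  let mut bytes = Vec::new();
  bytes.extend_from_slice(&1u32.to_le_bytes()); // one entry
  bytes.extend_from_slice(&15u32.to_le_bytes());
  bytes.extend_from_slice(b"file:///main.ts");
  bytes.extend_from_slice(&2u32.to_le_bytes());
  bytes.extend_from_slice(b"{}");
  let entries = read_entries(&bytes).unwrap();
  assert_eq!(entries[0].0, "file:///main.ts");
}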
|
@ -165,19 +188,31 @@ pub fn deserialize_binary_data_section(
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct RemoteModulesStoreBuilder {
|
pub struct RemoteModulesStoreBuilder {
|
||||||
specifiers: Vec<(String, u64)>,
|
specifiers: Vec<(String, u64)>,
|
||||||
data: Vec<(MediaType, Vec<u8>)>,
|
data: Vec<(MediaType, Vec<u8>, Option<Vec<u8>>)>,
|
||||||
data_byte_len: u64,
|
data_byte_len: u64,
|
||||||
redirects: Vec<(String, String)>,
|
redirects: Vec<(String, String)>,
|
||||||
redirects_len: u64,
|
redirects_len: u64,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RemoteModulesStoreBuilder {
|
impl RemoteModulesStoreBuilder {
|
||||||
pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) {
|
pub fn add(
|
||||||
|
&mut self,
|
||||||
|
specifier: &Url,
|
||||||
|
media_type: MediaType,
|
||||||
|
data: Vec<u8>,
|
||||||
|
maybe_transpiled: Option<Vec<u8>>,
|
||||||
|
) {
|
||||||
log::debug!("Adding '{}' ({})", specifier, media_type);
|
log::debug!("Adding '{}' ({})", specifier, media_type);
|
||||||
let specifier = specifier.to_string();
|
let specifier = specifier.to_string();
|
||||||
self.specifiers.push((specifier, self.data_byte_len));
|
self.specifiers.push((specifier, self.data_byte_len));
|
||||||
self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data
|
let maybe_transpiled_len = match &maybe_transpiled {
|
||||||
self.data.push((media_type, data));
|
// data length (4 bytes), data
|
||||||
|
Some(data) => 4 + data.len() as u64,
|
||||||
|
None => 0,
|
||||||
|
};
|
||||||
|
// media type (1 byte), data length (4 bytes), data, has transpiled (1 byte), transpiled length
|
||||||
|
self.data_byte_len += 1 + 4 + data.len() as u64 + 1 + maybe_transpiled_len;
|
||||||
|
self.data.push((media_type, data, maybe_transpiled));
|
||||||
}
|
}
|
||||||
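The byte accounting in add() above can be checked with a small worked example: each record is media type (1) + data length (4) + data + has-transpiled flag (1) + optionally transpiled length (4) + transpiled bytes. A minimal sketch of the same arithmetic; the helper is illustrative, not part of this file:

fn record_len(data_len: u64, transpiled_len: Option<u64>) -> u64 {
  let transpiled = match transpiled_len {
    Some(len) => 4 + len,
    None => 0,
  };
  1 + 4 + data_len + 1 + transpiled
}

fn main() {
  assert_eq!(record_len(10, None), 16); // 1 + 4 + 10 + 1
  assert_eq!(record_len(10, Some(20)), 40); // 1 + 4 + 10 + 1 + 4 + 20
}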
|
|
||||||
pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
|
pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
|
||||||
|
@ -191,26 +226,50 @@ impl RemoteModulesStoreBuilder {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
|
fn write<'a, TBytes: capacity_builder::BytesType>(
|
||||||
writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
|
&'a self,
|
||||||
writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
|
builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
|
||||||
|
) {
|
||||||
|
builder.append_le(self.specifiers.len() as u32);
|
||||||
|
builder.append_le(self.redirects.len() as u32);
|
||||||
for (specifier, offset) in &self.specifiers {
|
for (specifier, offset) in &self.specifiers {
|
||||||
writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
|
builder.append_le(specifier.len() as u32);
|
||||||
writer.write_all(specifier.as_bytes())?;
|
builder.append(specifier);
|
||||||
writer.write_all(&offset.to_le_bytes())?;
|
builder.append_le(*offset);
|
||||||
}
|
}
|
||||||
for (from, to) in &self.redirects {
|
for (from, to) in &self.redirects {
|
||||||
writer.write_all(&(from.len() as u32).to_le_bytes())?;
|
builder.append_le(from.len() as u32);
|
||||||
writer.write_all(from.as_bytes())?;
|
builder.append(from);
|
||||||
writer.write_all(&(to.len() as u32).to_le_bytes())?;
|
builder.append_le(to.len() as u32);
|
||||||
writer.write_all(to.as_bytes())?;
|
builder.append(to);
|
||||||
}
|
}
|
||||||
for (media_type, data) in &self.data {
|
builder.append_le(
|
||||||
writer.write_all(&[serialize_media_type(*media_type)])?;
|
self
|
||||||
writer.write_all(&(data.len() as u64).to_le_bytes())?;
|
.data
|
||||||
writer.write_all(data)?;
|
.iter()
|
||||||
|
.map(|(_, data, maybe_transpiled)| {
|
||||||
|
1 + 4
|
||||||
|
+ (data.len() as u64)
|
||||||
|
+ 1
|
||||||
|
+ match maybe_transpiled {
|
||||||
|
Some(transpiled) => 4 + (transpiled.len() as u64),
|
||||||
|
None => 0,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.sum::<u64>(),
|
||||||
|
);
|
||||||
|
for (media_type, data, maybe_transpiled) in &self.data {
|
||||||
|
builder.append(serialize_media_type(*media_type));
|
||||||
|
builder.append_le(data.len() as u32);
|
||||||
|
builder.append(data);
|
||||||
|
if let Some(transpiled) = maybe_transpiled {
|
||||||
|
builder.append(1);
|
||||||
|
builder.append_le(transpiled.len() as u32);
|
||||||
|
builder.append(transpiled);
|
||||||
|
} else {
|
||||||
|
builder.append(0);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -238,6 +297,30 @@ impl DenoCompileModuleSource {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct SourceMapStore {
  data: IndexMap<Cow<'static, str>, Cow<'static, [u8]>>,
}

impl SourceMapStore {
  pub fn with_capacity(capacity: usize) -> Self {
    Self {
      data: IndexMap::with_capacity(capacity),
    }
  }

  pub fn add(
    &mut self,
    specifier: Cow<'static, str>,
    source_map: Cow<'static, [u8]>,
  ) {
    self.data.insert(specifier, source_map);
  }

  pub fn get(&self, specifier: &str) -> Option<&[u8]> {
    self.data.get(specifier).map(|v| v.as_ref())
  }
}
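A small usage sketch of the SourceMapStore defined above, assuming it is called from code in the same module; the specifier and bytes are made up:

use std::borrow::Cow;

fn fill_example(store: &mut SourceMapStore) {
  store.add(
    Cow::Borrowed("file:///main.ts"),
    Cow::Borrowed(b"{\"version\":3}".as_slice()),
  );
  assert!(store.get("file:///main.ts").is_some());
  assert!(store.get("file:///missing.ts").is_none());
}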
|
|
||||||
pub struct DenoCompileModuleData<'a> {
|
pub struct DenoCompileModuleData<'a> {
|
||||||
pub specifier: &'a Url,
|
pub specifier: &'a Url,
|
||||||
pub media_type: MediaType,
|
pub media_type: MediaType,
|
||||||
|
@ -284,6 +367,13 @@ impl<'a> DenoCompileModuleData<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct RemoteModuleEntry<'a> {
  pub specifier: &'a Url,
  pub media_type: MediaType,
  pub data: Cow<'static, [u8]>,
  pub transpiled_data: Option<Cow<'static, [u8]>>,
}
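RemoteModuleEntry above now carries optional pre-transpiled bytes next to the original source. This hunk does not show how callers choose between them; the sketch below only illustrates the intent of the optional field:

// Prefer embedded transpiled output when present, otherwise fall back to
// the original bytes (field names taken from the struct above).
fn bytes_to_execute<'a>(entry: &'a RemoteModuleEntry<'a>) -> &'a [u8] {
  match &entry.transpiled_data {
    Some(transpiled) => transpiled.as_ref(),
    None => entry.data.as_ref(),
  }
}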
|
|
||||||
enum RemoteModulesStoreSpecifierValue {
|
enum RemoteModulesStoreSpecifierValue {
|
||||||
Data(usize),
|
Data(usize),
|
||||||
Redirect(Url),
|
Redirect(Url),
|
||||||
|
@ -295,7 +385,7 @@ pub struct RemoteModulesStore {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RemoteModulesStore {
|
impl RemoteModulesStore {
|
||||||
fn build(data: &'static [u8]) -> Result<Self, AnyError> {
|
fn build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> {
|
||||||
fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
|
fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
|
||||||
let (input, specifier) = read_string_lossy(input)?;
|
let (input, specifier) = read_string_lossy(input)?;
|
||||||
let specifier = Url::parse(&specifier)?;
|
let specifier = Url::parse(&specifier)?;
|
||||||
|
@ -338,12 +428,16 @@ impl RemoteModulesStore {
|
||||||
Ok((input, specifiers))
|
Ok((input, specifiers))
|
||||||
}
|
}
|
||||||
|
|
||||||
let (files_data, specifiers) = read_headers(data)?;
|
let (input, specifiers) = read_headers(input)?;
|
||||||
|
let (input, files_data) = read_bytes_with_u64_len(input)?;
|
||||||
|
|
||||||
Ok(Self {
|
Ok((
|
||||||
specifiers,
|
input,
|
||||||
files_data,
|
Self {
|
||||||
})
|
specifiers,
|
||||||
|
files_data,
|
||||||
|
},
|
||||||
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_specifier<'a>(
|
pub fn resolve_specifier<'a>(
|
||||||
|
@ -374,7 +468,7 @@ impl RemoteModulesStore {
|
||||||
pub fn read<'a>(
|
pub fn read<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
original_specifier: &'a Url,
|
original_specifier: &'a Url,
|
||||||
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
|
) -> Result<Option<RemoteModuleEntry<'a>>, AnyError> {
|
||||||
let mut count = 0;
|
let mut count = 0;
|
||||||
let mut specifier = original_specifier;
|
let mut specifier = original_specifier;
|
||||||
loop {
|
loop {
|
||||||
|
@ -390,12 +484,25 @@ impl RemoteModulesStore {
|
||||||
let input = &self.files_data[*offset..];
|
let input = &self.files_data[*offset..];
|
||||||
let (input, media_type_byte) = read_bytes(input, 1)?;
|
let (input, media_type_byte) = read_bytes(input, 1)?;
|
||||||
let media_type = deserialize_media_type(media_type_byte[0])?;
|
let media_type = deserialize_media_type(media_type_byte[0])?;
|
||||||
let (input, len) = read_u64(input)?;
|
let (input, data) = read_bytes_with_u32_len(input)?;
|
||||||
let (_input, data) = read_bytes(input, len as usize)?;
|
check_has_len(input, 1)?;
|
||||||
return Ok(Some(DenoCompileModuleData {
|
let (input, has_transpiled) = (&input[1..], input[0]);
|
||||||
|
let (_, transpiled_data) = match has_transpiled {
|
||||||
|
0 => (input, None),
|
||||||
|
1 => {
|
||||||
|
let (input, data) = read_bytes_with_u32_len(input)?;
|
||||||
|
(input, Some(data))
|
||||||
|
}
|
||||||
|
value => bail!(
|
||||||
|
"Invalid transpiled data flag: {}. Compiled data is corrupt.",
|
||||||
|
value
|
||||||
|
),
|
||||||
|
};
|
||||||
|
return Ok(Some(RemoteModuleEntry {
|
||||||
specifier,
|
specifier,
|
||||||
media_type,
|
media_type,
|
||||||
data: Cow::Borrowed(data),
|
data: Cow::Borrowed(data),
|
||||||
|
transpiled_data: transpiled_data.map(Cow::Borrowed),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
|
@ -479,12 +586,13 @@ fn deserialize_npm_snapshot(
|
||||||
#[allow(clippy::needless_lifetimes)] // clippy bug
|
#[allow(clippy::needless_lifetimes)] // clippy bug
|
||||||
fn parse_package_dep<'a>(
|
fn parse_package_dep<'a>(
|
||||||
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
|
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
|
||||||
) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a
|
) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
|
||||||
{
|
{
|
||||||
|input| {
|
|input| {
|
||||||
let (input, req) = read_string_lossy(input)?;
|
let (input, req) = read_string_lossy(input)?;
|
||||||
let (input, id) = read_u32_as_usize(input)?;
|
let (input, id) = read_u32_as_usize(input)?;
|
||||||
Ok((input, (req.into_owned(), id_to_npm_id(id)?)))
|
let req = StackString::from_cow(req);
|
||||||
|
Ok((input, (req, id_to_npm_id(id)?)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -634,17 +742,34 @@ fn parse_vec_n_times_with_index<TResult>(
|
||||||
Ok((input, results))
|
Ok((input, results))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
  let (input, len) = read_u64(input)?;
  let (input, data) = read_bytes(input, len as usize)?;
  Ok((input, data))
}

fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
  let (input, len) = read_u32_as_usize(input)?;
  let (input, data) = read_bytes(input, len)?;
  Ok((input, data))
}

fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
  if input.len() < len {
  check_has_len(input, len)?;
    bail!("Unexpected end of data.",);
  }
  let (len_bytes, input) = input.split_at(len);
  Ok((input, len_bytes))
}

#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> {
  if input.len() < len {
    bail!("Unexpected end of data.");
  }
  Ok(())
}

fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
  let (input, str_len) = read_u32_as_usize(input)?;
  let (input, data_bytes) = read_bytes_with_u32_len(input)?;
  let (input, data_bytes) = read_bytes(input, str_len)?;
  Ok((input, String::from_utf8_lossy(data_bytes)))
}
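One property of read_string_lossy above worth keeping in mind: invalid UTF-8 in a stored specifier degrades to replacement characters instead of failing deserialization. A tiny self-contained illustration:

use std::borrow::Cow;

fn main() {
  let bytes: &[u8] = &[b'f', b'o', b'o', 0xff];
  let s: Cow<str> = String::from_utf8_lossy(bytes);
  assert_eq!(s, "foo\u{FFFD}");
}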
|
|
||||||
|
|
File diff suppressed because it is too large
|
@ -14,6 +14,7 @@ use deno_runtime::deno_node::NodeResolver;
|
||||||
use deno_semver::package::PackageNv;
|
use deno_semver::package::PackageNv;
|
||||||
use deno_task_shell::ExecutableCommand;
|
use deno_task_shell::ExecutableCommand;
|
||||||
use deno_task_shell::ExecuteResult;
|
use deno_task_shell::ExecuteResult;
|
||||||
|
use deno_task_shell::KillSignal;
|
||||||
use deno_task_shell::ShellCommand;
|
use deno_task_shell::ShellCommand;
|
||||||
use deno_task_shell::ShellCommandContext;
|
use deno_task_shell::ShellCommandContext;
|
||||||
use deno_task_shell::ShellPipeReader;
|
use deno_task_shell::ShellPipeReader;
|
||||||
|
@ -22,6 +23,7 @@ use lazy_regex::Lazy;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use tokio::task::JoinHandle;
|
use tokio::task::JoinHandle;
|
||||||
use tokio::task::LocalSet;
|
use tokio::task::LocalSet;
|
||||||
|
use tokio_util::sync::CancellationToken;
|
||||||
|
|
||||||
use crate::npm::CliNpmResolver;
|
use crate::npm::CliNpmResolver;
|
||||||
use crate::npm::InnerCliNpmResolverRef;
|
use crate::npm::InnerCliNpmResolverRef;
|
||||||
|
@ -45,9 +47,11 @@ impl TaskStdio {
|
||||||
pub fn stdout() -> Self {
|
pub fn stdout() -> Self {
|
||||||
Self(None, ShellPipeWriter::stdout())
|
Self(None, ShellPipeWriter::stdout())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn stderr() -> Self {
|
pub fn stderr() -> Self {
|
||||||
Self(None, ShellPipeWriter::stderr())
|
Self(None, ShellPipeWriter::stderr())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn piped() -> Self {
|
pub fn piped() -> Self {
|
||||||
let (r, w) = deno_task_shell::pipe();
|
let (r, w) = deno_task_shell::pipe();
|
||||||
Self(Some(r), w)
|
Self(Some(r), w)
|
||||||
|
@ -62,8 +66,8 @@ pub struct TaskIo {
|
||||||
impl Default for TaskIo {
|
impl Default for TaskIo {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
stderr: TaskStdio::stderr(),
|
|
||||||
stdout: TaskStdio::stdout(),
|
stdout: TaskStdio::stdout(),
|
||||||
|
stderr: TaskStdio::stderr(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -78,6 +82,7 @@ pub struct RunTaskOptions<'a> {
|
||||||
pub custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
|
pub custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
|
||||||
pub root_node_modules_dir: Option<&'a Path>,
|
pub root_node_modules_dir: Option<&'a Path>,
|
||||||
pub stdio: Option<TaskIo>,
|
pub stdio: Option<TaskIo>,
|
||||||
|
pub kill_signal: KillSignal,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type TaskCustomCommands = HashMap<String, Rc<dyn ShellCommand>>;
|
pub type TaskCustomCommands = HashMap<String, Rc<dyn ShellCommand>>;
|
||||||
|
@ -96,8 +101,12 @@ pub async fn run_task(
|
||||||
.with_context(|| format!("Error parsing script '{}'.", opts.task_name))?;
|
.with_context(|| format!("Error parsing script '{}'.", opts.task_name))?;
|
||||||
let env_vars =
|
let env_vars =
|
||||||
prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir);
|
prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir);
|
||||||
let state =
|
let state = deno_task_shell::ShellState::new(
|
||||||
deno_task_shell::ShellState::new(env_vars, opts.cwd, opts.custom_commands);
|
env_vars,
|
||||||
|
opts.cwd,
|
||||||
|
opts.custom_commands,
|
||||||
|
opts.kill_signal,
|
||||||
|
);
|
||||||
let stdio = opts.stdio.unwrap_or_default();
|
let stdio = opts.stdio.unwrap_or_default();
|
||||||
let (
|
let (
|
||||||
TaskStdio(stdout_read, stdout_write),
|
TaskStdio(stdout_read, stdout_write),
|
||||||
|
@ -537,6 +546,86 @@ fn resolve_managed_npm_commands(
|
||||||
Ok(result)
|
Ok(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Runs a deno task future forwarding any signals received
|
||||||
|
/// to the process.
|
||||||
|
///
|
||||||
|
/// Signal listeners and ctrl+c listening will be setup.
|
||||||
|
pub async fn run_future_forwarding_signals<TOutput>(
|
||||||
|
kill_signal: KillSignal,
|
||||||
|
future: impl std::future::Future<Output = TOutput>,
|
||||||
|
) -> TOutput {
|
||||||
|
fn spawn_future_with_cancellation(
|
||||||
|
future: impl std::future::Future<Output = ()> + 'static,
|
||||||
|
token: CancellationToken,
|
||||||
|
) {
|
||||||
|
deno_core::unsync::spawn(async move {
|
||||||
|
tokio::select! {
|
||||||
|
_ = future => {}
|
||||||
|
_ = token.cancelled() => {}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let token = CancellationToken::new();
|
||||||
|
let _token_drop_guard = token.clone().drop_guard();
|
||||||
|
let _drop_guard = kill_signal.clone().drop_guard();
|
||||||
|
|
||||||
|
spawn_future_with_cancellation(
|
||||||
|
listen_ctrl_c(kill_signal.clone()),
|
||||||
|
token.clone(),
|
||||||
|
);
|
||||||
|
#[cfg(unix)]
|
||||||
|
spawn_future_with_cancellation(
|
||||||
|
listen_and_forward_all_signals(kill_signal),
|
||||||
|
token,
|
||||||
|
);
|
||||||
|
|
||||||
|
future.await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn listen_ctrl_c(kill_signal: KillSignal) {
|
||||||
|
while let Ok(()) = tokio::signal::ctrl_c().await {
|
||||||
|
// On windows, ctrl+c is sent to the process group, so the signal would
|
||||||
|
// have already been sent to the child process. We still want to listen
|
||||||
|
// for ctrl+c here to keep the process alive when receiving it, but no
|
||||||
|
// need to forward the signal because it's already been sent.
|
||||||
|
if !cfg!(windows) {
|
||||||
|
kill_signal.send(deno_task_shell::SignalKind::SIGINT)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
async fn listen_and_forward_all_signals(kill_signal: KillSignal) {
|
||||||
|
use deno_core::futures::FutureExt;
|
||||||
|
use deno_runtime::signal::SIGNAL_NUMS;
|
||||||
|
|
||||||
|
// listen and forward every signal we support
|
||||||
|
let mut futures = Vec::with_capacity(SIGNAL_NUMS.len());
|
||||||
|
for signo in SIGNAL_NUMS.iter().copied() {
|
||||||
|
if signo == libc::SIGKILL || signo == libc::SIGSTOP {
|
||||||
|
continue; // skip, can't listen to these
|
||||||
|
}
|
||||||
|
|
||||||
|
let kill_signal = kill_signal.clone();
|
||||||
|
futures.push(
|
||||||
|
async move {
|
||||||
|
let Ok(mut stream) = tokio::signal::unix::signal(
|
||||||
|
tokio::signal::unix::SignalKind::from_raw(signo),
|
||||||
|
) else {
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
let signal_kind: deno_task_shell::SignalKind = signo.into();
|
||||||
|
while let Some(()) = stream.recv().await {
|
||||||
|
kill_signal.send(signal_kind);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.boxed_local(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
futures::future::join_all(futures).await;
|
||||||
|
}
|
||||||
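The signal forwarding added above ties its background listeners to a CancellationToken so they stop when the wrapped future completes. A generic sketch of that cancellation idiom with plain tokio/tokio-util types; this is not Deno's exact wiring:

use tokio_util::sync::CancellationToken;

async fn run_with_background_listener() {
  let token = CancellationToken::new();
  // Dropping the guard cancels the token, stopping the listener below.
  let guard = token.clone().drop_guard();

  let listener = {
    let token = token.clone();
    tokio::spawn(async move {
      tokio::select! {
        // stands in for a signal stream that would otherwise never end
        _ = std::future::pending::<()>() => {}
        _ = token.cancelled() => {}
      }
    })
  };

  // ... the real work would run here ...

  drop(guard); // work done: cancel and let the listener exit
  let _ = listener.await;
}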
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod test {
|
mod test {
|
||||||
|
|
||||||
|
|
|
@ -538,7 +538,11 @@ pub async fn run_benchmarks_with_watch(
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
let graph = module_graph_creator
|
let graph = module_graph_creator
|
||||||
.create_graph(graph_kind, collected_bench_modules.clone())
|
.create_graph(
|
||||||
|
graph_kind,
|
||||||
|
collected_bench_modules.clone(),
|
||||||
|
crate::graph_util::NpmCachingStrategy::Eager,
|
||||||
|
)
|
||||||
.await?;
|
.await?;
|
||||||
module_graph_creator.graph_valid(&graph)?;
|
module_graph_creator.graph_valid(&graph)?;
|
||||||
let bench_modules = &graph.roots;
|
let bench_modules = &graph.roots;
|
||||||
|
|
|
@ -64,7 +64,7 @@ pub async fn check(
|
||||||
let file = file_fetcher.fetch(&s, root_permissions).await?;
|
let file = file_fetcher.fetch(&s, root_permissions).await?;
|
||||||
let snippet_files = extract::extract_snippet_files(file)?;
|
let snippet_files = extract::extract_snippet_files(file)?;
|
||||||
for snippet_file in snippet_files {
|
for snippet_file in snippet_files {
|
||||||
specifiers_for_typecheck.push(snippet_file.specifier.clone());
|
specifiers_for_typecheck.push(snippet_file.url.clone());
|
||||||
file_fetcher.insert_memory_files(snippet_file);
|
file_fetcher.insert_memory_files(snippet_file);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,7 +5,7 @@ use crate::args::CompileFlags;
|
||||||
use crate::args::Flags;
|
use crate::args::Flags;
|
||||||
use crate::factory::CliFactory;
|
use crate::factory::CliFactory;
|
||||||
use crate::http_util::HttpClientProvider;
|
use crate::http_util::HttpClientProvider;
|
||||||
use crate::standalone::binary::StandaloneRelativeFileBaseUrl;
|
use crate::standalone::binary::WriteBinOptions;
|
||||||
use crate::standalone::is_standalone_binary;
|
use crate::standalone::is_standalone_binary;
|
||||||
use deno_ast::MediaType;
|
use deno_ast::MediaType;
|
||||||
use deno_ast::ModuleSpecifier;
|
use deno_ast::ModuleSpecifier;
|
||||||
|
@ -15,8 +15,12 @@ use deno_core::error::generic_error;
|
||||||
use deno_core::error::AnyError;
|
use deno_core::error::AnyError;
|
||||||
use deno_core::resolve_url_or_path;
|
use deno_core::resolve_url_or_path;
|
||||||
use deno_graph::GraphKind;
|
use deno_graph::GraphKind;
|
||||||
|
use deno_path_util::url_from_file_path;
|
||||||
|
use deno_path_util::url_to_file_path;
|
||||||
use deno_terminal::colors;
|
use deno_terminal::colors;
|
||||||
use rand::Rng;
|
use rand::Rng;
|
||||||
|
use std::collections::HashSet;
|
||||||
|
use std::collections::VecDeque;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
@ -69,7 +73,11 @@ pub async fn compile(
|
||||||
// create a module graph with types information in it. We don't want to
|
// create a module graph with types information in it. We don't want to
|
||||||
// store that in the binary so create a code only module graph from scratch.
|
// store that in the binary so create a code only module graph from scratch.
|
||||||
module_graph_creator
|
module_graph_creator
|
||||||
.create_graph(GraphKind::CodeOnly, module_roots)
|
.create_graph(
|
||||||
|
GraphKind::CodeOnly,
|
||||||
|
module_roots,
|
||||||
|
crate::graph_util::NpmCachingStrategy::Eager,
|
||||||
|
)
|
||||||
.await?
|
.await?
|
||||||
} else {
|
} else {
|
||||||
graph
|
graph
|
||||||
|
@ -78,20 +86,6 @@ pub async fn compile(
|
||||||
let ts_config_for_emit = cli_options
|
let ts_config_for_emit = cli_options
|
||||||
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
|
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
|
||||||
check_warn_tsconfig(&ts_config_for_emit);
|
check_warn_tsconfig(&ts_config_for_emit);
|
||||||
let root_dir_url = resolve_root_dir_from_specifiers(
|
|
||||||
cli_options.workspace().root_dir(),
|
|
||||||
graph
|
|
||||||
.specifiers()
|
|
||||||
.map(|(s, _)| s)
|
|
||||||
.chain(
|
|
||||||
cli_options
|
|
||||||
.node_modules_dir_path()
|
|
||||||
.and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
|
|
||||||
.iter(),
|
|
||||||
)
|
|
||||||
.chain(include_files.iter()),
|
|
||||||
);
|
|
||||||
log::debug!("Binary root dir: {}", root_dir_url);
|
|
||||||
log::info!(
|
log::info!(
|
||||||
"{} {} to {}",
|
"{} {} to {}",
|
||||||
colors::green("Compile"),
|
colors::green("Compile"),
|
||||||
|
@ -116,14 +110,17 @@ pub async fn compile(
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let write_result = binary_writer
|
let write_result = binary_writer
|
||||||
.write_bin(
|
.write_bin(WriteBinOptions {
|
||||||
file,
|
writer: file,
|
||||||
&graph,
|
display_output_filename: &output_path
|
||||||
StandaloneRelativeFileBaseUrl::from(&root_dir_url),
|
.file_name()
|
||||||
|
.unwrap()
|
||||||
|
.to_string_lossy(),
|
||||||
|
graph: &graph,
|
||||||
entrypoint,
|
entrypoint,
|
||||||
&include_files,
|
include_files: &include_files,
|
||||||
&compile_flags,
|
compile_flags: &compile_flags,
|
||||||
)
|
})
|
||||||
.await
|
.await
|
||||||
.with_context(|| {
|
.with_context(|| {
|
||||||
format!(
|
format!(
|
||||||
|
@ -242,15 +239,58 @@ fn get_module_roots_and_include_files(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut module_roots = Vec::with_capacity(compile_flags.include.len() + 1);
|
fn analyze_path(
|
||||||
let mut include_files = Vec::with_capacity(compile_flags.include.len());
|
url: &ModuleSpecifier,
|
||||||
|
module_roots: &mut Vec<ModuleSpecifier>,
|
||||||
|
include_files: &mut Vec<ModuleSpecifier>,
|
||||||
|
searched_paths: &mut HashSet<PathBuf>,
|
||||||
|
) -> Result<(), AnyError> {
|
||||||
|
let Ok(path) = url_to_file_path(url) else {
|
||||||
|
return Ok(());
|
||||||
|
};
|
||||||
|
let mut pending = VecDeque::from([path]);
|
||||||
|
while let Some(path) = pending.pop_front() {
|
||||||
|
if !searched_paths.insert(path.clone()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if !path.is_dir() {
|
||||||
|
let url = url_from_file_path(&path)?;
|
||||||
|
include_files.push(url.clone());
|
||||||
|
if is_module_graph_module(&url) {
|
||||||
|
module_roots.push(url);
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
for entry in std::fs::read_dir(&path).with_context(|| {
|
||||||
|
format!("Failed reading directory '{}'", path.display())
|
||||||
|
})? {
|
||||||
|
let entry = entry.with_context(|| {
|
||||||
|
format!("Failed reading entry in directory '{}'", path.display())
|
||||||
|
})?;
|
||||||
|
pending.push_back(entry.path());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut searched_paths = HashSet::new();
|
||||||
|
let mut module_roots = Vec::new();
|
||||||
|
let mut include_files = Vec::new();
|
||||||
module_roots.push(entrypoint.clone());
|
module_roots.push(entrypoint.clone());
|
||||||
for side_module in &compile_flags.include {
|
for side_module in &compile_flags.include {
|
||||||
let url = resolve_url_or_path(side_module, initial_cwd)?;
|
let url = resolve_url_or_path(side_module, initial_cwd)?;
|
||||||
if is_module_graph_module(&url) {
|
if is_module_graph_module(&url) {
|
||||||
module_roots.push(url);
|
module_roots.push(url.clone());
|
||||||
|
if url.scheme() == "file" {
|
||||||
|
include_files.push(url);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
include_files.push(url);
|
analyze_path(
|
||||||
|
&url,
|
||||||
|
&mut module_roots,
|
||||||
|
&mut include_files,
|
||||||
|
&mut searched_paths,
|
||||||
|
)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok((module_roots, include_files))
|
Ok((module_roots, include_files))
|
||||||
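The directory handling added above (analyze_path) walks include paths iteratively rather than recursively: a VecDeque of pending paths plus a HashSet so repeated includes are only scanned once. A generic, self-contained sketch of that traversal pattern:

use std::collections::{HashSet, VecDeque};
use std::path::PathBuf;

// Breadth-first walk: files are collected, directories are expanded,
// and already-seen paths are skipped.
fn walk(start: PathBuf) -> std::io::Result<Vec<PathBuf>> {
  let mut files = Vec::new();
  let mut seen = HashSet::new();
  let mut pending = VecDeque::from([start]);
  while let Some(path) = pending.pop_front() {
    if !seen.insert(path.clone()) {
      continue;
    }
    if !path.is_dir() {
      files.push(path);
      continue;
    }
    for entry in std::fs::read_dir(&path)? {
      pending.push_back(entry?.path());
    }
  }
  Ok(files)
}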
|
@ -316,68 +356,6 @@ fn get_os_specific_filepath(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_root_dir_from_specifiers<'a>(
|
|
||||||
starting_dir: &ModuleSpecifier,
|
|
||||||
specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
|
|
||||||
) -> ModuleSpecifier {
|
|
||||||
fn select_common_root<'a>(a: &'a str, b: &'a str) -> &'a str {
|
|
||||||
let min_length = a.len().min(b.len());
|
|
||||||
|
|
||||||
let mut last_slash = 0;
|
|
||||||
for i in 0..min_length {
|
|
||||||
if a.as_bytes()[i] == b.as_bytes()[i] && a.as_bytes()[i] == b'/' {
|
|
||||||
last_slash = i;
|
|
||||||
} else if a.as_bytes()[i] != b.as_bytes()[i] {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return the common root path up to the last common slash.
|
|
||||||
// This returns a slice of the original string 'a', up to and including the last matching '/'.
|
|
||||||
let common = &a[..=last_slash];
|
|
||||||
if cfg!(windows) && common == "file:///" {
|
|
||||||
a
|
|
||||||
} else {
|
|
||||||
common
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn is_file_system_root(url: &str) -> bool {
|
|
||||||
let Some(path) = url.strip_prefix("file:///") else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
if cfg!(windows) {
|
|
||||||
let Some((_drive, path)) = path.split_once('/') else {
|
|
||||||
return true;
|
|
||||||
};
|
|
||||||
path.is_empty()
|
|
||||||
} else {
|
|
||||||
path.is_empty()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut found_dir = starting_dir.as_str();
|
|
||||||
if !is_file_system_root(found_dir) {
|
|
||||||
for specifier in specifiers {
|
|
||||||
if specifier.scheme() == "file" {
|
|
||||||
found_dir = select_common_root(found_dir, specifier.as_str());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let found_dir = if is_file_system_root(found_dir) {
|
|
||||||
found_dir
|
|
||||||
} else {
|
|
||||||
// include the parent dir name because it helps create some context
|
|
||||||
found_dir
|
|
||||||
.strip_suffix('/')
|
|
||||||
.unwrap_or(found_dir)
|
|
||||||
.rfind('/')
|
|
||||||
.map(|i| &found_dir[..i + 1])
|
|
||||||
.unwrap_or(found_dir)
|
|
||||||
};
|
|
||||||
ModuleSpecifier::parse(found_dir).unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod test {
|
mod test {
|
||||||
pub use super::*;
|
pub use super::*;
|
||||||
|
@ -454,38 +432,4 @@ mod test {
|
||||||
run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
|
run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
|
||||||
run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
|
run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_resolve_root_dir_from_specifiers() {
|
|
||||||
fn resolve(start: &str, specifiers: &[&str]) -> String {
|
|
||||||
let specifiers = specifiers
|
|
||||||
.iter()
|
|
||||||
.map(|s| ModuleSpecifier::parse(s).unwrap())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
resolve_root_dir_from_specifiers(
|
|
||||||
&ModuleSpecifier::parse(start).unwrap(),
|
|
||||||
specifiers.iter(),
|
|
||||||
)
|
|
||||||
.to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
assert_eq!(resolve("file:///a/b/c", &["file:///a/b/c/d"]), "file:///a/");
|
|
||||||
assert_eq!(
|
|
||||||
resolve("file:///a/b/c/", &["file:///a/b/c/d"]),
|
|
||||||
"file:///a/b/"
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
resolve("file:///a/b/c/", &["file:///a/b/c/d", "file:///a/b/c/e"]),
|
|
||||||
"file:///a/b/"
|
|
||||||
);
|
|
||||||
assert_eq!(resolve("file:///", &["file:///a/b/c/d"]), "file:///");
|
|
||||||
if cfg!(windows) {
|
|
||||||
assert_eq!(resolve("file:///c:/", &["file:///c:/test"]), "file:///c:/");
|
|
||||||
// this will ignore the other one because it's on a separate drive
|
|
||||||
assert_eq!(
|
|
||||||
resolve("file:///c:/a/b/c/", &["file:///v:/a/b/c/d"]),
|
|
||||||
"file:///c:/a/b/"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,6 +6,7 @@ use crate::args::FileFlags;
|
||||||
use crate::args::Flags;
|
use crate::args::Flags;
|
||||||
use crate::cdp;
|
use crate::cdp;
|
||||||
use crate::factory::CliFactory;
|
use crate::factory::CliFactory;
|
||||||
|
use crate::file_fetcher::TextDecodedFile;
|
||||||
use crate::tools::fmt::format_json;
|
use crate::tools::fmt::format_json;
|
||||||
use crate::tools::test::is_supported_test_path;
|
use crate::tools::test::is_supported_test_path;
|
||||||
use crate::util::text_encoding::source_map_from_code;
|
use crate::util::text_encoding::source_map_from_code;
|
||||||
|
@ -197,7 +198,7 @@ pub struct CoverageReport {
|
||||||
fn generate_coverage_report(
|
fn generate_coverage_report(
|
||||||
script_coverage: &cdp::ScriptCoverage,
|
script_coverage: &cdp::ScriptCoverage,
|
||||||
script_source: String,
|
script_source: String,
|
||||||
maybe_source_map: &Option<Vec<u8>>,
|
maybe_source_map: Option<&[u8]>,
|
||||||
output: &Option<PathBuf>,
|
output: &Option<PathBuf>,
|
||||||
) -> CoverageReport {
|
) -> CoverageReport {
|
||||||
let maybe_source_map = maybe_source_map
|
let maybe_source_map = maybe_source_map
|
||||||
|
@ -559,6 +560,12 @@ pub fn cover_files(
|
||||||
},
|
},
|
||||||
None => None,
|
None => None,
|
||||||
};
|
};
|
||||||
|
let get_message = |specifier: &ModuleSpecifier| -> String {
|
||||||
|
format!(
|
||||||
|
"Failed to fetch \"{}\" from cache. Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
|
||||||
|
specifier,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
for script_coverage in script_coverages {
|
for script_coverage in script_coverages {
|
||||||
let module_specifier = deno_core::resolve_url_or_path(
|
let module_specifier = deno_core::resolve_url_or_path(
|
||||||
|
@ -566,21 +573,14 @@ pub fn cover_files(
|
||||||
cli_options.initial_cwd(),
|
cli_options.initial_cwd(),
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
let maybe_file = if module_specifier.scheme() == "file" {
|
let maybe_file_result = file_fetcher
|
||||||
file_fetcher.get_source(&module_specifier)
|
.get_cached_source_or_local(&module_specifier)
|
||||||
} else {
|
.map_err(AnyError::from);
|
||||||
file_fetcher
|
let file = match maybe_file_result {
|
||||||
.fetch_cached(&module_specifier, 10)
|
Ok(Some(file)) => TextDecodedFile::decode(file)?,
|
||||||
.with_context(|| {
|
Ok(None) => return Err(anyhow!("{}", get_message(&module_specifier))),
|
||||||
format!("Failed to fetch \"{module_specifier}\" from cache.")
|
Err(err) => return Err(err).context(get_message(&module_specifier)),
|
||||||
})?
|
|
||||||
};
|
};
|
||||||
let file = maybe_file.ok_or_else(|| {
|
|
||||||
anyhow!("Failed to fetch \"{}\" from cache.
|
|
||||||
Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
|
|
||||||
module_specifier
|
|
||||||
)
|
|
||||||
})?.into_text_decoded()?;
|
|
||||||
|
|
||||||
let original_source = file.source.clone();
|
let original_source = file.source.clone();
|
||||||
// Check if file was transpiled
|
// Check if file was transpiled
|
||||||
|
@ -625,7 +625,7 @@ pub fn cover_files(
|
||||||
let coverage_report = generate_coverage_report(
|
let coverage_report = generate_coverage_report(
|
||||||
&script_coverage,
|
&script_coverage,
|
||||||
runtime_code.as_str().to_owned(),
|
runtime_code.as_str().to_owned(),
|
||||||
&source_map,
|
source_map.as_deref(),
|
||||||
&out_mode,
|
&out_mode,
|
||||||
);
|
);
|
||||||
|
|
||||||
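The cache-fetch handling earlier in this hunk distinguishes three outcomes: found, not found, and failed. A generic sketch of that Ok(Some)/Ok(None)/Err shape with anyhow; the function name and message are illustrative:

use anyhow::{anyhow, Context, Result};

fn unwrap_cached<T>(result: Result<Option<T>>, what: &str) -> Result<T> {
  match result {
    Ok(Some(value)) => Ok(value),
    Ok(None) => Err(anyhow!("Failed to fetch \"{what}\" from cache.")),
    Err(err) => {
      Err(err).with_context(|| format!("Failed to fetch \"{what}\" from cache."))
    }
  }
}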
|
|
|
@ -131,7 +131,11 @@ pub async fn doc(
|
||||||
|_| true,
|
|_| true,
|
||||||
)?;
|
)?;
|
||||||
let graph = module_graph_creator
|
let graph = module_graph_creator
|
||||||
.create_graph(GraphKind::TypesOnly, module_specifiers.clone())
|
.create_graph(
|
||||||
|
GraphKind::TypesOnly,
|
||||||
|
module_specifiers.clone(),
|
||||||
|
crate::graph_util::NpmCachingStrategy::Eager,
|
||||||
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
graph_exit_integrity_errors(&graph);
|
graph_exit_integrity_errors(&graph);
|
||||||
|
@ -209,10 +213,14 @@ pub async fn doc(
|
||||||
Default::default()
|
Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let mut main_entrypoint = None;
|
||||||
|
|
||||||
let rewrite_map =
|
let rewrite_map =
|
||||||
if let Some(config_file) = cli_options.start_dir.maybe_deno_json() {
|
if let Some(config_file) = cli_options.start_dir.maybe_deno_json() {
|
||||||
let config = config_file.to_exports_config()?;
|
let config = config_file.to_exports_config()?;
|
||||||
|
|
||||||
|
main_entrypoint = config.get_resolved(".").ok().flatten();
|
||||||
|
|
||||||
let rewrite_map = config
|
let rewrite_map = config
|
||||||
.clone()
|
.clone()
|
||||||
.into_map()
|
.into_map()
|
||||||
|
@ -240,6 +248,7 @@ pub async fn doc(
|
||||||
html_options,
|
html_options,
|
||||||
deno_ns,
|
deno_ns,
|
||||||
rewrite_map,
|
rewrite_map,
|
||||||
|
main_entrypoint,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
let modules_len = doc_nodes_by_url.len();
|
let modules_len = doc_nodes_by_url.len();
|
||||||
|
@ -334,14 +343,14 @@ impl deno_doc::html::HrefResolver for DocResolver {
|
||||||
let name = &res.req().name;
|
let name = &res.req().name;
|
||||||
Some((
|
Some((
|
||||||
format!("https://www.npmjs.com/package/{name}"),
|
format!("https://www.npmjs.com/package/{name}"),
|
||||||
name.to_owned(),
|
name.to_string(),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
"jsr" => {
|
"jsr" => {
|
||||||
let res =
|
let res =
|
||||||
deno_semver::jsr::JsrPackageReqReference::from_str(module).ok()?;
|
deno_semver::jsr::JsrPackageReqReference::from_str(module).ok()?;
|
||||||
let name = &res.req().name;
|
let name = &res.req().name;
|
||||||
Some((format!("https://jsr.io/{name}"), name.to_owned()))
|
Some((format!("https://jsr.io/{name}"), name.to_string()))
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
|
@ -383,6 +392,7 @@ fn generate_docs_directory(
|
||||||
html_options: &DocHtmlFlag,
|
html_options: &DocHtmlFlag,
|
||||||
deno_ns: std::collections::HashMap<Vec<String>, Option<Rc<ShortPath>>>,
|
deno_ns: std::collections::HashMap<Vec<String>, Option<Rc<ShortPath>>>,
|
||||||
rewrite_map: Option<IndexMap<ModuleSpecifier, String>>,
|
rewrite_map: Option<IndexMap<ModuleSpecifier, String>>,
|
||||||
|
main_entrypoint: Option<ModuleSpecifier>,
|
||||||
) -> Result<(), AnyError> {
|
) -> Result<(), AnyError> {
|
||||||
let cwd = std::env::current_dir().context("Failed to get CWD")?;
|
let cwd = std::env::current_dir().context("Failed to get CWD")?;
|
||||||
let output_dir_resolved = cwd.join(&html_options.output);
|
let output_dir_resolved = cwd.join(&html_options.output);
|
||||||
|
@ -415,7 +425,7 @@ fn generate_docs_directory(
|
||||||
|
|
||||||
let options = deno_doc::html::GenerateOptions {
|
let options = deno_doc::html::GenerateOptions {
|
||||||
package_name: html_options.name.clone(),
|
package_name: html_options.name.clone(),
|
||||||
main_entrypoint: None,
|
main_entrypoint,
|
||||||
rewrite_map,
|
rewrite_map,
|
||||||
href_resolver: Rc::new(DocResolver {
|
href_resolver: Rc::new(DocResolver {
|
||||||
deno_ns,
|
deno_ns,
|
||||||
|
@ -427,33 +437,7 @@ fn generate_docs_directory(
|
||||||
symbol_redirect_map,
|
symbol_redirect_map,
|
||||||
default_symbol_map,
|
default_symbol_map,
|
||||||
markdown_renderer: deno_doc::html::comrak::create_renderer(
|
markdown_renderer: deno_doc::html::comrak::create_renderer(
|
||||||
None,
|
None, None, None,
|
||||||
Some(Box::new(|ammonia| {
|
|
||||||
ammonia.add_allowed_classes(
|
|
||||||
"code",
|
|
||||||
&[
|
|
||||||
"language-ts",
|
|
||||||
"language-tsx",
|
|
||||||
"language-typescript",
|
|
||||||
"language-js",
|
|
||||||
"language-jsx",
|
|
||||||
"language-javascript",
|
|
||||||
"language-bash",
|
|
||||||
"language-shell",
|
|
||||||
"language-md",
|
|
||||||
"language-markdown",
|
|
||||||
"language-rs",
|
|
||||||
"language-rust",
|
|
||||||
"language-html",
|
|
||||||
"language-xml",
|
|
||||||
"language-css",
|
|
||||||
"language-json",
|
|
||||||
"language-regex",
|
|
||||||
"language-svg",
|
|
||||||
],
|
|
||||||
);
|
|
||||||
})),
|
|
||||||
None,
|
|
||||||
),
|
),
|
||||||
markdown_stripper: Rc::new(deno_doc::html::comrak::strip),
|
markdown_stripper: Rc::new(deno_doc::html::comrak::strip),
|
||||||
head_inject: Some(Rc::new(|root| {
|
head_inject: Some(Rc::new(|root| {
|
||||||
|
|
|
@ -440,8 +440,10 @@ pub fn format_html(
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
let mut typescript_config =
|
let mut typescript_config_builder =
|
||||||
get_resolved_typescript_config(fmt_options);
|
get_typescript_config_builder(fmt_options);
|
||||||
|
typescript_config_builder.file_indent_level(hints.indent_level);
|
||||||
|
let mut typescript_config = typescript_config_builder.build();
|
||||||
typescript_config.line_width = hints.print_width as u32;
|
typescript_config.line_width = hints.print_width as u32;
|
||||||
dprint_plugin_typescript::format_text(
|
dprint_plugin_typescript::format_text(
|
||||||
&path,
|
&path,
|
||||||
|
@ -919,9 +921,9 @@ fn files_str(len: usize) -> &'static str {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_resolved_typescript_config(
|
fn get_typescript_config_builder(
|
||||||
options: &FmtOptionsConfig,
|
options: &FmtOptionsConfig,
|
||||||
) -> dprint_plugin_typescript::configuration::Configuration {
|
) -> dprint_plugin_typescript::configuration::ConfigurationBuilder {
|
||||||
let mut builder =
|
let mut builder =
|
||||||
dprint_plugin_typescript::configuration::ConfigurationBuilder::new();
|
dprint_plugin_typescript::configuration::ConfigurationBuilder::new();
|
||||||
builder.deno();
|
builder.deno();
|
||||||
|
@ -953,7 +955,13 @@ fn get_resolved_typescript_config(
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
builder.build()
|
builder
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_resolved_typescript_config(
|
||||||
|
options: &FmtOptionsConfig,
|
||||||
|
) -> dprint_plugin_typescript::configuration::Configuration {
|
||||||
|
get_typescript_config_builder(options).build()
|
||||||
}
|
}
|
||||||
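The refactor above splits config resolution so format_html can adjust the builder (file indent level) and the built config (line width) for each embedded code block, while get_resolved_typescript_config keeps its old behavior. A stand-in sketch of that split with made-up types, not the dprint API:

// Stand-in types; only the shape of the refactor is shown.
struct Config {
  indent_level: u32,
  line_width: u32,
}

struct ConfigBuilder {
  indent_level: u32,
  line_width: u32,
}

impl ConfigBuilder {
  fn new() -> Self {
    Self { indent_level: 0, line_width: 80 }
  }
  fn file_indent_level(&mut self, level: u32) -> &mut Self {
    self.indent_level = level;
    self
  }
  fn build(&self) -> Config {
    Config { indent_level: self.indent_level, line_width: self.line_width }
  }
}

// Shared resolution used by both call sites.
fn shared_builder() -> ConfigBuilder {
  ConfigBuilder::new()
}

// Old entry point: unchanged behavior, just builds the shared builder.
fn resolved_config() -> Config {
  shared_builder().build()
}

// New call site: tweak before (and after) building.
fn embedded_block_config(indent_level: u32, print_width: u32) -> Config {
  let mut builder = shared_builder();
  builder.file_indent_level(indent_level);
  let mut config = builder.build();
  config.line_width = print_width;
  config
}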
|
|
||||||
fn get_resolved_markdown_config(
|
fn get_resolved_markdown_config(
|
||||||
|
@ -1075,6 +1083,7 @@ fn get_resolved_markup_fmt_config(
|
||||||
};
|
};
|
||||||
|
|
||||||
let language_options = LanguageOptions {
|
let language_options = LanguageOptions {
|
||||||
|
script_formatter: Some(markup_fmt::config::ScriptFormatter::Dprint),
|
||||||
quotes: Quotes::Double,
|
quotes: Quotes::Double,
|
||||||
format_comments: false,
|
format_comments: false,
|
||||||
script_indent: true,
|
script_indent: true,
|
||||||
|
|
|
@ -2,7 +2,6 @@
|
||||||
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
use std::fmt;
|
|
||||||
use std::fmt::Write;
|
use std::fmt::Write;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
@ -35,6 +34,7 @@ use crate::graph_util::graph_exit_integrity_errors;
|
||||||
use crate::npm::CliNpmResolver;
|
use crate::npm::CliNpmResolver;
|
||||||
use crate::npm::ManagedCliNpmResolver;
|
use crate::npm::ManagedCliNpmResolver;
|
||||||
use crate::util::checksum;
|
use crate::util::checksum;
|
||||||
|
use crate::util::display::DisplayTreeNode;
|
||||||
|
|
||||||
const JSON_SCHEMA_VERSION: u8 = 1;
|
const JSON_SCHEMA_VERSION: u8 = 1;
|
||||||
|
|
||||||
|
@ -49,19 +49,67 @@ pub async fn info(
|
||||||
let module_graph_creator = factory.module_graph_creator().await?;
|
let module_graph_creator = factory.module_graph_creator().await?;
|
||||||
let npm_resolver = factory.npm_resolver().await?;
|
let npm_resolver = factory.npm_resolver().await?;
|
||||||
let maybe_lockfile = cli_options.maybe_lockfile();
|
let maybe_lockfile = cli_options.maybe_lockfile();
|
||||||
|
let resolver = factory.workspace_resolver().await?.clone();
|
||||||
let npmrc = cli_options.npmrc();
|
let npmrc = cli_options.npmrc();
|
||||||
let resolver = factory.workspace_resolver().await?;
|
let node_resolver = factory.node_resolver().await?;
|
||||||
|
|
||||||
let cwd_url =
|
let cwd_url =
|
||||||
url::Url::from_directory_path(cli_options.initial_cwd()).unwrap();
|
url::Url::from_directory_path(cli_options.initial_cwd()).unwrap();
|
||||||
|
|
||||||
let maybe_import_specifier = if let Some(import_map) =
|
let maybe_import_specifier = if let Ok(resolved) =
|
||||||
resolver.maybe_import_map()
|
resolver.resolve(&specifier, &cwd_url)
|
||||||
{
|
{
|
||||||
if let Ok(imports_specifier) = import_map.resolve(&specifier, &cwd_url) {
|
match resolved {
|
||||||
Some(imports_specifier)
|
deno_config::workspace::MappedResolution::Normal {
|
||||||
} else {
|
specifier, ..
|
||||||
None
|
}
|
||||||
|
| deno_config::workspace::MappedResolution::ImportMap {
|
||||||
|
specifier,
|
||||||
|
..
|
||||||
|
}
|
||||||
|
| deno_config::workspace::MappedResolution::WorkspaceJsrPackage {
|
||||||
|
specifier,
|
||||||
|
..
|
||||||
|
} => Some(specifier),
|
||||||
|
deno_config::workspace::MappedResolution::WorkspaceNpmPackage {
|
||||||
|
target_pkg_json,
|
||||||
|
sub_path,
|
||||||
|
..
|
||||||
|
} => Some(node_resolver.resolve_package_subpath_from_deno_module(
|
||||||
|
target_pkg_json.clone().dir_path(),
|
||||||
|
sub_path.as_deref(),
|
||||||
|
Some(&cwd_url),
|
||||||
|
node_resolver::ResolutionMode::Import,
|
||||||
|
node_resolver::NodeResolutionKind::Execution,
|
||||||
|
)?),
|
||||||
|
deno_config::workspace::MappedResolution::PackageJson {
|
||||||
|
alias,
|
||||||
|
sub_path,
|
||||||
|
dep_result,
|
||||||
|
..
|
||||||
|
} => match dep_result.as_ref().map_err(|e| e.clone())? {
|
||||||
|
deno_package_json::PackageJsonDepValue::Workspace(version_req) => {
|
||||||
|
let pkg_folder = resolver
|
||||||
|
.resolve_workspace_pkg_json_folder_for_pkg_json_dep(
|
||||||
|
alias,
|
||||||
|
version_req,
|
||||||
|
)?;
|
||||||
|
Some(node_resolver.resolve_package_subpath_from_deno_module(
|
||||||
|
pkg_folder,
|
||||||
|
sub_path.as_deref(),
|
||||||
|
Some(&cwd_url),
|
||||||
|
node_resolver::ResolutionMode::Import,
|
||||||
|
node_resolver::NodeResolutionKind::Execution,
|
||||||
|
)?)
|
||||||
|
}
|
||||||
|
deno_package_json::PackageJsonDepValue::Req(req) => {
|
||||||
|
Some(ModuleSpecifier::parse(&format!(
|
||||||
|
"npm:{}{}",
|
||||||
|
req,
|
||||||
|
sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
|
||||||
|
))?)
|
||||||
|
}
|
||||||
|
},
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
|
@ -75,7 +123,12 @@ pub async fn info(
|
||||||
let mut loader = module_graph_builder.create_graph_loader();
|
let mut loader = module_graph_builder.create_graph_loader();
|
||||||
loader.enable_loading_cache_info(); // for displaying the cache information
|
loader.enable_loading_cache_info(); // for displaying the cache information
|
||||||
let graph = module_graph_creator
|
let graph = module_graph_creator
|
||||||
.create_graph_with_loader(GraphKind::All, vec![specifier], &mut loader)
|
.create_graph_with_loader(
|
||||||
|
GraphKind::All,
|
||||||
|
vec![specifier],
|
||||||
|
&mut loader,
|
||||||
|
crate::graph_util::NpmCachingStrategy::Eager,
|
||||||
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
// write out the lockfile if there is one
|
// write out the lockfile if there is one
|
||||||
|
@ -225,8 +278,10 @@ fn add_npm_packages_to_json(
|
||||||
});
|
});
|
||||||
if let Some(pkg) = maybe_package {
|
if let Some(pkg) = maybe_package {
|
||||||
if let Some(module) = module.as_object_mut() {
|
if let Some(module) = module.as_object_mut() {
|
||||||
module
|
module.insert(
|
||||||
.insert("npmPackage".to_string(), pkg.id.as_serialized().into());
|
"npmPackage".to_string(),
|
||||||
|
pkg.id.as_serialized().into_string().into(),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -235,22 +290,31 @@ fn add_npm_packages_to_json(
|
||||||
.get_mut("dependencies")
|
.get_mut("dependencies")
|
||||||
.and_then(|d| d.as_array_mut());
|
.and_then(|d| d.as_array_mut());
|
||||||
if let Some(dependencies) = dependencies {
|
if let Some(dependencies) = dependencies {
|
||||||
for dep in dependencies.iter_mut() {
|
for dep in dependencies.iter_mut().flat_map(|d| d.as_object_mut()) {
|
||||||
if let serde_json::Value::Object(dep) = dep {
|
if let Some(specifier) = dep.get("specifier").and_then(|s| s.as_str())
|
||||||
let specifier = dep.get("specifier").and_then(|s| s.as_str());
|
{
|
||||||
if let Some(specifier) = specifier {
|
if let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier) {
|
||||||
if let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier) {
|
if let Ok(pkg) = snapshot.resolve_pkg_from_pkg_req(npm_ref.req())
|
||||||
if let Ok(pkg) =
|
{
|
||||||
snapshot.resolve_pkg_from_pkg_req(npm_ref.req())
|
dep.insert(
|
||||||
{
|
"npmPackage".to_string(),
|
||||||
dep.insert(
|
pkg.id.as_serialized().into_string().into(),
|
||||||
"npmPackage".to_string(),
|
);
|
||||||
pkg.id.as_serialized().into(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// don't show this in the output unless someone needs it
|
||||||
|
if let Some(code) =
|
||||||
|
dep.get_mut("code").and_then(|c| c.as_object_mut())
|
||||||
|
{
|
||||||
|
code.remove("resolutionMode");
|
||||||
|
}
|
||||||
|
if let Some(types) =
|
||||||
|
dep.get_mut("types").and_then(|c| c.as_object_mut())
|
||||||
|
{
|
||||||
|
types.remove("resolutionMode");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -262,94 +326,24 @@ fn add_npm_packages_to_json(
   let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
   for pkg in sorted_packages {
     let mut kv = serde_json::Map::new();
-    kv.insert("name".to_string(), pkg.id.nv.name.clone().into());
+    kv.insert("name".to_string(), pkg.id.nv.name.to_string().into());
     kv.insert("version".to_string(), pkg.id.nv.version.to_string().into());
     let mut deps = pkg.dependencies.values().collect::<Vec<_>>();
     deps.sort();
     let deps = deps
       .into_iter()
-      .map(|id| serde_json::Value::String(id.as_serialized()))
+      .map(|id| serde_json::Value::String(id.as_serialized().into_string()))
       .collect::<Vec<_>>();
     kv.insert("dependencies".to_string(), deps.into());
     let registry_url = npmrc.get_registry_url(&pkg.id.nv.name);
     kv.insert("registryUrl".to_string(), registry_url.to_string().into());
 
-    json_packages.insert(pkg.id.as_serialized(), kv.into());
+    json_packages.insert(pkg.id.as_serialized().into_string(), kv.into());
   }
 
   json.insert("npmPackages".to_string(), json_packages.into());
 }
 
-struct TreeNode {
-  text: String,
-  children: Vec<TreeNode>,
-}
-
-impl TreeNode {
-  pub fn from_text(text: String) -> Self {
-    Self {
-      text,
-      children: Default::default(),
-    }
-  }
-}
-
-fn print_tree_node<TWrite: Write>(
-  tree_node: &TreeNode,
-  writer: &mut TWrite,
-) -> fmt::Result {
-  fn print_children<TWrite: Write>(
-    writer: &mut TWrite,
-    prefix: &str,
-    children: &[TreeNode],
-  ) -> fmt::Result {
-    const SIBLING_CONNECTOR: char = '├';
-    const LAST_SIBLING_CONNECTOR: char = '└';
-    const CHILD_DEPS_CONNECTOR: char = '┬';
-    const CHILD_NO_DEPS_CONNECTOR: char = '─';
-    const VERTICAL_CONNECTOR: char = '│';
-    const EMPTY_CONNECTOR: char = ' ';
-
-    let child_len = children.len();
-    for (index, child) in children.iter().enumerate() {
-      let is_last = index + 1 == child_len;
-      let sibling_connector = if is_last {
-        LAST_SIBLING_CONNECTOR
-      } else {
-        SIBLING_CONNECTOR
-      };
-      let child_connector = if child.children.is_empty() {
-        CHILD_NO_DEPS_CONNECTOR
-      } else {
-        CHILD_DEPS_CONNECTOR
-      };
-      writeln!(
-        writer,
-        "{} {}",
-        colors::gray(format!("{prefix}{sibling_connector}─{child_connector}")),
-        child.text
-      )?;
-      let child_prefix = format!(
-        "{}{}{}",
-        prefix,
-        if is_last {
-          EMPTY_CONNECTOR
-        } else {
-          VERTICAL_CONNECTOR
-        },
-        EMPTY_CONNECTOR
-      );
-      print_children(writer, &child_prefix, &child.children)?;
-    }
-
-    Ok(())
-  }
-
-  writeln!(writer, "{}", tree_node.text)?;
-  print_children(writer, "", &tree_node.children)?;
-  Ok(())
-}
-
 /// Precached information about npm packages that are used in deno info.
 #[derive(Default)]
 struct NpmInfo {
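The hunk above deletes the old TreeNode type and the free print_tree_node function; the hunks below call DisplayTreeNode::from_text, mutate its text field, and call root_node.print(writer). DisplayTreeNode's actual definition is not part of this excerpt, so the following is only an inferred sketch of its shape, reusing the connector-drawing idea from the removed code; names and details beyond the visible call sites are assumptions.

// Inferred sketch only, not the real definition from the Deno tree.
use std::fmt::{self, Write};

pub struct DisplayTreeNode {
  pub text: String,
  pub children: Vec<DisplayTreeNode>,
}

impl DisplayTreeNode {
  pub fn from_text(text: String) -> Self {
    Self { text, children: Vec::new() }
  }

  /// Prints this node followed by its children as a box-drawing tree,
  /// similar to what the removed print_tree_node did.
  pub fn print<TWrite: Write>(&self, writer: &mut TWrite) -> fmt::Result {
    fn print_children<TWrite: Write>(
      writer: &mut TWrite,
      prefix: &str,
      children: &[DisplayTreeNode],
    ) -> fmt::Result {
      let len = children.len();
      for (index, child) in children.iter().enumerate() {
        let is_last = index + 1 == len;
        let connector = if is_last { '└' } else { '├' };
        writeln!(writer, "{prefix}{connector}─ {}", child.text)?;
        let child_prefix =
          format!("{prefix}{} ", if is_last { ' ' } else { '│' });
        print_children(writer, &child_prefix, &child.children)?;
      }
      Ok(())
    }

    writeln!(writer, "{}", self.text)?;
    print_children(writer, "", &self.children)
  }
}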
@@ -506,7 +500,7 @@ impl<'a> GraphDisplayContext<'a> {
         )?;
         writeln!(writer)?;
         let root_node = self.build_module_info(root, false);
-        print_tree_node(&root_node, writer)?;
+        root_node.print(writer)?;
         Ok(())
       }
       Err(err) => {
@@ -522,7 +516,7 @@ impl<'a> GraphDisplayContext<'a> {
     }
   }
 
-  fn build_dep_info(&mut self, dep: &Dependency) -> Vec<TreeNode> {
+  fn build_dep_info(&mut self, dep: &Dependency) -> Vec<DisplayTreeNode> {
     let mut children = Vec::with_capacity(2);
     if !dep.maybe_code.is_none() {
       if let Some(child) = self.build_resolved_info(&dep.maybe_code, false) {
@@ -537,7 +531,11 @@ impl<'a> GraphDisplayContext<'a> {
     children
   }
 
-  fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode {
+  fn build_module_info(
+    &mut self,
+    module: &Module,
+    type_dep: bool,
+  ) -> DisplayTreeNode {
     enum PackageOrSpecifier {
       Package(Box<NpmResolutionPackage>),
       Specifier(ModuleSpecifier),
@@ -553,7 +551,7 @@ impl<'a> GraphDisplayContext<'a> {
       None => Specifier(module.specifier().clone()),
     };
     let was_seen = !self.seen.insert(match &package_or_specifier {
-      Package(package) => package.id.as_serialized(),
+      Package(package) => package.id.as_serialized().into_string(),
       Specifier(specifier) => specifier.to_string(),
     });
     let header_text = if was_seen {
@@ -583,7 +581,7 @@ impl<'a> GraphDisplayContext<'a> {
       format!("{} {}", header_text, maybe_size_to_text(maybe_size))
     };
 
-    let mut tree_node = TreeNode::from_text(header_text);
+    let mut tree_node = DisplayTreeNode::from_text(header_text);
 
     if !was_seen {
       match &package_or_specifier {
@@ -621,21 +619,22 @@ impl<'a> GraphDisplayContext<'a> {
   fn build_npm_deps(
     &mut self,
     package: &NpmResolutionPackage,
-  ) -> Vec<TreeNode> {
+  ) -> Vec<DisplayTreeNode> {
     let mut deps = package.dependencies.values().collect::<Vec<_>>();
     deps.sort();
     let mut children = Vec::with_capacity(deps.len());
     for dep_id in deps.into_iter() {
       let maybe_size = self.npm_info.package_sizes.get(dep_id).cloned();
       let size_str = maybe_size_to_text(maybe_size);
-      let mut child = TreeNode::from_text(format!(
+      let mut child = DisplayTreeNode::from_text(format!(
         "npm:/{} {}",
         dep_id.as_serialized(),
         size_str
       ));
       if let Some(package) = self.npm_info.packages.get(dep_id) {
         if !package.dependencies.is_empty() {
-          let was_seen = !self.seen.insert(package.id.as_serialized());
+          let was_seen =
+            !self.seen.insert(package.id.as_serialized().into_string());
           if was_seen {
             child.text = format!("{} {}", child.text, colors::gray("*"));
           } else {
|
||||||
&mut self,
|
&mut self,
|
||||||
err: &ModuleError,
|
err: &ModuleError,
|
||||||
specifier: &ModuleSpecifier,
|
specifier: &ModuleSpecifier,
|
||||||
) -> TreeNode {
|
) -> DisplayTreeNode {
|
||||||
self.seen.insert(specifier.to_string());
|
self.seen.insert(specifier.to_string());
|
||||||
match err {
|
match err {
|
||||||
ModuleError::InvalidTypeAssertion { .. } => {
|
ModuleError::InvalidTypeAssertion { .. } => {
|
||||||
|
@ -696,8 +695,8 @@ impl<'a> GraphDisplayContext<'a> {
|
||||||
&self,
|
&self,
|
||||||
specifier: &ModuleSpecifier,
|
specifier: &ModuleSpecifier,
|
||||||
error_msg: &str,
|
error_msg: &str,
|
||||||
) -> TreeNode {
|
) -> DisplayTreeNode {
|
||||||
TreeNode::from_text(format!(
|
DisplayTreeNode::from_text(format!(
|
||||||
"{} {}",
|
"{} {}",
|
||||||
colors::red(specifier),
|
colors::red(specifier),
|
||||||
colors::red_bold(error_msg)
|
colors::red_bold(error_msg)
|
||||||
|
@@ -708,7 +707,7 @@ impl<'a> GraphDisplayContext<'a> {
     &mut self,
     resolution: &Resolution,
     type_dep: bool,
-  ) -> Option<TreeNode> {
+  ) -> Option<DisplayTreeNode> {
     match resolution {
       Resolution::Ok(resolved) => {
         let specifier = &resolved.specifier;
|
||||||
Some(match self.graph.try_get(resolved_specifier) {
|
Some(match self.graph.try_get(resolved_specifier) {
|
||||||
Ok(Some(module)) => self.build_module_info(module, type_dep),
|
Ok(Some(module)) => self.build_module_info(module, type_dep),
|
||||||
Err(err) => self.build_error_info(err, resolved_specifier),
|
Err(err) => self.build_error_info(err, resolved_specifier),
|
||||||
Ok(None) => TreeNode::from_text(format!(
|
Ok(None) => DisplayTreeNode::from_text(format!(
|
||||||
"{} {}",
|
"{} {}",
|
||||||
colors::red(specifier),
|
colors::red(specifier),
|
||||||
colors::red_bold("(missing)")
|
colors::red_bold("(missing)")
|
||||||
)),
|
)),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
Resolution::Err(err) => Some(TreeNode::from_text(format!(
|
Resolution::Err(err) => Some(DisplayTreeNode::from_text(format!(
|
||||||
"{} {}",
|
"{} {}",
|
||||||
colors::italic(err.to_string()),
|
colors::italic(err.to_string()),
|
||||||
colors::red_bold("(resolve error)")
|
colors::red_bold("(resolve error)")
|
||||||
|
|
|
@@ -9,6 +9,7 @@ use crate::args::RunFlags;
 use crate::colors;
 use color_print::cformat;
 use color_print::cstr;
+use deno_config::deno_json::NodeModulesDirMode;
 use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use deno_core::serde_json::json;
@@ -251,8 +252,46 @@ Deno.test(function addTest() {
   Ok(0)
 }
 
+fn npm_name_to_create_package(name: &str) -> String {
+  let mut s = "npm:".to_string();
+
+  let mut scoped = false;
+  let mut create = false;
+
+  for (i, ch) in name.char_indices() {
+    if i == 0 {
+      if ch == '@' {
+        scoped = true;
+      } else {
+        create = true;
+        s.push_str("create-");
+      }
+    } else if scoped {
+      if ch == '/' {
+        scoped = false;
+        create = true;
+        s.push_str("/create-");
+        continue;
+      } else if ch == '@' && !create {
+        scoped = false;
+        create = true;
+        s.push_str("/create@");
+        continue;
+      }
+    }
+
+    s.push(ch);
+  }
+
+  if !create {
+    s.push_str("/create");
+  }
+
+  s
+}
+
 async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
-  let script_name = format!("npm:create-{}", name);
+  let script_name = npm_name_to_create_package(name);
 
   fn print_manual_usage(script_name: &str, args: &[String]) -> i32 {
     log::info!("{}", cformat!("You can initialize project manually by running <u>deno run {} {}</> and applying desired permissions.", script_name, args.join(" ")));
@@ -288,6 +327,7 @@ async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
     },
     allow_scripts: PackagesAllowedScripts::All,
     argv: args,
+    node_modules_dir: Some(NodeModulesDirMode::Auto),
     subcommand: DenoSubcommand::Run(RunFlags {
       script: script_name,
       ..Default::default()
@@ -334,3 +374,37 @@ fn create_file(
     Ok(())
   }
 }
+
+#[cfg(test)]
+mod test {
+  use crate::tools::init::npm_name_to_create_package;
+
+  #[test]
+  fn npm_name_to_create_package_test() {
+    // See https://docs.npmjs.com/cli/v8/commands/npm-init#description
+    assert_eq!(
+      npm_name_to_create_package("foo"),
+      "npm:create-foo".to_string()
+    );
+    assert_eq!(
+      npm_name_to_create_package("foo@1.0.0"),
+      "npm:create-foo@1.0.0".to_string()
+    );
+    assert_eq!(
+      npm_name_to_create_package("@foo"),
+      "npm:@foo/create".to_string()
+    );
+    assert_eq!(
+      npm_name_to_create_package("@foo@1.0.0"),
+      "npm:@foo/create@1.0.0".to_string()
+    );
+    assert_eq!(
+      npm_name_to_create_package("@foo/bar"),
+      "npm:@foo/create-bar".to_string()
+    );
+    assert_eq!(
+      npm_name_to_create_package("@foo/bar@1.0.0"),
+      "npm:@foo/create-bar@1.0.0".to_string()
+    );
+  }
+}
@@ -3,24 +3,23 @@
 use crate::args::resolve_no_prompt;
 use crate::args::AddFlags;
 use crate::args::CaData;
-use crate::args::CacheSetting;
 use crate::args::ConfigFlag;
 use crate::args::Flags;
 use crate::args::InstallFlags;
 use crate::args::InstallFlagsGlobal;
 use crate::args::InstallFlagsLocal;
-use crate::args::InstallKind;
 use crate::args::TypeCheckMode;
 use crate::args::UninstallFlags;
 use crate::args::UninstallKind;
 use crate::factory::CliFactory;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::graph_container::ModuleGraphContainer;
 use crate::http_util::HttpClientProvider;
 use crate::jsr::JsrFetchResolver;
 use crate::npm::NpmFetchResolver;
 use crate::util::fs::canonicalize_path_maybe_not_exists;
 
+use deno_cache_dir::file_fetcher::CacheSetting;
 use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;
 use deno_core::error::generic_error;
@@ -162,11 +161,11 @@ pub async fn infer_name_from_url(
       let npm_ref = npm_ref.into_inner();
       if let Some(sub_path) = npm_ref.sub_path {
         if !sub_path.contains('/') {
-          return Some(sub_path);
+          return Some(sub_path.to_string());
         }
       }
       if !npm_ref.req.name.contains('/') {
-        return Some(npm_ref.req.name);
+        return Some(npm_ref.req.name.into_string());
       }
       return None;
     }
@@ -344,11 +343,11 @@ pub async fn install_command(
   flags: Arc<Flags>,
   install_flags: InstallFlags,
 ) -> Result<(), AnyError> {
-  match install_flags.kind {
-    InstallKind::Global(global_flags) => {
+  match install_flags {
+    InstallFlags::Global(global_flags) => {
       install_global(flags, global_flags).await
     }
-    InstallKind::Local(local_flags) => {
+    InstallFlags::Local(local_flags) => {
       if let InstallFlagsLocal::Add(add_flags) = &local_flags {
         check_if_installs_a_single_package_globally(Some(add_flags))?;
       }
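The match above implies that InstallFlags changed from a struct carrying a kind: InstallKind field into an enum whose variants are matched on directly. A shape sketch inferred from this call site follows; the real definition lives in cli/args and is not shown in this diff, and the payload types are the InstallFlagsGlobal / InstallFlagsLocal structs imported at the top of the file.

// Inferred from the match arms above; not taken verbatim from the diff.
pub enum InstallFlags {
  Global(InstallFlagsGlobal),
  Local(InstallFlagsLocal),
}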
@@ -367,18 +366,18 @@ async fn install_global(
   let cli_options = factory.cli_options()?;
   let http_client = factory.http_client_provider();
   let deps_http_cache = factory.global_http_cache()?;
-  let mut deps_file_fetcher = FileFetcher::new(
+  let deps_file_fetcher = CliFileFetcher::new(
     deps_http_cache.clone(),
-    CacheSetting::ReloadAll,
-    true,
     http_client.clone(),
     Default::default(),
     None,
+    true,
+    CacheSetting::ReloadAll,
+    log::Level::Trace,
   );
 
   let npmrc = factory.cli_options().unwrap().npmrc();
 
-  deps_file_fetcher.set_download_log_level(log::Level::Trace);
   let deps_file_fetcher = Arc::new(deps_file_fetcher);
   let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
   let npm_resolver = Arc::new(NpmFetchResolver::new(
cli/tools/lint/ast_buffer/buffer.rs (new file, 518 lines)
@@ -0,0 +1,518 @@
|
||||||
|
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use std::fmt::Display;
|
||||||
|
|
||||||
|
use deno_ast::swc::common::Span;
|
||||||
|
use deno_ast::swc::common::DUMMY_SP;
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
|
||||||
|
/// Each property has this flag to mark what kind of value it holds-
|
||||||
|
/// Plain objects and arrays are not supported yet, but could be easily
|
||||||
|
/// added if needed.
|
||||||
|
#[derive(Debug, PartialEq)]
|
||||||
|
pub enum PropFlags {
|
||||||
|
Ref,
|
||||||
|
RefArr,
|
||||||
|
String,
|
||||||
|
Bool,
|
||||||
|
Null,
|
||||||
|
Undefined,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<PropFlags> for u8 {
|
||||||
|
fn from(m: PropFlags) -> u8 {
|
||||||
|
m as u8
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TryFrom<u8> for PropFlags {
|
||||||
|
type Error = &'static str;
|
||||||
|
|
||||||
|
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||||
|
match value {
|
||||||
|
0 => Ok(PropFlags::Ref),
|
||||||
|
1 => Ok(PropFlags::RefArr),
|
||||||
|
2 => Ok(PropFlags::String),
|
||||||
|
3 => Ok(PropFlags::Bool),
|
||||||
|
4 => Ok(PropFlags::Null),
|
||||||
|
5 => Ok(PropFlags::Undefined),
|
||||||
|
_ => Err("Unknown Prop flag"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const MASK_U32_1: u32 = 0b11111111_00000000_00000000_00000000;
|
||||||
|
const MASK_U32_2: u32 = 0b00000000_11111111_00000000_00000000;
|
||||||
|
const MASK_U32_3: u32 = 0b00000000_00000000_11111111_00000000;
|
||||||
|
const MASK_U32_4: u32 = 0b00000000_00000000_00000000_11111111;
|
||||||
|
|
||||||
|
// TODO: There is probably a native Rust function to do this.
|
||||||
|
pub fn append_u32(result: &mut Vec<u8>, value: u32) {
|
||||||
|
let v1: u8 = ((value & MASK_U32_1) >> 24) as u8;
|
||||||
|
let v2: u8 = ((value & MASK_U32_2) >> 16) as u8;
|
||||||
|
let v3: u8 = ((value & MASK_U32_3) >> 8) as u8;
|
||||||
|
let v4: u8 = (value & MASK_U32_4) as u8;
|
||||||
|
|
||||||
|
result.push(v1);
|
||||||
|
result.push(v2);
|
||||||
|
result.push(v3);
|
||||||
|
result.push(v4);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn append_usize(result: &mut Vec<u8>, value: usize) {
|
||||||
|
let raw = u32::try_from(value).unwrap();
|
||||||
|
append_u32(result, raw);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write_usize(result: &mut [u8], value: usize, idx: usize) {
|
||||||
|
let raw = u32::try_from(value).unwrap();
|
||||||
|
|
||||||
|
let v1: u8 = ((raw & MASK_U32_1) >> 24) as u8;
|
||||||
|
let v2: u8 = ((raw & MASK_U32_2) >> 16) as u8;
|
||||||
|
let v3: u8 = ((raw & MASK_U32_3) >> 8) as u8;
|
||||||
|
let v4: u8 = (raw & MASK_U32_4) as u8;
|
||||||
|
|
||||||
|
result[idx] = v1;
|
||||||
|
result[idx + 1] = v2;
|
||||||
|
result[idx + 2] = v3;
|
||||||
|
result[idx + 3] = v4;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct StringTable {
|
||||||
|
id: usize,
|
||||||
|
table: IndexMap<String, usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl StringTable {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: 0,
|
||||||
|
table: IndexMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn insert(&mut self, s: &str) -> usize {
|
||||||
|
if let Some(id) = self.table.get(s) {
|
||||||
|
return *id;
|
||||||
|
}
|
||||||
|
|
||||||
|
let id = self.id;
|
||||||
|
self.id += 1;
|
||||||
|
self.table.insert(s.to_string(), id);
|
||||||
|
id
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize(&mut self) -> Vec<u8> {
|
||||||
|
let mut result: Vec<u8> = vec![];
|
||||||
|
append_u32(&mut result, self.table.len() as u32);
|
||||||
|
|
||||||
|
// Assume that it's sorted by id
|
||||||
|
for (s, _id) in &self.table {
|
||||||
|
let bytes = s.as_bytes();
|
||||||
|
append_u32(&mut result, bytes.len() as u32);
|
||||||
|
result.append(&mut bytes.to_vec());
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||||
|
pub struct NodeRef(pub usize);
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct BoolPos(pub usize);
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct FieldPos(pub usize);
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct FieldArrPos(pub usize);
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct StrPos(pub usize);
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct UndefPos(pub usize);
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct NullPos(pub usize);
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum NodePos {
|
||||||
|
Bool(BoolPos),
|
||||||
|
#[allow(dead_code)]
|
||||||
|
Field(FieldPos),
|
||||||
|
#[allow(dead_code)]
|
||||||
|
FieldArr(FieldArrPos),
|
||||||
|
Str(StrPos),
|
||||||
|
Undef(UndefPos),
|
||||||
|
#[allow(dead_code)]
|
||||||
|
Null(NullPos),
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait AstBufSerializer<K, P>
|
||||||
|
where
|
||||||
|
K: Into<u8> + Display,
|
||||||
|
P: Into<u8> + Display,
|
||||||
|
{
|
||||||
|
fn header(
|
||||||
|
&mut self,
|
||||||
|
kind: K,
|
||||||
|
parent: NodeRef,
|
||||||
|
span: &Span,
|
||||||
|
prop_count: usize,
|
||||||
|
) -> NodeRef;
|
||||||
|
fn ref_field(&mut self, prop: P) -> FieldPos;
|
||||||
|
fn ref_vec_field(&mut self, prop: P, len: usize) -> FieldArrPos;
|
||||||
|
fn str_field(&mut self, prop: P) -> StrPos;
|
||||||
|
fn bool_field(&mut self, prop: P) -> BoolPos;
|
||||||
|
fn undefined_field(&mut self, prop: P) -> UndefPos;
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn null_field(&mut self, prop: P) -> NullPos;
|
||||||
|
|
||||||
|
fn write_ref(&mut self, pos: FieldPos, value: NodeRef);
|
||||||
|
fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>);
|
||||||
|
fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>);
|
||||||
|
fn write_str(&mut self, pos: StrPos, value: &str);
|
||||||
|
fn write_bool(&mut self, pos: BoolPos, value: bool);
|
||||||
|
|
||||||
|
fn serialize(&mut self) -> Vec<u8>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct SerializeCtx {
|
||||||
|
buf: Vec<u8>,
|
||||||
|
start_buf: NodeRef,
|
||||||
|
str_table: StringTable,
|
||||||
|
kind_map: Vec<usize>,
|
||||||
|
prop_map: Vec<usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This is the internal context used to allocate and fill the buffer. The point
|
||||||
|
/// is to be able to write absolute offsets directly in place.
|
||||||
|
///
|
||||||
|
/// The typical workflow is to reserve all necessary space for the currrent
|
||||||
|
/// node with placeholders for the offsets of the child nodes. Once child
|
||||||
|
/// nodes have been traversed, we know their offsets and can replace the
|
||||||
|
/// placeholder values with the actual ones.
|
||||||
|
impl SerializeCtx {
|
||||||
|
pub fn new(kind_len: u8, prop_len: u8) -> Self {
|
||||||
|
let kind_size = kind_len as usize;
|
||||||
|
let prop_size = prop_len as usize;
|
||||||
|
let mut ctx = Self {
|
||||||
|
start_buf: NodeRef(0),
|
||||||
|
buf: vec![],
|
||||||
|
str_table: StringTable::new(),
|
||||||
|
kind_map: vec![0; kind_size + 1],
|
||||||
|
prop_map: vec![0; prop_size + 1],
|
||||||
|
};
|
||||||
|
|
||||||
|
ctx.str_table.insert("");
|
||||||
|
|
||||||
|
// Placeholder node is always 0
|
||||||
|
ctx.append_node(0, NodeRef(0), &DUMMY_SP, 0);
|
||||||
|
ctx.kind_map[0] = 0;
|
||||||
|
ctx.start_buf = NodeRef(ctx.buf.len());
|
||||||
|
|
||||||
|
// Insert default props that are always present
|
||||||
|
let type_str = ctx.str_table.insert("type");
|
||||||
|
let parent_str = ctx.str_table.insert("parent");
|
||||||
|
let range_str = ctx.str_table.insert("range");
|
||||||
|
let length_str = ctx.str_table.insert("length");
|
||||||
|
|
||||||
|
// These values are expected to be in this order on the JS side
|
||||||
|
ctx.prop_map[0] = type_str;
|
||||||
|
ctx.prop_map[1] = parent_str;
|
||||||
|
ctx.prop_map[2] = range_str;
|
||||||
|
ctx.prop_map[3] = length_str;
|
||||||
|
|
||||||
|
ctx
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allocate a node's header
|
||||||
|
fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
|
||||||
|
where
|
||||||
|
P: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
let offset = self.buf.len();
|
||||||
|
|
||||||
|
let n: u8 = prop.clone().into();
|
||||||
|
self.buf.push(n);
|
||||||
|
|
||||||
|
if let Some(v) = self.prop_map.get::<usize>(n.into()) {
|
||||||
|
if *v == 0 {
|
||||||
|
let id = self.str_table.insert(&format!("{prop}"));
|
||||||
|
self.prop_map[n as usize] = id;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let flags: u8 = prop_flags.into();
|
||||||
|
self.buf.push(flags);
|
||||||
|
|
||||||
|
offset
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allocate a property pointing to another node.
|
||||||
|
fn field<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
|
||||||
|
where
|
||||||
|
P: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
let offset = self.field_header(prop, prop_flags);
|
||||||
|
|
||||||
|
append_usize(&mut self.buf, 0);
|
||||||
|
|
||||||
|
offset
|
||||||
|
}
|
||||||
|
|
||||||
|
fn append_node(
|
||||||
|
&mut self,
|
||||||
|
kind: u8,
|
||||||
|
parent: NodeRef,
|
||||||
|
span: &Span,
|
||||||
|
prop_count: usize,
|
||||||
|
) -> NodeRef {
|
||||||
|
let offset = self.buf.len();
|
||||||
|
|
||||||
|
// Node type fits in a u8
|
||||||
|
self.buf.push(kind);
|
||||||
|
|
||||||
|
// Offset to the parent node. Will be 0 if none exists
|
||||||
|
append_usize(&mut self.buf, parent.0);
|
||||||
|
|
||||||
|
// Span, the start and end location of this node
|
||||||
|
append_u32(&mut self.buf, span.lo.0);
|
||||||
|
append_u32(&mut self.buf, span.hi.0);
|
||||||
|
|
||||||
|
// No node has more than <10 properties
|
||||||
|
debug_assert!(prop_count < 10);
|
||||||
|
self.buf.push(prop_count as u8);
|
||||||
|
|
||||||
|
NodeRef(offset)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allocate the node header. It's always the same for every node.
|
||||||
|
/// <type u8>
|
||||||
|
/// <parent offset u32>
|
||||||
|
/// <span lo u32>
|
||||||
|
/// <span high u32>
|
||||||
|
/// <property count u8> (There is no node with more than 10 properties)
|
||||||
|
pub fn header<N>(
|
||||||
|
&mut self,
|
||||||
|
kind: N,
|
||||||
|
parent: NodeRef,
|
||||||
|
span: &Span,
|
||||||
|
prop_count: usize,
|
||||||
|
) -> NodeRef
|
||||||
|
where
|
||||||
|
N: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
let n: u8 = kind.clone().into();
|
||||||
|
|
||||||
|
if let Some(v) = self.kind_map.get::<usize>(n.into()) {
|
||||||
|
if *v == 0 {
|
||||||
|
let id = self.str_table.insert(&format!("{kind}"));
|
||||||
|
self.kind_map[n as usize] = id;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.append_node(n, parent, span, prop_count)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allocate a reference property that will hold the offset of
|
||||||
|
/// another node.
|
||||||
|
pub fn ref_field<P>(&mut self, prop: P) -> usize
|
||||||
|
where
|
||||||
|
P: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
self.field(prop, PropFlags::Ref)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allocate a property that is a vec of node offsets pointing to other
|
||||||
|
/// nodes.
|
||||||
|
pub fn ref_vec_field<P>(&mut self, prop: P, len: usize) -> usize
|
||||||
|
where
|
||||||
|
P: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
let offset = self.field(prop, PropFlags::RefArr);
|
||||||
|
|
||||||
|
for _ in 0..len {
|
||||||
|
append_u32(&mut self.buf, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
offset
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allocate a property representing a string. Strings are deduplicated
|
||||||
|
// in the message and the property will only contain the string id.
|
||||||
|
pub fn str_field<P>(&mut self, prop: P) -> usize
|
||||||
|
where
|
||||||
|
P: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
self.field(prop, PropFlags::String)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allocate a bool field
|
||||||
|
pub fn bool_field<P>(&mut self, prop: P) -> usize
|
||||||
|
where
|
||||||
|
P: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
let offset = self.field_header(prop, PropFlags::Bool);
|
||||||
|
self.buf.push(0);
|
||||||
|
offset
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allocate an undefined field
|
||||||
|
pub fn undefined_field<P>(&mut self, prop: P) -> usize
|
||||||
|
where
|
||||||
|
P: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
self.field_header(prop, PropFlags::Undefined)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allocate an undefined field
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn null_field<P>(&mut self, prop: P) -> usize
|
||||||
|
where
|
||||||
|
P: Into<u8> + Display + Clone,
|
||||||
|
{
|
||||||
|
self.field_header(prop, PropFlags::Null)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Replace the placeholder of a reference field with the actual offset
|
||||||
|
/// to the node we want to point to.
|
||||||
|
pub fn write_ref(&mut self, field_offset: usize, value: NodeRef) {
|
||||||
|
#[cfg(debug_assertions)]
|
||||||
|
{
|
||||||
|
let value_kind = self.buf[field_offset + 1];
|
||||||
|
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
|
||||||
|
panic!("Trying to write a ref into a non-ref field")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
write_usize(&mut self.buf, value.0, field_offset + 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Helper for writing optional node offsets
|
||||||
|
pub fn write_maybe_ref(
|
||||||
|
&mut self,
|
||||||
|
field_offset: usize,
|
||||||
|
value: Option<NodeRef>,
|
||||||
|
) {
|
||||||
|
#[cfg(debug_assertions)]
|
||||||
|
{
|
||||||
|
let value_kind = self.buf[field_offset + 1];
|
||||||
|
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
|
||||||
|
panic!("Trying to write a ref into a non-ref field")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let ref_value = if let Some(v) = value { v } else { NodeRef(0) };
|
||||||
|
write_usize(&mut self.buf, ref_value.0, field_offset + 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a vec of node offsets into the property. The necessary space
|
||||||
|
/// has been reserved earlier.
|
||||||
|
pub fn write_refs(&mut self, field_offset: usize, value: Vec<NodeRef>) {
|
||||||
|
#[cfg(debug_assertions)]
|
||||||
|
{
|
||||||
|
let value_kind = self.buf[field_offset + 1];
|
||||||
|
if PropFlags::try_from(value_kind).unwrap() != PropFlags::RefArr {
|
||||||
|
panic!("Trying to write a ref into a non-ref array field")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut offset = field_offset + 2;
|
||||||
|
write_usize(&mut self.buf, value.len(), offset);
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
for item in value {
|
||||||
|
write_usize(&mut self.buf, item.0, offset);
|
||||||
|
offset += 4;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Store the string in our string table and save the id of the string
|
||||||
|
/// in the current field.
|
||||||
|
pub fn write_str(&mut self, field_offset: usize, value: &str) {
|
||||||
|
#[cfg(debug_assertions)]
|
||||||
|
{
|
||||||
|
let value_kind = self.buf[field_offset + 1];
|
||||||
|
if PropFlags::try_from(value_kind).unwrap() != PropFlags::String {
|
||||||
|
panic!("Trying to write a ref into a non-string field")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let id = self.str_table.insert(value);
|
||||||
|
write_usize(&mut self.buf, id, field_offset + 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a bool to a field.
|
||||||
|
pub fn write_bool(&mut self, field_offset: usize, value: bool) {
|
||||||
|
#[cfg(debug_assertions)]
|
||||||
|
{
|
||||||
|
let value_kind = self.buf[field_offset + 1];
|
||||||
|
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Bool {
|
||||||
|
panic!("Trying to write a ref into a non-bool field")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.buf[field_offset + 2] = if value { 1 } else { 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Serialize all information we have into a buffer that can be sent to JS.
|
||||||
|
/// It has the following structure:
|
||||||
|
///
|
||||||
|
/// <...ast>
|
||||||
|
/// <string table>
|
||||||
|
/// <node kind map> <- node kind id maps to string id
|
||||||
|
/// <node prop map> <- node property id maps to string id
|
||||||
|
/// <offset kind map>
|
||||||
|
/// <offset prop map>
|
||||||
|
/// <offset str table>
|
||||||
|
pub fn serialize(&mut self) -> Vec<u8> {
|
||||||
|
let mut buf: Vec<u8> = vec![];
|
||||||
|
|
||||||
|
// The buffer starts with the serialized AST first, because that
|
||||||
|
// contains absolute offsets. By butting this at the start of the
|
||||||
|
// message we don't have to waste time updating any offsets.
|
||||||
|
buf.append(&mut self.buf);
|
||||||
|
|
||||||
|
// Next follows the string table. We'll keep track of the offset
|
||||||
|
// in the message of where the string table begins
|
||||||
|
let offset_str_table = buf.len();
|
||||||
|
|
||||||
|
// Serialize string table
|
||||||
|
buf.append(&mut self.str_table.serialize());
|
||||||
|
|
||||||
|
// Next, serialize the mappings of kind -> string of encountered
|
||||||
|
// nodes in the AST. We use this additional lookup table to compress
|
||||||
|
// the message so that we can save space by using a u8 . All nodes of
|
||||||
|
// JS, TS and JSX together are <200
|
||||||
|
let offset_kind_map = buf.len();
|
||||||
|
|
||||||
|
// Write the total number of entries in the kind -> str mapping table
|
||||||
|
// TODO: make this a u8
|
||||||
|
append_usize(&mut buf, self.kind_map.len());
|
||||||
|
for v in &self.kind_map {
|
||||||
|
append_usize(&mut buf, *v);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store offset to prop -> string map. It's the same as with node kind
|
||||||
|
// as the total number of properties is <120 which allows us to store it
|
||||||
|
// as u8.
|
||||||
|
let offset_prop_map = buf.len();
|
||||||
|
// Write the total number of entries in the kind -> str mapping table
|
||||||
|
append_usize(&mut buf, self.prop_map.len());
|
||||||
|
for v in &self.prop_map {
|
||||||
|
append_usize(&mut buf, *v);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Putting offsets of relevant parts of the buffer at the end. This
|
||||||
|
// allows us to hop to the relevant part by merely looking at the last
|
||||||
|
// for values in the message. Each value represents an offset into the
|
||||||
|
// buffer.
|
||||||
|
append_usize(&mut buf, offset_kind_map);
|
||||||
|
append_usize(&mut buf, offset_prop_map);
|
||||||
|
append_usize(&mut buf, offset_str_table);
|
||||||
|
append_usize(&mut buf, self.start_buf.0);
|
||||||
|
|
||||||
|
buf
|
||||||
|
}
|
||||||
|
}
|
cli/tools/lint/ast_buffer/mod.rs (new file, 13 lines)
@@ -0,0 +1,13 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use deno_ast::ParsedSource;
+use swc::serialize_swc_to_buffer;
+
+mod buffer;
+mod swc;
+mod ts_estree;
+
+pub fn serialize_ast_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
+  // TODO: We could support multiple languages here
+  serialize_swc_to_buffer(parsed_source)
+}
cli/tools/lint/ast_buffer/swc.rs (new file, 3018 lines; diff suppressed because it is too large)
cli/tools/lint/ast_buffer/ts_estree.rs (new file, 515 lines)
@@ -0,0 +1,515 @@
|
||||||
|
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use std::fmt;
|
||||||
|
use std::fmt::Debug;
|
||||||
|
use std::fmt::Display;
|
||||||
|
|
||||||
|
use deno_ast::swc::common::Span;
|
||||||
|
|
||||||
|
use super::buffer::AstBufSerializer;
|
||||||
|
use super::buffer::BoolPos;
|
||||||
|
use super::buffer::FieldArrPos;
|
||||||
|
use super::buffer::FieldPos;
|
||||||
|
use super::buffer::NodeRef;
|
||||||
|
use super::buffer::NullPos;
|
||||||
|
use super::buffer::SerializeCtx;
|
||||||
|
use super::buffer::StrPos;
|
||||||
|
use super::buffer::UndefPos;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum AstNode {
|
||||||
|
// First node must always be the empty/invalid node
|
||||||
|
Invalid,
|
||||||
|
// Typically the
|
||||||
|
Program,
|
||||||
|
|
||||||
|
// Module declarations
|
||||||
|
ExportAllDeclaration,
|
||||||
|
ExportDefaultDeclaration,
|
||||||
|
ExportNamedDeclaration,
|
||||||
|
ImportDeclaration,
|
||||||
|
TsExportAssignment,
|
||||||
|
TsImportEquals,
|
||||||
|
TsNamespaceExport,
|
||||||
|
|
||||||
|
// Decls
|
||||||
|
ClassDeclaration,
|
||||||
|
FunctionDeclaration,
|
||||||
|
TSEnumDeclaration,
|
||||||
|
TSInterface,
|
||||||
|
TsModule,
|
||||||
|
TsTypeAlias,
|
||||||
|
Using,
|
||||||
|
VariableDeclaration,
|
||||||
|
|
||||||
|
// Statements
|
||||||
|
BlockStatement,
|
||||||
|
BreakStatement,
|
||||||
|
ContinueStatement,
|
||||||
|
DebuggerStatement,
|
||||||
|
DoWhileStatement,
|
||||||
|
EmptyStatement,
|
||||||
|
ExpressionStatement,
|
||||||
|
ForInStatement,
|
||||||
|
ForOfStatement,
|
||||||
|
ForStatement,
|
||||||
|
IfStatement,
|
||||||
|
LabeledStatement,
|
||||||
|
ReturnStatement,
|
||||||
|
SwitchCase,
|
||||||
|
SwitchStatement,
|
||||||
|
ThrowStatement,
|
||||||
|
TryStatement,
|
||||||
|
WhileStatement,
|
||||||
|
WithStatement,
|
||||||
|
|
||||||
|
// Expressions
|
||||||
|
ArrayExpression,
|
||||||
|
ArrowFunctionExpression,
|
||||||
|
AssignmentExpression,
|
||||||
|
AwaitExpression,
|
||||||
|
BinaryExpression,
|
||||||
|
CallExpression,
|
||||||
|
ChainExpression,
|
||||||
|
ClassExpression,
|
||||||
|
ConditionalExpression,
|
||||||
|
FunctionExpression,
|
||||||
|
Identifier,
|
||||||
|
ImportExpression,
|
||||||
|
LogicalExpression,
|
||||||
|
MemberExpression,
|
||||||
|
MetaProp,
|
||||||
|
NewExpression,
|
||||||
|
ObjectExpression,
|
||||||
|
PrivateIdentifier,
|
||||||
|
SequenceExpression,
|
||||||
|
Super,
|
||||||
|
TaggedTemplateExpression,
|
||||||
|
TemplateLiteral,
|
||||||
|
ThisExpression,
|
||||||
|
TSAsExpression,
|
||||||
|
TsConstAssertion,
|
||||||
|
TsInstantiation,
|
||||||
|
TSNonNullExpression,
|
||||||
|
TSSatisfiesExpression,
|
||||||
|
TSTypeAssertion,
|
||||||
|
UnaryExpression,
|
||||||
|
UpdateExpression,
|
||||||
|
YieldExpression,
|
||||||
|
|
||||||
|
// TODO: TSEsTree uses a single literal node
|
||||||
|
// Literals
|
||||||
|
StringLiteral,
|
||||||
|
Bool,
|
||||||
|
Null,
|
||||||
|
NumericLiteral,
|
||||||
|
BigIntLiteral,
|
||||||
|
RegExpLiteral,
|
||||||
|
|
||||||
|
EmptyExpr,
|
||||||
|
SpreadElement,
|
||||||
|
Property,
|
||||||
|
VariableDeclarator,
|
||||||
|
CatchClause,
|
||||||
|
RestElement,
|
||||||
|
ExportSpecifier,
|
||||||
|
TemplateElement,
|
||||||
|
MethodDefinition,
|
||||||
|
ClassBody,
|
||||||
|
|
||||||
|
// Patterns
|
||||||
|
ArrayPattern,
|
||||||
|
AssignmentPattern,
|
||||||
|
ObjectPattern,
|
||||||
|
|
||||||
|
// JSX
|
||||||
|
JSXAttribute,
|
||||||
|
JSXClosingElement,
|
||||||
|
JSXClosingFragment,
|
||||||
|
JSXElement,
|
||||||
|
JSXEmptyExpression,
|
||||||
|
JSXExpressionContainer,
|
||||||
|
JSXFragment,
|
||||||
|
JSXIdentifier,
|
||||||
|
JSXMemberExpression,
|
||||||
|
JSXNamespacedName,
|
||||||
|
JSXOpeningElement,
|
||||||
|
JSXOpeningFragment,
|
||||||
|
JSXSpreadAttribute,
|
||||||
|
JSXSpreadChild,
|
||||||
|
JSXText,
|
||||||
|
|
||||||
|
TSTypeAnnotation,
|
||||||
|
TSTypeParameterDeclaration,
|
||||||
|
TSTypeParameter,
|
||||||
|
TSTypeParameterInstantiation,
|
||||||
|
TSEnumMember,
|
||||||
|
TSInterfaceBody,
|
||||||
|
TSInterfaceHeritage,
|
||||||
|
TSTypeReference,
|
||||||
|
TSThisType,
|
||||||
|
TSLiteralType,
|
||||||
|
TSInferType,
|
||||||
|
TSConditionalType,
|
||||||
|
TSUnionType,
|
||||||
|
TSIntersectionType,
|
||||||
|
TSMappedType,
|
||||||
|
TSTypeQuery,
|
||||||
|
TSTupleType,
|
||||||
|
TSNamedTupleMember,
|
||||||
|
TSFunctionType,
|
||||||
|
TsCallSignatureDeclaration,
|
||||||
|
TSPropertySignature,
|
||||||
|
TSMethodSignature,
|
||||||
|
TSIndexSignature,
|
||||||
|
TSIndexedAccessType,
|
||||||
|
TSTypeOperator,
|
||||||
|
TSTypePredicate,
|
||||||
|
TSImportType,
|
||||||
|
TSRestType,
|
||||||
|
TSArrayType,
|
||||||
|
TSClassImplements,
|
||||||
|
|
||||||
|
TSAnyKeyword,
|
||||||
|
TSBigIntKeyword,
|
||||||
|
TSBooleanKeyword,
|
||||||
|
TSIntrinsicKeyword,
|
||||||
|
TSNeverKeyword,
|
||||||
|
TSNullKeyword,
|
||||||
|
TSNumberKeyword,
|
||||||
|
TSObjectKeyword,
|
||||||
|
TSStringKeyword,
|
||||||
|
TSSymbolKeyword,
|
||||||
|
TSUndefinedKeyword,
|
||||||
|
TSUnknownKeyword,
|
||||||
|
TSVoidKeyword,
|
||||||
|
TSEnumBody, // Last value is used for max value
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for AstNode {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
Debug::fmt(self, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<AstNode> for u8 {
|
||||||
|
fn from(m: AstNode) -> u8 {
|
||||||
|
m as u8
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub enum AstProp {
|
||||||
|
// Base, these three must be in sync with JS. The
|
||||||
|
// order here for these 3 fields is important.
|
||||||
|
Type,
|
||||||
|
Parent,
|
||||||
|
Range,
|
||||||
|
Length, // Not used in AST, but can be used in attr selectors
|
||||||
|
|
||||||
|
// Starting from here the order doesn't matter.
|
||||||
|
// Following are all possible AST node properties.
|
||||||
|
Abstract,
|
||||||
|
Accessibility,
|
||||||
|
Alternate,
|
||||||
|
Argument,
|
||||||
|
Arguments,
|
||||||
|
Asserts,
|
||||||
|
Async,
|
||||||
|
Attributes,
|
||||||
|
Await,
|
||||||
|
Block,
|
||||||
|
Body,
|
||||||
|
Callee,
|
||||||
|
Cases,
|
||||||
|
Children,
|
||||||
|
CheckType,
|
||||||
|
ClosingElement,
|
||||||
|
ClosingFragment,
|
||||||
|
Computed,
|
||||||
|
Consequent,
|
||||||
|
Const,
|
||||||
|
Constraint,
|
||||||
|
Cooked,
|
||||||
|
Declaration,
|
||||||
|
Declarations,
|
||||||
|
Declare,
|
||||||
|
Default,
|
||||||
|
Definite,
|
||||||
|
Delegate,
|
||||||
|
Discriminant,
|
||||||
|
Elements,
|
||||||
|
ElementType,
|
||||||
|
ElementTypes,
|
||||||
|
ExprName,
|
||||||
|
Expression,
|
||||||
|
Expressions,
|
||||||
|
Exported,
|
||||||
|
Extends,
|
||||||
|
ExtendsType,
|
||||||
|
FalseType,
|
||||||
|
Finalizer,
|
||||||
|
Flags,
|
||||||
|
Generator,
|
||||||
|
Handler,
|
||||||
|
Id,
|
||||||
|
In,
|
||||||
|
IndexType,
|
||||||
|
Init,
|
||||||
|
Initializer,
|
||||||
|
Implements,
|
||||||
|
Key,
|
||||||
|
Kind,
|
||||||
|
Label,
|
||||||
|
Left,
|
||||||
|
Literal,
|
||||||
|
Local,
|
||||||
|
Members,
|
||||||
|
Meta,
|
||||||
|
Method,
|
||||||
|
Name,
|
||||||
|
Namespace,
|
||||||
|
NameType,
|
||||||
|
Object,
|
||||||
|
ObjectType,
|
||||||
|
OpeningElement,
|
||||||
|
OpeningFragment,
|
||||||
|
Operator,
|
||||||
|
Optional,
|
||||||
|
Out,
|
||||||
|
Param,
|
||||||
|
ParameterName,
|
||||||
|
Params,
|
||||||
|
Pattern,
|
||||||
|
Prefix,
|
||||||
|
Properties,
|
||||||
|
Property,
|
||||||
|
Qualifier,
|
||||||
|
Quasi,
|
||||||
|
Quasis,
|
||||||
|
Raw,
|
||||||
|
Readonly,
|
||||||
|
ReturnType,
|
||||||
|
Right,
|
||||||
|
SelfClosing,
|
||||||
|
Shorthand,
|
||||||
|
Source,
|
||||||
|
SourceType,
|
||||||
|
Specifiers,
|
||||||
|
Static,
|
||||||
|
SuperClass,
|
||||||
|
SuperTypeArguments,
|
||||||
|
Tag,
|
||||||
|
Tail,
|
||||||
|
Test,
|
||||||
|
TrueType,
|
||||||
|
TypeAnnotation,
|
||||||
|
TypeArguments,
|
||||||
|
TypeName,
|
||||||
|
TypeParameter,
|
||||||
|
TypeParameters,
|
||||||
|
Types,
|
||||||
|
Update,
|
||||||
|
Value, // Last value is used for max value
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Feels like there should be an easier way to iterater over an
|
||||||
|
// enum in Rust and lowercase the first letter.
|
||||||
|
impl Display for AstProp {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
let s = match self {
|
||||||
|
AstProp::Parent => "parent",
|
||||||
|
AstProp::Range => "range",
|
||||||
|
AstProp::Type => "type",
|
||||||
|
AstProp::Length => "length",
|
||||||
|
AstProp::Abstract => "abstract",
|
||||||
|
AstProp::Accessibility => "accessibility",
|
||||||
|
AstProp::Alternate => "alternate",
|
||||||
|
AstProp::Argument => "argument",
|
||||||
|
AstProp::Arguments => "arguments",
|
||||||
|
AstProp::Asserts => "asserts",
|
||||||
|
AstProp::Async => "async",
|
||||||
|
AstProp::Attributes => "attributes",
|
||||||
|
AstProp::Await => "await",
|
||||||
|
AstProp::Block => "block",
|
||||||
|
AstProp::Body => "body",
|
||||||
|
AstProp::Callee => "callee",
|
||||||
|
AstProp::Cases => "cases",
|
||||||
|
AstProp::Children => "children",
|
||||||
|
AstProp::CheckType => "checkType",
|
||||||
|
AstProp::ClosingElement => "closingElement",
|
||||||
|
AstProp::ClosingFragment => "closingFragment",
|
||||||
|
AstProp::Computed => "computed",
|
||||||
|
AstProp::Consequent => "consequent",
|
||||||
|
AstProp::Const => "const",
|
||||||
|
AstProp::Constraint => "constraint",
|
||||||
|
AstProp::Cooked => "cooked",
|
||||||
|
AstProp::Declaration => "declaration",
|
||||||
|
AstProp::Declarations => "declarations",
|
||||||
|
AstProp::Declare => "declare",
|
||||||
|
AstProp::Default => "default",
|
||||||
|
AstProp::Definite => "definite",
|
||||||
|
AstProp::Delegate => "delegate",
|
||||||
|
AstProp::Discriminant => "discriminant",
|
||||||
|
AstProp::Elements => "elements",
|
||||||
|
AstProp::ElementType => "elementType",
|
||||||
|
AstProp::ElementTypes => "elementTypes",
|
||||||
|
AstProp::ExprName => "exprName",
|
||||||
|
AstProp::Expression => "expression",
|
||||||
|
AstProp::Expressions => "expressions",
|
||||||
|
AstProp::Exported => "exported",
|
||||||
|
AstProp::Extends => "extends",
|
||||||
|
AstProp::ExtendsType => "extendsType",
|
||||||
|
AstProp::FalseType => "falseType",
|
||||||
|
AstProp::Finalizer => "finalizer",
|
||||||
|
AstProp::Flags => "flags",
|
||||||
|
AstProp::Generator => "generator",
|
||||||
|
AstProp::Handler => "handler",
|
||||||
|
AstProp::Id => "id",
|
||||||
|
AstProp::In => "in",
|
||||||
|
AstProp::IndexType => "indexType",
|
||||||
|
AstProp::Init => "init",
|
||||||
|
AstProp::Initializer => "initializer",
|
||||||
|
AstProp::Implements => "implements",
|
||||||
|
AstProp::Key => "key",
|
||||||
|
AstProp::Kind => "kind",
|
||||||
|
AstProp::Label => "label",
|
||||||
|
AstProp::Left => "left",
|
||||||
|
AstProp::Literal => "literal",
|
||||||
|
AstProp::Local => "local",
|
||||||
|
AstProp::Members => "members",
|
||||||
|
AstProp::Meta => "meta",
|
||||||
|
AstProp::Method => "method",
|
||||||
|
AstProp::Name => "name",
|
||||||
|
AstProp::Namespace => "namespace",
|
||||||
|
AstProp::NameType => "nameType",
|
||||||
|
AstProp::Object => "object",
|
||||||
|
AstProp::ObjectType => "objectType",
|
||||||
|
AstProp::OpeningElement => "openingElement",
|
||||||
|
AstProp::OpeningFragment => "openingFragment",
|
||||||
|
AstProp::Operator => "operator",
|
||||||
|
AstProp::Optional => "optional",
|
||||||
|
AstProp::Out => "out",
|
||||||
|
AstProp::Param => "param",
|
||||||
|
AstProp::ParameterName => "parameterName",
|
||||||
|
AstProp::Params => "params",
|
||||||
|
AstProp::Pattern => "pattern",
|
||||||
|
AstProp::Prefix => "prefix",
|
||||||
|
AstProp::Properties => "properties",
|
||||||
|
AstProp::Property => "property",
|
||||||
|
AstProp::Qualifier => "qualifier",
|
||||||
|
AstProp::Quasi => "quasi",
|
||||||
|
AstProp::Quasis => "quasis",
|
||||||
|
AstProp::Raw => "raw",
|
||||||
|
AstProp::Readonly => "readonly",
|
||||||
|
AstProp::ReturnType => "returnType",
|
||||||
|
AstProp::Right => "right",
|
||||||
|
AstProp::SelfClosing => "selfClosing",
|
||||||
|
AstProp::Shorthand => "shorthand",
|
||||||
|
AstProp::Source => "source",
|
||||||
|
AstProp::SourceType => "sourceType",
|
||||||
|
AstProp::Specifiers => "specifiers",
|
||||||
|
AstProp::Static => "static",
|
||||||
|
AstProp::SuperClass => "superClass",
|
||||||
|
AstProp::SuperTypeArguments => "superTypeArguments",
|
||||||
|
AstProp::Tag => "tag",
|
||||||
|
AstProp::Tail => "tail",
|
||||||
|
AstProp::Test => "test",
|
||||||
|
AstProp::TrueType => "trueType",
|
||||||
|
AstProp::TypeAnnotation => "typeAnnotation",
|
||||||
|
AstProp::TypeArguments => "typeArguments",
|
||||||
|
AstProp::TypeName => "typeName",
|
||||||
|
AstProp::TypeParameter => "typeParameter",
|
||||||
|
AstProp::TypeParameters => "typeParameters",
|
||||||
|
AstProp::Types => "types",
|
||||||
|
AstProp::Update => "update",
|
||||||
|
AstProp::Value => "value",
|
||||||
|
};
|
||||||
|
|
||||||
|
write!(f, "{}", s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<AstProp> for u8 {
|
||||||
|
fn from(m: AstProp) -> u8 {
|
||||||
|
m as u8
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct TsEsTreeBuilder {
|
||||||
|
ctx: SerializeCtx,
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Add a builder API to make it easier to convert from different source
|
||||||
|
// ast formats.
|
||||||
|
impl TsEsTreeBuilder {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
// Max values
|
||||||
|
// TODO: Maybe there is a rust macro to grab the last enum value?
|
||||||
|
let kind_count: u8 = AstNode::TSEnumBody.into();
|
||||||
|
let prop_count: u8 = AstProp::Value.into();
|
||||||
|
Self {
|
||||||
|
ctx: SerializeCtx::new(kind_count, prop_count),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AstBufSerializer<AstNode, AstProp> for TsEsTreeBuilder {
|
||||||
|
fn header(
|
||||||
|
&mut self,
|
||||||
|
kind: AstNode,
|
||||||
|
parent: NodeRef,
|
||||||
|
span: &Span,
|
||||||
|
prop_count: usize,
|
||||||
|
) -> NodeRef {
|
||||||
|
self.ctx.header(kind, parent, span, prop_count)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn ref_field(&mut self, prop: AstProp) -> FieldPos {
|
||||||
|
FieldPos(self.ctx.ref_field(prop))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn ref_vec_field(&mut self, prop: AstProp, len: usize) -> FieldArrPos {
|
||||||
|
FieldArrPos(self.ctx.ref_vec_field(prop, len))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn str_field(&mut self, prop: AstProp) -> StrPos {
|
||||||
|
StrPos(self.ctx.str_field(prop))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn bool_field(&mut self, prop: AstProp) -> BoolPos {
|
||||||
|
BoolPos(self.ctx.bool_field(prop))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn undefined_field(&mut self, prop: AstProp) -> UndefPos {
|
||||||
|
UndefPos(self.ctx.undefined_field(prop))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn null_field(&mut self, prop: AstProp) -> NullPos {
|
||||||
|
NullPos(self.ctx.null_field(prop))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_ref(&mut self, pos: FieldPos, value: NodeRef) {
|
||||||
|
self.ctx.write_ref(pos.0, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>) {
|
||||||
|
self.ctx.write_maybe_ref(pos.0, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>) {
|
||||||
|
self.ctx.write_refs(pos.0, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_str(&mut self, pos: StrPos, value: &str) {
|
||||||
|
self.ctx.write_str(pos.0, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_bool(&mut self, pos: BoolPos, value: bool) {
|
||||||
|
self.ctx.write_bool(pos.0, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serialize(&mut self) -> Vec<u8> {
|
||||||
|
self.ctx.serialize()
|
||||||
|
}
|
||||||
|
}
|
|
@@ -20,7 +20,7 @@ use deno_core::unsync::future::LocalFutureExt;
 use deno_core::unsync::future::SharedLocal;
 use deno_graph::ModuleGraph;
 use deno_lint::diagnostic::LintDiagnostic;
-use deno_lint::linter::LintConfig;
+use deno_lint::linter::LintConfig as DenoLintConfig;
 use log::debug;
 use reporters::create_reporter;
 use reporters::LintReporter;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::io::stdin;
|
use std::io::stdin;
|
||||||
use std::io::Read;
|
use std::io::Read;
|
||||||
use std::path::Path;
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
@@ -47,14 +46,18 @@ use crate::graph_util::ModuleGraphCreator;
 use crate::tools::fmt::run_parallelized;
 use crate::util::display;
 use crate::util::file_watcher;
+use crate::util::file_watcher::WatcherCommunicator;
 use crate::util::fs::canonicalize_path;
 use crate::util::path::is_script_ext;
 use crate::util::sync::AtomicFlag;
 
+mod ast_buffer;
 mod linter;
 mod reporters;
 mod rules;
 
+// TODO(bartlomieju): remove once we wire plugins through the CLI linter
+pub use ast_buffer::serialize_ast_to_buffer;
 pub use linter::CliLinter;
 pub use linter::CliLinterOptions;
 pub use rules::collect_no_slow_type_diagnostics;
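The ast_buffer module wired up above serializes the parsed AST into a single byte buffer for the planned JS lint plugins. Per SerializeCtx::serialize in cli/tools/lint/ast_buffer/buffer.rs earlier in this diff, the buffer ends with four big-endian u32 offsets: kind map, prop map, string table, and the start of the first real node. The sketch below shows how a consumer could read that trailer; it mirrors append_u32 from buffer.rs, and the actual JS-side reader is not part of this diff, so treat it as illustrative only.

// Illustrative reader for the trailer written by SerializeCtx::serialize.
// Assumes buf.len() >= 16, i.e. a buffer produced by serialize().
fn read_u32(buf: &[u8], idx: usize) -> u32 {
  u32::from_be_bytes([buf[idx], buf[idx + 1], buf[idx + 2], buf[idx + 3]])
}

fn read_trailer(buf: &[u8]) -> (u32, u32, u32, u32) {
  let n = buf.len();
  let offset_kind_map = read_u32(buf, n - 16);
  let offset_prop_map = read_u32(buf, n - 12);
  let offset_str_table = read_u32(buf, n - 8);
  let start_offset = read_u32(buf, n - 4);
  (offset_kind_map, offset_prop_map, offset_str_table, start_offset)
}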
@@ -69,136 +72,139 @@ pub async fn lint(
   flags: Arc<Flags>,
   lint_flags: LintFlags,
 ) -> Result<(), AnyError> {
-  if let Some(watch_flags) = &lint_flags.watch {
+  if lint_flags.watch.is_some() {
     if lint_flags.is_stdin() {
       return Err(generic_error(
         "Lint watch on standard input is not supported.",
       ));
     }
-    file_watcher::watch_func(
-      flags,
-      file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
-      move |flags, watcher_communicator, changed_paths| {
-        let lint_flags = lint_flags.clone();
-        watcher_communicator.show_path_changed(changed_paths.clone());
-        Ok(async move {
-          let factory = CliFactory::from_flags(flags);
-          let cli_options = factory.cli_options()?;
-          let lint_config = cli_options.resolve_deno_lint_config()?;
-          let mut paths_with_options_batches =
-            resolve_paths_with_options_batches(cli_options, &lint_flags)?;
-          for paths_with_options in &mut paths_with_options_batches {
-            _ = watcher_communicator
-              .watch_paths(paths_with_options.paths.clone());
-
-            let files = std::mem::take(&mut paths_with_options.paths);
-            paths_with_options.paths = if let Some(paths) = &changed_paths {
-              // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
-              files
-                .iter()
-                .any(|path| {
-                  canonicalize_path(path)
-                    .map(|p| paths.contains(&p))
-                    .unwrap_or(false)
-                })
-                .then_some(files)
-                .unwrap_or_else(|| [].to_vec())
-            } else {
-              files
-            };
-          }
-
-          let mut linter = WorkspaceLinter::new(
-            factory.caches()?.clone(),
-            factory.lint_rule_provider().await?,
-            factory.module_graph_creator().await?.clone(),
-            cli_options.start_dir.clone(),
-            &cli_options.resolve_workspace_lint_options(&lint_flags)?,
-          );
-          for paths_with_options in paths_with_options_batches {
-            linter
-              .lint_files(
-                cli_options,
-                paths_with_options.options,
-                lint_config.clone(),
-                paths_with_options.dir,
-                paths_with_options.paths,
-              )
-              .await?;
-          }
-
-          linter.finish();
-
-          Ok(())
-        })
-      },
-    )
-    .await?;
-  } else {
-    let factory = CliFactory::from_flags(flags);
-    let cli_options = factory.cli_options()?;
-    let is_stdin = lint_flags.is_stdin();
-    let deno_lint_config = cli_options.resolve_deno_lint_config()?;
-    let workspace_lint_options =
-      cli_options.resolve_workspace_lint_options(&lint_flags)?;
-    let success = if is_stdin {
-      let start_dir = &cli_options.start_dir;
-      let reporter_lock = Arc::new(Mutex::new(create_reporter(
-        workspace_lint_options.reporter_kind,
-      )));
-      let lint_config = start_dir
-        .to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
-      let lint_options = LintOptions::resolve(lint_config, &lint_flags);
-      let lint_rules = factory
-        .lint_rule_provider()
-        .await?
-        .resolve_lint_rules_err_empty(
-          lint_options.rules,
-          start_dir.maybe_deno_json().map(|c| c.as_ref()),
-        )?;
-      let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
-      if let Some(ext) = cli_options.ext_flag() {
-        file_path.set_extension(ext);
-      }
-      let r = lint_stdin(&file_path, lint_rules, deno_lint_config);
-      let success = handle_lint_result(
-        &file_path.to_string_lossy(),
-        r,
-        reporter_lock.clone(),
-      );
-      reporter_lock.lock().close(1);
-      success
-    } else {
-      let mut linter = WorkspaceLinter::new(
-        factory.caches()?.clone(),
-        factory.lint_rule_provider().await?,
-        factory.module_graph_creator().await?.clone(),
-        cli_options.start_dir.clone(),
-        &workspace_lint_options,
-      );
-      let paths_with_options_batches =
-        resolve_paths_with_options_batches(cli_options, &lint_flags)?;
-      for paths_with_options in paths_with_options_batches {
-        linter
-          .lint_files(
-            cli_options,
-            paths_with_options.options,
-            deno_lint_config.clone(),
-            paths_with_options.dir,
-            paths_with_options.paths,
-          )
-          .await?;
-      }
-      linter.finish()
-    };
-    if !success {
-      deno_runtime::exit(1);
-    }
+
+    return lint_with_watch(flags, lint_flags).await;
   }
+
+  let factory = CliFactory::from_flags(flags);
+  let cli_options = factory.cli_options()?;
+  let lint_rule_provider = factory.lint_rule_provider().await?;
+  let is_stdin = lint_flags.is_stdin();
+  let deno_lint_config = cli_options.resolve_deno_lint_config()?;
+  let workspace_lint_options =
+    cli_options.resolve_workspace_lint_options(&lint_flags)?;
+  let success = if is_stdin {
+    lint_stdin(
+      cli_options,
+      lint_rule_provider,
+      workspace_lint_options,
+      lint_flags,
+      deno_lint_config,
+    )?
+  } else {
+    let mut linter = WorkspaceLinter::new(
+      factory.caches()?.clone(),
+      lint_rule_provider,
+      factory.module_graph_creator().await?.clone(),
+      cli_options.start_dir.clone(),
+      &workspace_lint_options,
+    );
+    let paths_with_options_batches =
+      resolve_paths_with_options_batches(cli_options, &lint_flags)?;
+    for paths_with_options in paths_with_options_batches {
+      linter
+        .lint_files(
+          cli_options,
+          paths_with_options.options,
+          deno_lint_config.clone(),
+          paths_with_options.dir,
+          paths_with_options.paths,
+        )
+        .await?;
+    }
+
+    linter.finish()
+  };
+  if !success {
+    deno_runtime::exit(1);
+  }
 
   Ok(())
 }
+
+async fn lint_with_watch_inner(
+  flags: Arc<Flags>,
+  lint_flags: LintFlags,
+  watcher_communicator: Arc<WatcherCommunicator>,
+  changed_paths: Option<Vec<PathBuf>>,
+) -> Result<(), AnyError> {
+  let factory = CliFactory::from_flags(flags);
+  let cli_options = factory.cli_options()?;
+  let lint_config = cli_options.resolve_deno_lint_config()?;
+  let mut paths_with_options_batches =
+    resolve_paths_with_options_batches(cli_options, &lint_flags)?;
+  for paths_with_options in &mut paths_with_options_batches {
+    _ = watcher_communicator.watch_paths(paths_with_options.paths.clone());
+
+    let files = std::mem::take(&mut paths_with_options.paths);
+    paths_with_options.paths = if let Some(paths) = &changed_paths {
+      // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
+      files
+        .iter()
+        .any(|path| {
+          canonicalize_path(path)
+            .map(|p| paths.contains(&p))
+            .unwrap_or(false)
+        })
+        .then_some(files)
+        .unwrap_or_else(|| [].to_vec())
+    } else {
+      files
+    };
+  }
+
+  let mut linter = WorkspaceLinter::new(
+    factory.caches()?.clone(),
+    factory.lint_rule_provider().await?,
+    factory.module_graph_creator().await?.clone(),
+    cli_options.start_dir.clone(),
+    &cli_options.resolve_workspace_lint_options(&lint_flags)?,
+  );
+  for paths_with_options in paths_with_options_batches {
+    linter
+      .lint_files(
+        cli_options,
+        paths_with_options.options,
+        lint_config.clone(),
+        paths_with_options.dir,
+        paths_with_options.paths,
+      )
+      .await?;
+  }
+
+  linter.finish();
+
+  Ok(())
+}
+
+async fn lint_with_watch(
+  flags: Arc<Flags>,
+  lint_flags: LintFlags,
+) -> Result<(), AnyError> {
+  let watch_flags = lint_flags.watch.as_ref().unwrap();
+
+  file_watcher::watch_func(
+    flags,
+    file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
+    move |flags, watcher_communicator, changed_paths| {
+      let lint_flags = lint_flags.clone();
+      watcher_communicator.show_path_changed(changed_paths.clone());
+      Ok(lint_with_watch_inner(
+        flags,
+        lint_flags,
+        watcher_communicator,
+        changed_paths,
+      ))
+    },
+  )
+  .await
+}
 
 struct PathsWithOptions {
   dir: WorkspaceDirectory,
   paths: Vec<PathBuf>,
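The watch branch above is now a thin wrapper: the closure handed to file_watcher::watch_func returns the future of a named inner async fn instead of an inline async move block. A reduced sketch of that shape (tokio is assumed only so the example can run; the types are stand-ins):

// Illustrative only: delegate from a callback to a named async fn, as the
// refactor above does, rather than inlining the whole body in `async move`.
use std::future::Future;

async fn do_work(input: u32) -> Result<u32, String> {
  // Stand-in for the real per-run lint body.
  Ok(input * 2)
}

fn make_job(input: u32) -> impl Future<Output = Result<u32, String>> {
  // Before: `async move { /* entire body inlined here */ }`
  // After: a named async fn that can be read and tested on its own.
  do_work(input)
}

#[tokio::main]
async fn main() {
  let result = make_job(21).await;
  assert_eq!(result, Ok(42));
}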
@@ -269,7 +275,7 @@ impl WorkspaceLinter {
     &mut self,
     cli_options: &Arc<CliOptions>,
     lint_options: LintOptions,
-    lint_config: LintConfig,
+    lint_config: DenoLintConfig,
     member_dir: WorkspaceDirectory,
     paths: Vec<PathBuf>,
   ) -> Result<(), AnyError> {

@@ -294,112 +300,63 @@ impl WorkspaceLinter {
       deno_lint_config: lint_config,
     }));
 
+    let has_error = self.has_error.clone();
+    let reporter_lock = self.reporter_lock.clone();
+
     let mut futures = Vec::with_capacity(2);
     if linter.has_package_rules() {
-      if self.workspace_module_graph.is_none() {
-        let module_graph_creator = self.module_graph_creator.clone();
-        let packages = self.workspace_dir.jsr_packages_for_publish();
-        self.workspace_module_graph = Some(
-          async move {
-            module_graph_creator
-              .create_and_validate_publish_graph(&packages, true)
-              .await
-              .map(Rc::new)
-              .map_err(Rc::new)
-          }
-          .boxed_local()
-          .shared_local(),
-        );
-      }
-      let workspace_module_graph_future =
-        self.workspace_module_graph.as_ref().unwrap().clone();
-      let publish_config = member_dir.maybe_package_config();
-      if let Some(publish_config) = publish_config {
-        let has_error = self.has_error.clone();
-        let reporter_lock = self.reporter_lock.clone();
-        let linter = linter.clone();
-        let path_urls = paths
-          .iter()
-          .filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
-          .collect::<HashSet<_>>();
-        futures.push(
-          async move {
-            let graph = workspace_module_graph_future
-              .await
-              .map_err(|err| anyhow!("{:#}", err))?;
-            let export_urls =
-              publish_config.config_file.resolve_export_value_urls()?;
-            if !export_urls.iter().any(|url| path_urls.contains(url)) {
-              return Ok(()); // entrypoint is not specified, so skip
-            }
-            let diagnostics = linter.lint_package(&graph, &export_urls);
-            if !diagnostics.is_empty() {
-              has_error.raise();
-              let mut reporter = reporter_lock.lock();
-              for diagnostic in &diagnostics {
-                reporter.visit_diagnostic(diagnostic);
-              }
-            }
-            Ok(())
-          }
-          .boxed_local(),
-        );
+      if let Some(fut) = self.run_package_rules(&linter, &member_dir, &paths) {
+        futures.push(fut);
       }
     }
 
-    futures.push({
-      let has_error = self.has_error.clone();
-      let reporter_lock = self.reporter_lock.clone();
-      let maybe_incremental_cache = maybe_incremental_cache.clone();
-      let linter = linter.clone();
-      let cli_options = cli_options.clone();
-      async move {
-        run_parallelized(paths, {
-          move |file_path| {
-            let file_text =
-              deno_ast::strip_bom(fs::read_to_string(&file_path)?);
-
-            // don't bother rechecking this file if it didn't have any diagnostics before
-            if let Some(incremental_cache) = &maybe_incremental_cache {
-              if incremental_cache.is_file_same(&file_path, &file_text) {
-                return Ok(());
-              }
-            }
-
-            let r = linter.lint_file(
-              &file_path,
-              file_text,
-              cli_options.ext_flag().as_deref(),
-            );
-            if let Ok((file_source, file_diagnostics)) = &r {
-              if let Some(incremental_cache) = &maybe_incremental_cache {
-                if file_diagnostics.is_empty() {
-                  // update the incremental cache if there were no diagnostics
-                  incremental_cache.update_file(
-                    &file_path,
-                    // ensure the returned text is used here as it may have been modified via --fix
-                    file_source.text(),
-                  )
-                }
-              }
-            }
-
-            let success = handle_lint_result(
-              &file_path.to_string_lossy(),
-              r,
-              reporter_lock.clone(),
-            );
-            if !success {
-              has_error.raise();
-            }
-
-            Ok(())
-          }
-        })
-        .await
-      }
-      .boxed_local()
-    });
+    let maybe_incremental_cache_ = maybe_incremental_cache.clone();
+    let linter = linter.clone();
+    let cli_options = cli_options.clone();
+    let fut = async move {
+      let operation = move |file_path: PathBuf| {
+        let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?);
+
+        // don't bother rechecking this file if it didn't have any diagnostics before
+        if let Some(incremental_cache) = &maybe_incremental_cache_ {
+          if incremental_cache.is_file_same(&file_path, &file_text) {
+            return Ok(());
+          }
+        }
+
+        let r = linter.lint_file(
+          &file_path,
+          file_text,
+          cli_options.ext_flag().as_deref(),
+        );
+        if let Ok((file_source, file_diagnostics)) = &r {
+          if let Some(incremental_cache) = &maybe_incremental_cache_ {
+            if file_diagnostics.is_empty() {
+              // update the incremental cache if there were no diagnostics
+              incremental_cache.update_file(
+                &file_path,
+                // ensure the returned text is used here as it may have been modified via --fix
+                file_source.text(),
+              )
+            }
+          }
+        }
+
+        let success = handle_lint_result(
+          &file_path.to_string_lossy(),
+          r,
+          reporter_lock.clone(),
+        );
+        if !success {
+          has_error.raise();
+        }
+
+        Ok(())
+      };
+      run_parallelized(paths, operation).await
+    }
+    .boxed_local();
+    futures.push(fut);
 
     if lint_options.fix {
       // run sequentially when using `--fix` to lower the chances of weird
@@ -419,6 +376,63 @@ impl WorkspaceLinter {
     Ok(())
   }
 
+  fn run_package_rules(
+    &mut self,
+    linter: &Arc<CliLinter>,
+    member_dir: &WorkspaceDirectory,
+    paths: &[PathBuf],
+  ) -> Option<LocalBoxFuture<Result<(), AnyError>>> {
+    if self.workspace_module_graph.is_none() {
+      let module_graph_creator = self.module_graph_creator.clone();
+      let packages = self.workspace_dir.jsr_packages_for_publish();
+      self.workspace_module_graph = Some(
+        async move {
+          module_graph_creator
+            .create_and_validate_publish_graph(&packages, true)
+            .await
+            .map(Rc::new)
+            .map_err(Rc::new)
+        }
+        .boxed_local()
+        .shared_local(),
+      );
+    }
+
+    let workspace_module_graph_future =
+      self.workspace_module_graph.as_ref().unwrap().clone();
+    let maybe_publish_config = member_dir.maybe_package_config();
+    let publish_config = maybe_publish_config?;
+
+    let has_error = self.has_error.clone();
+    let reporter_lock = self.reporter_lock.clone();
+    let linter = linter.clone();
+    let path_urls = paths
+      .iter()
+      .filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
+      .collect::<HashSet<_>>();
+    let fut = async move {
+      let graph = workspace_module_graph_future
+        .await
+        .map_err(|err| anyhow!("{:#}", err))?;
+      let export_urls =
+        publish_config.config_file.resolve_export_value_urls()?;
+      if !export_urls.iter().any(|url| path_urls.contains(url)) {
+        return Ok(()); // entrypoint is not specified, so skip
+      }
+      let diagnostics = linter.lint_package(&graph, &export_urls);
+      if !diagnostics.is_empty() {
+        has_error.raise();
+        let mut reporter = reporter_lock.lock();
+        for diagnostic in &diagnostics {
+          reporter.visit_diagnostic(diagnostic);
+        }
+      }
+      Ok(())
+    }
+    .boxed_local();
+    Some(fut)
+  }
+
   pub fn finish(self) -> bool {
     debug!("Found {} files", self.file_count);
     self.reporter_lock.lock().close(self.file_count);
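run_package_rules contributes its work as an optional boxed-local future that the caller pushes into a Vec alongside the per-file lint future. A small sketch of the same pattern with the futures crate (task bodies and the String error type are placeholders):

// Illustrative only: optionally contribute a LocalBoxFuture, then drive all
// collected futures. Mirrors the Option<LocalBoxFuture<...>> return above.
use futures::future::{FutureExt, LocalBoxFuture};

fn maybe_task(enabled: bool) -> Option<LocalBoxFuture<'static, Result<(), String>>> {
  if !enabled {
    return None; // nothing to do for this workspace member
  }
  Some(async move { Ok::<(), String>(()) }.boxed_local())
}

async fn run_all() -> Result<(), String> {
  let mut futures = Vec::with_capacity(2);
  if let Some(fut) = maybe_task(true) {
    futures.push(fut);
  }
  // The always-present task (per-file linting in the real code).
  futures.push(async move { Ok::<(), String>(()) }.boxed_local());
  // The real code joins these; awaiting sequentially is enough for a sketch.
  for fut in futures {
    fut.await?;
  }
  Ok(())
}

fn main() {
  futures::executor::block_on(run_all()).unwrap();
}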
@@ -494,10 +508,27 @@ pub fn print_rules_list(json: bool, maybe_rules_tags: Option<Vec<String>>) {
 /// Treats input as TypeScript.
 /// Compatible with `--json` flag.
 fn lint_stdin(
-  file_path: &Path,
-  configured_rules: ConfiguredRules,
-  deno_lint_config: LintConfig,
-) -> Result<(ParsedSource, Vec<LintDiagnostic>), AnyError> {
+  cli_options: &Arc<CliOptions>,
+  lint_rule_provider: LintRuleProvider,
+  workspace_lint_options: WorkspaceLintOptions,
+  lint_flags: LintFlags,
+  deno_lint_config: DenoLintConfig,
+) -> Result<bool, AnyError> {
+  let start_dir = &cli_options.start_dir;
+  let reporter_lock = Arc::new(Mutex::new(create_reporter(
+    workspace_lint_options.reporter_kind,
+  )));
+  let lint_config = start_dir
+    .to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
+  let lint_options = LintOptions::resolve(lint_config, &lint_flags);
+  let configured_rules = lint_rule_provider.resolve_lint_rules_err_empty(
+    lint_options.rules,
+    start_dir.maybe_deno_json().map(|c| c.as_ref()),
+  )?;
+  let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
+  if let Some(ext) = cli_options.ext_flag() {
+    file_path.set_extension(ext);
+  }
   let mut source_code = String::new();
   if stdin().read_to_string(&mut source_code).is_err() {
     return Err(generic_error("Failed to read from stdin"));

@@ -509,9 +540,14 @@ fn lint_stdin(
     deno_lint_config,
   });
 
-  linter
-    .lint_file(file_path, deno_ast::strip_bom(source_code), None)
-    .map_err(AnyError::from)
+  let r = linter
+    .lint_file(&file_path, deno_ast::strip_bom(source_code), None)
+    .map_err(AnyError::from);
+
+  let success =
+    handle_lint_result(&file_path.to_string_lossy(), r, reporter_lock.clone());
+  reporter_lock.lock().close(1);
+  Ok(success)
 }
 
 fn handle_lint_result(
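lint_stdin now owns the whole stdin flow: read the source, strip a BOM, lint under a synthetic file name, report, and return a success flag. A simplified std-only sketch (the file name and lint_source helper are hypothetical stand-ins for STDIN_FILE_NAME and the real linter):

// Illustrative only: the read-strip-lint-report-flag flow, with placeholders.
use std::io::Read;

fn strip_bom(text: &str) -> &str {
  text.strip_prefix('\u{FEFF}').unwrap_or(text)
}

fn lint_source(_path: &std::path::Path, _source: &str) -> Vec<String> {
  Vec::new() // stand-in for the real linter; returns diagnostic messages
}

fn lint_stdin_sketch() -> Result<bool, std::io::Error> {
  let mut source = String::new();
  std::io::stdin().read_to_string(&mut source)?;
  // Hypothetical synthetic name; the real code joins STDIN_FILE_NAME onto cwd.
  let file_path = std::env::current_dir()?.join("stdin.ts");
  let diagnostics = lint_source(&file_path, strip_bom(&source));
  for d in &diagnostics {
    eprintln!("{d}"); // the real code goes through a reporter
  }
  Ok(diagnostics.is_empty())
}

fn main() {
  match lint_stdin_sketch() {
    Ok(true) => {}
    Ok(false) => std::process::exit(1),
    Err(err) => {
      eprintln!("error: {err}");
      std::process::exit(1);
    }
  }
}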
@@ -556,3 +592,68 @@ struct LintError {
   file_path: String,
   message: String,
 }
+
+#[cfg(test)]
+mod tests {
+  use super::*;
+  use pretty_assertions::assert_eq;
+  use serde::Deserialize;
+  use test_util as util;
+
+  #[derive(Serialize, Deserialize)]
+  struct RulesSchema {
+    #[serde(rename = "$schema")]
+    schema: String,
+
+    #[serde(rename = "enum")]
+    rules: Vec<String>,
+  }
+
+  fn get_all_rules() -> Vec<String> {
+    let rule_provider = LintRuleProvider::new(None, None);
+    let configured_rules =
+      rule_provider.resolve_lint_rules(Default::default(), None);
+    let mut all_rules = configured_rules
+      .all_rule_codes
+      .into_iter()
+      .map(|s| s.to_string())
+      .collect::<Vec<String>>();
+    all_rules.sort();
+
+    all_rules
+  }
+
+  // TODO(bartlomieju): do the same for tags, once https://github.com/denoland/deno/pull/27162 lands
+  #[test]
+  fn all_lint_rules_are_listed_in_schema_file() {
+    let all_rules = get_all_rules();
+
+    let rules_schema_path =
+      util::root_path().join("cli/schemas/lint-rules.v1.json");
+    let rules_schema_file =
+      std::fs::read_to_string(&rules_schema_path).unwrap();
+
+    let schema: RulesSchema = serde_json::from_str(&rules_schema_file).unwrap();
+
+    const UPDATE_ENV_VAR_NAME: &str = "UPDATE_EXPECTED";
+
+    if std::env::var(UPDATE_ENV_VAR_NAME).ok().is_none() {
+      assert_eq!(
+        schema.rules, all_rules,
+        "Lint rules schema file not up to date. Run again with {}=1 to update the expected output",
+        UPDATE_ENV_VAR_NAME
+      );
+      return;
+    }
+
+    std::fs::write(
+      &rules_schema_path,
+      serde_json::to_string_pretty(&RulesSchema {
+        schema: schema.schema,
+        rules: all_rules,
+      })
+      .unwrap(),
+    )
+    .unwrap();
+  }
+}
@@ -8,7 +8,7 @@ use std::sync::Arc;
 use deno_ast::SourceRange;
 use deno_config::workspace::WorkspaceResolver;
 use deno_core::anyhow::anyhow;
-use deno_graph::source::ResolutionMode;
+use deno_graph::source::ResolutionKind;
 use deno_graph::source::ResolveError;
 use deno_graph::Range;
 use deno_lint::diagnostic::LintDiagnosticDetails;

@@ -17,7 +17,7 @@ use deno_lint::diagnostic::LintFix;
 use deno_lint::diagnostic::LintFixChange;
 use deno_lint::rules::LintRule;
 use deno_resolver::sloppy_imports::SloppyImportsResolution;
-use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
+use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
 use text_lines::LineAndColumnIndex;
 
 use crate::graph_util::CliJsrUrlProvider;

@@ -101,16 +101,16 @@ impl LintRule for NoSloppyImportsRule {
       maybe_npm_resolver: None,
     });
 
-    for (range, sloppy_import) in resolver.captures.borrow_mut().drain() {
+    for (referrer, sloppy_import) in resolver.captures.borrow_mut().drain() {
       let start_range =
         context.text_info().loc_to_source_pos(LineAndColumnIndex {
-          line_index: range.start.line,
-          column_index: range.start.character,
+          line_index: referrer.range.start.line,
+          column_index: referrer.range.start.character,
         });
       let end_range =
         context.text_info().loc_to_source_pos(LineAndColumnIndex {
-          line_index: range.end.line,
-          column_index: range.end.character,
+          line_index: referrer.range.end.line,
+          column_index: referrer.range.end.character,
        });
       let source_range = SourceRange::new(start_range, end_range);
       context.add_diagnostic_details(

@@ -183,7 +183,7 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
     &self,
     specifier_text: &str,
     referrer_range: &Range,
-    mode: ResolutionMode,
+    resolution_kind: ResolutionKind,
   ) -> Result<deno_ast::ModuleSpecifier, deno_graph::source::ResolveError> {
     let resolution = self
       .workspace_resolver

@@ -198,9 +198,9 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
         specifier, ..
       } => match self.sloppy_imports_resolver.resolve(
         &specifier,
-        match mode {
-          ResolutionMode::Execution => SloppyImportsResolutionMode::Execution,
-          ResolutionMode::Types => SloppyImportsResolutionMode::Types,
+        match resolution_kind {
+          ResolutionKind::Execution => SloppyImportsResolutionKind::Execution,
+          ResolutionKind::Types => SloppyImportsResolutionKind::Types,
         },
       ) {
         Some(res) => {
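The ResolutionMode to ResolutionKind rename is mechanical; the rule still maps deno_graph's kind one-to-one onto the sloppy-imports resolver's kind. A sketch of that total mapping using local stand-in enums rather than the real library types:

// Illustrative only: local stand-ins for the two renamed enums and the
// exhaustive mapping between them.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ResolutionKind {
  Execution,
  Types,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SloppyImportsResolutionKind {
  Execution,
  Types,
}

fn to_sloppy_kind(kind: ResolutionKind) -> SloppyImportsResolutionKind {
  match kind {
    ResolutionKind::Execution => SloppyImportsResolutionKind::Execution,
    ResolutionKind::Types => SloppyImportsResolutionKind::Types,
  }
}

fn main() {
  assert_eq!(
    to_sloppy_kind(ResolutionKind::Types),
    SloppyImportsResolutionKind::Types
  );
}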
@@ -169,7 +169,7 @@ impl Diagnostic for PublishDiagnostic {
         ..
       }) => DiagnosticLevel::Warning,
       FastCheck(_) => DiagnosticLevel::Error,
-      SpecifierUnfurl(_) => DiagnosticLevel::Warning,
+      SpecifierUnfurl(d) => d.level(),
       InvalidPath { .. } => DiagnosticLevel::Error,
       DuplicatePath { .. } => DiagnosticLevel::Error,
       UnsupportedFileType { .. } => DiagnosticLevel::Warning,

@@ -187,7 +187,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.code(),
-      SpecifierUnfurl(diagnostic) => Cow::Borrowed(diagnostic.code()),
+      SpecifierUnfurl(diagnostic) => diagnostic.code(),
       InvalidPath { .. } => Cow::Borrowed("invalid-path"),
       DuplicatePath { .. } => Cow::Borrowed("case-insensitive-duplicate-path"),
       UnsupportedFileType { .. } => Cow::Borrowed("unsupported-file-type"),

@@ -207,7 +207,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.message(),
-      SpecifierUnfurl(diagnostic) => Cow::Borrowed(diagnostic.message()),
+      SpecifierUnfurl(diagnostic) => diagnostic.message(),
       InvalidPath { message, .. } => Cow::Borrowed(message.as_str()),
       DuplicatePath { .. } => {
         Cow::Borrowed("package path is a case insensitive duplicate of another path in the package")

@@ -234,8 +234,8 @@ impl Diagnostic for PublishDiagnostic {
           specifier: Cow::Borrowed(&referrer.specifier),
           text_info: Cow::Borrowed(text_info),
           source_pos: DiagnosticSourcePos::LineAndCol {
-            line: referrer.start.line,
-            column: referrer.start.character,
+            line: referrer.range.start.line,
+            column: referrer.range.start.character,
           },
         }
       }

@@ -243,17 +243,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.location(),
-      SpecifierUnfurl(diagnostic) => match diagnostic {
-        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
-          specifier,
-          text_info,
-          range,
-        } => DiagnosticLocation::ModulePosition {
-          specifier: Cow::Borrowed(specifier),
-          text_info: Cow::Borrowed(text_info),
-          source_pos: DiagnosticSourcePos::SourcePos(range.start),
-        },
-      },
+      SpecifierUnfurl(diagnostic) => diagnostic.location(),
       InvalidPath { path, .. } => {
         DiagnosticLocation::Path { path: path.clone() }
       }

@@ -300,7 +290,7 @@ impl Diagnostic for PublishDiagnostic {
     text_info: &'a SourceTextInfo,
     referrer: &'a deno_graph::Range,
   ) -> Option<DiagnosticSnippet<'a>> {
-    if referrer.start.line == 0 && referrer.start.character == 0 {
+    if referrer.range.start.line == 0 && referrer.range.start.character == 0 {
       return None; // no range, probably a jsxImportSource import
     }
 

@@ -310,12 +300,12 @@ impl Diagnostic for PublishDiagnostic {
         style: DiagnosticSnippetHighlightStyle::Error,
         range: DiagnosticSourceRange {
           start: DiagnosticSourcePos::LineAndCol {
-            line: referrer.start.line,
-            column: referrer.start.character,
+            line: referrer.range.start.line,
+            column: referrer.range.start.character,
           },
           end: DiagnosticSourcePos::LineAndCol {
-            line: referrer.end.line,
-            column: referrer.end.character,
+            line: referrer.range.end.line,
+            column: referrer.range.end.character,
           },
         },
         description: Some("the specifier".into()),

@@ -325,24 +315,8 @@ impl Diagnostic for PublishDiagnostic {
 
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(diagnostic) => diagnostic.snippet(),
-      SpecifierUnfurl(diagnostic) => match diagnostic {
-        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
-          text_info,
-          range,
-          ..
-        } => Some(DiagnosticSnippet {
-          source: Cow::Borrowed(text_info),
-          highlights: vec![DiagnosticSnippetHighlight {
-            style: DiagnosticSnippetHighlightStyle::Warning,
-            range: DiagnosticSourceRange {
-              start: DiagnosticSourcePos::SourcePos(range.start),
-              end: DiagnosticSourcePos::SourcePos(range.end),
-            },
-            description: Some("the unanalyzable dynamic import".into()),
-          }],
-        }),
-      },
+      FastCheck(d) => d.snippet(),
+      SpecifierUnfurl(d) => d.snippet(),
       InvalidPath { .. } => None,
       DuplicatePath { .. } => None,
       UnsupportedFileType { .. } => None,

@@ -380,7 +354,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.hint(),
-      SpecifierUnfurl(_) => None,
+      SpecifierUnfurl(d) => d.hint(),
       InvalidPath { .. } => Some(
         Cow::Borrowed("rename or remove the file, or add it to 'publish.exclude' in the config file"),
       ),

@@ -436,9 +410,9 @@ impl Diagnostic for PublishDiagnostic {
           None => None,
         }
       }
-      SyntaxError(diagnostic) => diagnostic.snippet_fixed(),
+      SyntaxError(d) => d.snippet_fixed(),
+      SpecifierUnfurl(d) => d.snippet_fixed(),
       FastCheck(_)
-        | SpecifierUnfurl(_)
         | InvalidPath { .. }
         | DuplicatePath { .. }
         | UnsupportedFileType { .. }

@@ -453,16 +427,8 @@ impl Diagnostic for PublishDiagnostic {
   fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(diagnostic) => {
-        diagnostic.info()
-      }
-      SpecifierUnfurl(diagnostic) => match diagnostic {
-        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => Cow::Borrowed(&[
-          Cow::Borrowed("after publishing this package, imports from the local import map / package.json do not work"),
-          Cow::Borrowed("dynamic imports that can not be analyzed at publish time will not be rewritten automatically"),
-          Cow::Borrowed("make sure the dynamic import is resolvable at runtime without an import map / package.json")
-        ]),
-      },
+      FastCheck(d) => d.info(),
+      SpecifierUnfurl(d) => d.info(),
       InvalidPath { .. } => Cow::Borrowed(&[
         Cow::Borrowed("to portably support all platforms, including windows, the allowed characters in package paths are limited"),
       ]),

@@ -476,7 +442,7 @@ impl Diagnostic for PublishDiagnostic {
       InvalidExternalImport { imported, .. } => Cow::Owned(vec![
        Cow::Owned(format!("the import was resolved to '{}'", imported)),
        Cow::Borrowed("this specifier is not allowed to be imported on jsr"),
-       Cow::Borrowed("jsr only supports importing `jsr:`, `npm:`, and `data:` specifiers"),
+       Cow::Borrowed("jsr only supports importing `jsr:`, `npm:`, `data:`, `bun:`, and `node:` specifiers"),
       ]),
       UnsupportedJsxTsx { .. } => Cow::Owned(vec![
        Cow::Borrowed("follow https://github.com/jsr-io/jsr/issues/24 for updates"),

@@ -503,10 +469,8 @@ impl Diagnostic for PublishDiagnostic {
   fn docs_url(&self) -> Option<Cow<'_, str>> {
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(diagnostic) => diagnostic.docs_url(),
-      SpecifierUnfurl(diagnostic) => match diagnostic {
-        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => None,
-      },
+      FastCheck(d) => d.docs_url(),
+      SpecifierUnfurl(d) => d.docs_url(),
       InvalidPath { .. } => {
         Some(Cow::Borrowed("https://jsr.io/go/invalid-path"))
       }
|
@ -47,7 +47,7 @@ impl GraphDiagnosticsCollector {
|
||||||
resolution: &ResolutionResolved| {
|
resolution: &ResolutionResolved| {
|
||||||
if visited.insert(resolution.specifier.clone()) {
|
if visited.insert(resolution.specifier.clone()) {
|
||||||
match resolution.specifier.scheme() {
|
match resolution.specifier.scheme() {
|
||||||
"file" | "data" | "node" => {}
|
"file" | "data" | "node" | "bun" => {}
|
||||||
"jsr" => {
|
"jsr" => {
|
||||||
skip_specifiers.insert(resolution.specifier.clone());
|
skip_specifiers.insert(resolution.specifier.clone());
|
||||||
|
|
||||||
|
|
|
@@ -14,7 +14,6 @@ use base64::Engine;
 use deno_ast::ModuleSpecifier;
 use deno_config::deno_json::ConfigFile;
 use deno_config::workspace::JsrPackageConfig;
-use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::Workspace;
 use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;

@@ -27,6 +26,7 @@ use deno_core::serde_json;
 use deno_core::serde_json::json;
 use deno_core::serde_json::Value;
 use deno_core::url::Url;
+use deno_runtime::deno_fetch;
 use deno_terminal::colors;
 use http_body_util::BodyExt;
 use serde::Deserialize;

@@ -44,8 +44,6 @@ use crate::cache::ParsedSourceCache;
 use crate::factory::CliFactory;
 use crate::graph_util::ModuleGraphCreator;
 use crate::http_util::HttpClient;
-use crate::resolver::CliSloppyImportsResolver;
-use crate::resolver::SloppyImportsCachedFs;
 use crate::tools::check::CheckOptions;
 use crate::tools::lint::collect_no_slow_type_diagnostics;
 use crate::tools::registry::diagnostics::PublishDiagnostic;

@@ -97,11 +95,10 @@ pub async fn publish(
     match cli_options.start_dir.maybe_deno_json() {
       Some(deno_json) => {
         debug_assert!(!deno_json.is_package());
+        if deno_json.json.name.is_none() {
+          bail!("Missing 'name' field in '{}'.", deno_json.specifier);
+        }
         error_missing_exports_field(deno_json)?;
-        bail!(
-          "Missing 'name' or 'exports' field in '{}'.",
-          deno_json.specifier
-        );
       }
       None => {
         bail!(

@@ -124,19 +121,8 @@ pub async fn publish(
   }
 
   let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
-    if cli_options.unstable_sloppy_imports() {
-      Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
-        cli_factory.fs().clone(),
-      )))
-    } else {
-      None
-    },
-    cli_options
-      .create_workspace_resolver(
-        cli_factory.file_fetcher()?,
-        PackageJsonDepResolution::Enabled,
-      )
-      .await?,
+    cli_factory.sloppy_imports_resolver()?.cloned(),
+    cli_factory.workspace_resolver().await?.clone(),
     cli_options.unstable_bare_node_builtins(),
   ));
 

@@ -926,9 +912,7 @@ async fn publish_package(
     package.config
   );
 
-  let body = http_body_util::Full::new(package.tarball.bytes.clone())
-    .map_err(|never| match never {})
-    .boxed();
+  let body = deno_fetch::ReqBody::full(package.tarball.bytes.clone());
   let response = http_client
     .post(url.parse()?, body)?
     .header(
@@ -4,6 +4,7 @@ use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;
 
+use deno_cache_dir::file_fetcher::CacheSetting;
 use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;
 use deno_core::error::AnyError;

@@ -14,6 +15,7 @@ use deno_semver::jsr::JsrPackageReqReference;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageNv;
 use deno_semver::package::PackageReq;
+use deno_semver::StackString;
 use deno_semver::Version;
 use deno_semver::VersionReq;
 use deps::KeyPath;

@@ -23,12 +25,11 @@ use jsonc_parser::cst::CstRootNode;
 use jsonc_parser::json;
 
 use crate::args::AddFlags;
-use crate::args::CacheSetting;
 use crate::args::CliOptions;
 use crate::args::Flags;
 use crate::args::RemoveFlags;
 use crate::factory::CliFactory;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::jsr::JsrFetchResolver;
 use crate::npm::NpmFetchResolver;
 

@@ -283,7 +284,7 @@ fn package_json_dependency_entry(
     (npm_package.into(), selected.version_req)
   } else {
     (
-      selected.import_name,
+      selected.import_name.into_string(),
       format!("npm:{}@{}", npm_package, selected.version_req),
     )
   }

@@ -292,7 +293,7 @@ fn package_json_dependency_entry(
     let scope_replaced = jsr_package.replace('/', "__");
     let version_req =
       format!("npm:@jsr/{scope_replaced}@{}", selected.version_req);
-    (selected.import_name, version_req)
+    (selected.import_name.into_string(), version_req)
   } else {
     (selected.package_name, selected.version_req)
   }

@@ -411,18 +412,18 @@ pub async fn add(
 
   let http_client = cli_factory.http_client_provider();
   let deps_http_cache = cli_factory.global_http_cache()?;
-  let mut deps_file_fetcher = FileFetcher::new(
+  let deps_file_fetcher = CliFileFetcher::new(
     deps_http_cache.clone(),
-    CacheSetting::ReloadAll,
-    true,
     http_client.clone(),
     Default::default(),
     None,
+    true,
+    CacheSetting::ReloadAll,
+    log::Level::Trace,
   );
 
   let npmrc = cli_factory.cli_options().unwrap().npmrc();
 
-  deps_file_fetcher.set_download_log_level(log::Level::Trace);
   let deps_file_fetcher = Arc::new(deps_file_fetcher);
   let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
   let npm_resolver =

@@ -432,9 +433,8 @@ pub async fn add(
   let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
 
   for entry_text in add_flags.packages.iter() {
-    let req = AddRmPackageReq::parse(entry_text).with_context(|| {
-      format!("Failed to parse package required: {}", entry_text)
-    })?;
+    let req = AddRmPackageReq::parse(entry_text)
+      .with_context(|| format!("Failed to parse package: {}", entry_text))?;
 
     match req {
       Ok(add_req) => package_reqs.push(add_req),

@@ -550,10 +550,10 @@ pub async fn add(
 }
 
 struct SelectedPackage {
-  import_name: String,
+  import_name: StackString,
   package_name: String,
   version_req: String,
-  selected_version: String,
+  selected_version: StackString,
 }
 
 enum NotFoundHelp {

@@ -684,7 +684,7 @@ async fn find_package_and_select_version_for_req(
       import_name: add_package_req.alias,
       package_name: prefixed_name,
       version_req: format!("{}{}", range_symbol, &nv.version),
-      selected_version: nv.version.to_string(),
+      selected_version: nv.version.to_custom_string::<StackString>(),
     }))
   }
 

@@ -706,7 +706,7 @@ enum AddRmPackageReqValue {
 
 #[derive(Debug, PartialEq, Eq)]
 pub struct AddRmPackageReq {
-  alias: String,
+  alias: StackString,
   value: AddRmPackageReqValue,
 }
 

@@ -754,7 +754,11 @@ impl AddRmPackageReq {
           return Ok(Err(PackageReq::from_str(entry_text)?));
         }
 
-        (maybe_prefix.unwrap(), Some(alias.to_string()), entry_text)
+        (
+          maybe_prefix.unwrap(),
+          Some(StackString::from(alias)),
+          entry_text,
+        )
       }
       None => return Ok(Err(PackageReq::from_str(entry_text)?)),
     },

@@ -766,7 +770,7 @@ impl AddRmPackageReq {
           JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?;
         let package_req = req_ref.into_inner().req;
         Ok(Ok(AddRmPackageReq {
-          alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
+          alias: maybe_alias.unwrap_or_else(|| package_req.name.clone()),
           value: AddRmPackageReqValue::Jsr(package_req),
         }))
       }

@@ -786,7 +790,7 @@ impl AddRmPackageReq {
           );
         }
         Ok(Ok(AddRmPackageReq {
-          alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
+          alias: maybe_alias.unwrap_or_else(|| package_req.name.clone()),
           value: AddRmPackageReqValue::Npm(package_req),
         }))
       }

@@ -805,9 +809,8 @@ pub async fn remove(
   let mut removed_packages = vec![];
 
   for package in &remove_flags.packages {
-    let req = AddRmPackageReq::parse(package).with_context(|| {
-      format!("Failed to parse package required: {}", package)
-    })?;
+    let req = AddRmPackageReq::parse(package)
+      .with_context(|| format!("Failed to parse package: {}", package))?;
     let mut parsed_pkg_name = None;
     for config in configs.iter_mut().flatten() {
       match &req {

@@ -880,14 +883,14 @@ mod test {
     assert_eq!(
       AddRmPackageReq::parse("jsr:foo").unwrap().unwrap(),
       AddRmPackageReq {
-        alias: "foo".to_string(),
+        alias: "foo".into(),
         value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
       }
     );
     assert_eq!(
       AddRmPackageReq::parse("alias@jsr:foo").unwrap().unwrap(),
       AddRmPackageReq {
-        alias: "alias".to_string(),
+        alias: "alias".into(),
         value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
       }
     );

@@ -896,7 +899,7 @@ mod test {
       .unwrap()
       .unwrap(),
       AddRmPackageReq {
-        alias: "@alias/pkg".to_string(),
+        alias: "@alias/pkg".into(),
         value: AddRmPackageReqValue::Npm(
           PackageReq::from_str("foo@latest").unwrap()
         )

@@ -907,7 +910,7 @@ mod test {
       .unwrap()
      .unwrap(),
       AddRmPackageReq {
-        alias: "@alias/pkg".to_string(),
+        alias: "@alias/pkg".into(),
         value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
       }
     );

@@ -916,7 +919,7 @@ mod test {
       .unwrap()
       .unwrap(),
       AddRmPackageReq {
-        alias: "alias".to_string(),
+        alias: "alias".into(),
         value: AddRmPackageReqValue::Jsr(
           PackageReq::from_str("foo@^1.5.0").unwrap()
         )
@ -1,10 +1,12 @@
|
||||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use std::borrow::Cow;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use crate::factory::CliFactory;
|
use crate::factory::CliFactory;
|
||||||
use crate::graph_container::ModuleGraphContainer;
|
use crate::graph_container::ModuleGraphContainer;
|
||||||
use crate::graph_container::ModuleGraphUpdatePermit;
|
use crate::graph_container::ModuleGraphUpdatePermit;
|
||||||
|
use crate::graph_util::CreateGraphOptions;
|
||||||
use deno_core::error::AnyError;
|
use deno_core::error::AnyError;
|
||||||
use deno_core::futures::stream::FuturesUnordered;
|
use deno_core::futures::stream::FuturesUnordered;
|
||||||
use deno_core::futures::StreamExt;
|
use deno_core::futures::StreamExt;
|
||||||
|
@ -17,18 +19,16 @@ pub async fn cache_top_level_deps(
|
||||||
) -> Result<(), AnyError> {
|
) -> Result<(), AnyError> {
|
||||||
let npm_resolver = factory.npm_resolver().await?;
|
let npm_resolver = factory.npm_resolver().await?;
|
||||||
let cli_options = factory.cli_options()?;
|
let cli_options = factory.cli_options()?;
|
||||||
let root_permissions = factory.root_permissions_container()?;
|
|
||||||
if let Some(npm_resolver) = npm_resolver.as_managed() {
|
if let Some(npm_resolver) = npm_resolver.as_managed() {
|
||||||
if !npm_resolver.ensure_top_level_package_json_install().await? {
|
npm_resolver.ensure_top_level_package_json_install().await?;
|
     if let Some(lockfile) = cli_options.maybe_lockfile() {
       lockfile.error_if_changed()?;
-      }
-
-      npm_resolver.cache_packages().await?;
     }
   }
   // cache as many entries in the import map as we can
   let resolver = factory.workspace_resolver().await?;

+  let mut maybe_graph_error = Ok(());
   if let Some(import_map) = resolver.maybe_import_map() {
     let jsr_resolver = if let Some(resolver) = jsr_resolver {
       resolver
@@ -37,6 +37,16 @@ pub async fn cache_top_level_deps(
         factory.file_fetcher()?.clone(),
       ))
     };
+    let mut graph_permit = factory
+      .main_module_graph_container()
+      .await?
+      .acquire_update_permit()
+      .await;
+    let graph = graph_permit.graph_mut();
+    if let Some(lockfile) = cli_options.maybe_lockfile() {
+      let lockfile = lockfile.lock();
+      crate::graph_util::fill_graph_from_lockfile(graph, &lockfile);
+    }

     let mut roots = Vec::new();

@@ -67,13 +77,16 @@ pub async fn cache_top_level_deps(
         if !seen_reqs.insert(req.req().clone()) {
           continue;
         }
+        let resolved_req = graph.packages.mappings().get(req.req());
         let jsr_resolver = jsr_resolver.clone();
         info_futures.push(async move {
-          if let Some(nv) = jsr_resolver.req_to_nv(req.req()).await {
-            if let Some(info) = jsr_resolver.package_version_info(&nv).await
-            {
-              return Some((specifier.clone(), info));
-            }
+          let nv = if let Some(req) = resolved_req {
+            Cow::Borrowed(req)
+          } else {
+            Cow::Owned(jsr_resolver.req_to_nv(req.req()).await?)
+          };
+          if let Some(info) = jsr_resolver.package_version_info(&nv).await {
+            return Some((specifier.clone(), info));
           }
           None
         });
@@ -106,25 +119,31 @@ pub async fn cache_top_level_deps(
       }
     }
   }
-  let mut graph_permit = factory
-    .main_module_graph_container()
-    .await?
-    .acquire_update_permit()
-    .await;
-  let graph = graph_permit.graph_mut();
-  factory
-    .module_load_preparer()
-    .await?
-    .prepare_module_load(
+    drop(info_futures);
+
+    let graph_builder = factory.module_graph_builder().await?;
+    graph_builder
+      .build_graph_with_npm_resolution(
       graph,
-      &roots,
-      false,
-      deno_config::deno_json::TsTypeLib::DenoWorker,
-      root_permissions.clone(),
-      None,
+        CreateGraphOptions {
+          loader: None,
+          graph_kind: graph.graph_kind(),
+          is_dynamic: false,
+          roots: roots.clone(),
+          npm_caching: crate::graph_util::NpmCachingStrategy::Manual,
+        },
     )
     .await?;
+    maybe_graph_error = graph_builder.graph_roots_valid(graph, &roots);
+  }
+
+  if let Some(npm_resolver) = npm_resolver.as_managed() {
+    npm_resolver
+      .cache_packages(crate::npm::PackageCaching::All)
+      .await?;
   }

+  maybe_graph_error?;
+
   Ok(())
 }

@@ -2,7 +2,7 @@

 use std::borrow::Cow;
 use std::collections::HashMap;
-use std::sync::atomic::AtomicBool;
+use std::path::PathBuf;
 use std::sync::Arc;

 use deno_ast::ModuleSpecifier;
@@ -11,6 +11,7 @@ use deno_config::deno_json::ConfigFileRc;
 use deno_config::workspace::Workspace;
 use deno_config::workspace::WorkspaceDirectory;
 use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use deno_core::futures::future::try_join;
 use deno_core::futures::stream::FuturesOrdered;
@@ -18,9 +19,7 @@ use deno_core::futures::stream::FuturesUnordered;
 use deno_core::futures::FutureExt;
 use deno_core::futures::StreamExt;
 use deno_core::serde_json;
-use deno_graph::FillFromLockfileOptions;
-use deno_package_json::PackageJsonDepValue;
-use deno_package_json::PackageJsonDepValueParseError;
+use deno_package_json::PackageJsonDepsMap;
 use deno_package_json::PackageJsonRc;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_semver::jsr::JsrPackageReqReference;
@@ -28,11 +27,12 @@ use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageNv;
 use deno_semver::package::PackageReq;
 use deno_semver::package::PackageReqReference;
+use deno_semver::StackString;
+use deno_semver::Version;
 use deno_semver::VersionReq;
 use import_map::ImportMap;
 use import_map::ImportMapWithDiagnostics;
 use import_map::SpecifierMapEntry;
-use indexmap::IndexMap;
 use tokio::sync::Semaphore;

 use crate::args::CliLockfile;
@@ -43,13 +43,14 @@ use crate::jsr::JsrFetchResolver;
 use crate::module_loader::ModuleLoadPreparer;
 use crate::npm::CliNpmResolver;
 use crate::npm::NpmFetchResolver;
+use crate::util::sync::AtomicFlag;

 use super::ConfigUpdater;

-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub enum ImportMapKind {
   Inline,
-  Outline,
+  Outline(PathBuf),
 }

 #[derive(Clone)]
@@ -65,9 +66,12 @@ impl DepLocation {

   pub fn file_path(&self) -> Cow<std::path::Path> {
     match self {
-      DepLocation::DenoJson(arc, _, _) => {
-        Cow::Owned(arc.specifier.to_file_path().unwrap())
-      }
+      DepLocation::DenoJson(arc, _, kind) => match kind {
+        ImportMapKind::Inline => {
+          Cow::Owned(arc.specifier.to_file_path().unwrap())
+        }
+        ImportMapKind::Outline(path) => Cow::Borrowed(path.as_path()),
+      },
       DepLocation::PackageJson(arc, _) => Cow::Borrowed(arc.path.as_ref()),
     }
   }
@@ -136,13 +140,7 @@ pub enum KeyPart {
   Scopes,
   Dependencies,
   DevDependencies,
-  String(String),
-}
-
-impl From<String> for KeyPart {
-  fn from(value: String) -> Self {
-    KeyPart::String(value)
-  }
+  String(StackString),
 }

 impl From<PackageJsonDepKind> for KeyPart {
@@ -161,7 +159,7 @@ impl KeyPart {
       KeyPart::Scopes => "scopes",
       KeyPart::Dependencies => "dependencies",
       KeyPart::DevDependencies => "devDependencies",
-      KeyPart::String(s) => s,
+      KeyPart::String(s) => s.as_str(),
     }
   }
 }
@@ -214,12 +212,12 @@ fn import_map_entries(
     .chain(import_map.scopes().flat_map(|scope| {
       let path = KeyPath::from_parts([
         KeyPart::Scopes,
-        scope.raw_key.to_string().into(),
+        KeyPart::String(scope.raw_key.into()),
       ]);

       scope.imports.entries().map(move |entry| {
         let mut full_path = path.clone();
-        full_path.push(KeyPart::String(entry.raw_key.to_string()));
+        full_path.push(KeyPart::String(entry.raw_key.into()));
         (full_path, entry)
       })
     }))
@@ -241,22 +239,30 @@ fn to_import_map_value_from_imports(
 fn deno_json_import_map(
   deno_json: &ConfigFile,
 ) -> Result<Option<(ImportMapWithDiagnostics, ImportMapKind)>, AnyError> {
-  let (value, kind) =
-    if deno_json.json.imports.is_some() || deno_json.json.scopes.is_some() {
-      (
-        to_import_map_value_from_imports(deno_json),
-        ImportMapKind::Inline,
-      )
-    } else {
-      match deno_json.to_import_map_path()? {
-        Some(path) => {
-          let text = std::fs::read_to_string(&path)?;
-          let value = serde_json::from_str(&text)?;
-          (value, ImportMapKind::Outline)
-        }
-        None => return Ok(None),
+  let (value, kind) = if deno_json.json.imports.is_some()
+    || deno_json.json.scopes.is_some()
+  {
+    (
+      to_import_map_value_from_imports(deno_json),
+      ImportMapKind::Inline,
+    )
+  } else {
+    match deno_json.to_import_map_path()? {
+      Some(path) => {
+        let err_context = || {
+          format!(
+            "loading import map at '{}' (from \"importMap\" field in '{}')",
+            path.display(),
+            deno_json.specifier
+          )
+        };
+        let text = std::fs::read_to_string(&path).with_context(err_context)?;
+        let value = serde_json::from_str(&text).with_context(err_context)?;
+        (value, ImportMapKind::Outline(path))
       }
-    };
+      None => return Ok(None),
+    }
+  };

   import_map::parse_from_value(deno_json.specifier.clone(), value)
     .map_err(Into::into)
@@ -269,94 +275,6 @@ enum PackageJsonDepKind {
   Dev,
 }

-type PackageJsonDeps = IndexMap<
-  String,
-  Result<
-    (PackageJsonDepKind, PackageJsonDepValue),
-    PackageJsonDepValueParseError,
-  >,
->;
-
-/// Resolve the package.json's dependencies.
-// TODO(nathanwhit): Remove once we update deno_package_json with dev deps split out
-fn resolve_local_package_json_deps(
-  package_json: &PackageJsonRc,
-) -> PackageJsonDeps {
-  /// Gets the name and raw version constraint for a registry info or
-  /// package.json dependency entry taking into account npm package aliases.
-  fn parse_dep_entry_name_and_raw_version<'a>(
-    key: &'a str,
-    value: &'a str,
-  ) -> (&'a str, &'a str) {
-    if let Some(package_and_version) = value.strip_prefix("npm:") {
-      if let Some((name, version)) = package_and_version.rsplit_once('@') {
-        // if empty, then the name was scoped and there's no version
-        if name.is_empty() {
-          (package_and_version, "*")
-        } else {
-          (name, version)
-        }
-      } else {
-        (package_and_version, "*")
-      }
-    } else {
-      (key, value)
-    }
-  }
-
-  fn parse_entry(
-    key: &str,
-    value: &str,
-  ) -> Result<PackageJsonDepValue, PackageJsonDepValueParseError> {
-    if let Some(workspace_key) = value.strip_prefix("workspace:") {
-      let version_req = VersionReq::parse_from_npm(workspace_key)?;
-      return Ok(PackageJsonDepValue::Workspace(version_req));
-    }
-    if value.starts_with("file:")
-      || value.starts_with("git:")
-      || value.starts_with("http:")
-      || value.starts_with("https:")
-    {
-      return Err(PackageJsonDepValueParseError::Unsupported {
-        scheme: value.split(':').next().unwrap().to_string(),
-      });
-    }
-    let (name, version_req) = parse_dep_entry_name_and_raw_version(key, value);
-    let result = VersionReq::parse_from_npm(version_req);
-    match result {
-      Ok(version_req) => Ok(PackageJsonDepValue::Req(PackageReq {
-        name: name.to_string(),
-        version_req,
-      })),
-      Err(err) => Err(PackageJsonDepValueParseError::VersionReq(err)),
-    }
-  }
-
-  fn insert_deps(
-    deps: Option<&IndexMap<String, String>>,
-    result: &mut PackageJsonDeps,
-    kind: PackageJsonDepKind,
-  ) {
-    if let Some(deps) = deps {
-      for (key, value) in deps {
-        result.entry(key.to_string()).or_insert_with(|| {
-          parse_entry(key, value).map(|entry| (kind, entry))
-        });
-      }
-    }
-  }
-
-  let deps = package_json.dependencies.as_ref();
-  let dev_deps = package_json.dev_dependencies.as_ref();
-  let mut result = IndexMap::new();
-
-  // favors the deps over dev_deps
-  insert_deps(deps, &mut result, PackageJsonDepKind::Normal);
-  insert_deps(dev_deps, &mut result, PackageJsonDepKind::Dev);
-
-  result
-}
-
 fn add_deps_from_deno_json(
   deno_json: &Arc<ConfigFile>,
   mut filter: impl DepFilter,
@@ -394,7 +312,7 @@ fn add_deps_from_deno_json(
         location: DepLocation::DenoJson(
           deno_json.clone(),
           key_path,
-          import_map_kind,
+          import_map_kind.clone(),
         ),
         kind,
         req,
@@ -406,40 +324,67 @@ fn add_deps_from_deno_json(

 fn add_deps_from_package_json(
   package_json: &PackageJsonRc,
-  mut filter: impl DepFilter,
+  filter: impl DepFilter,
   deps: &mut Vec<Dep>,
 ) {
-  let package_json_deps = resolve_local_package_json_deps(package_json);
-  for (k, v) in package_json_deps {
-    let (package_dep_kind, v) = match v {
-      Ok((k, v)) => (k, v),
-      Err(e) => {
-        log::warn!("bad package json dep value: {e}");
-        continue;
-      }
-    };
-    match v {
-      deno_package_json::PackageJsonDepValue::Req(req) => {
-        let alias = k.as_str();
-        let alias = (alias != req.name).then(|| alias.to_string());
-        if !filter.should_include(alias.as_deref(), &req, DepKind::Npm) {
-          continue;
-        }
-        let id = DepId(deps.len());
-        deps.push(Dep {
-          id,
-          kind: DepKind::Npm,
-          location: DepLocation::PackageJson(
-            package_json.clone(),
-            KeyPath::from_parts([package_dep_kind.into(), k.into()]),
-          ),
-          req,
-          alias,
-        })
-      }
-      deno_package_json::PackageJsonDepValue::Workspace(_) => continue,
-    }
-  }
+  let package_json_deps = package_json.resolve_local_package_json_deps();
+
+  fn iterate(
+    package_json: &PackageJsonRc,
+    mut filter: impl DepFilter,
+    package_dep_kind: PackageJsonDepKind,
+    package_json_deps: &PackageJsonDepsMap,
+    deps: &mut Vec<Dep>,
+  ) {
+    for (k, v) in package_json_deps {
+      let v = match v {
+        Ok(v) => v,
+        Err(e) => {
+          log::warn!("bad package json dep value: {e}");
+          continue;
+        }
+      };
+      match v {
+        deno_package_json::PackageJsonDepValue::Req(req) => {
+          let alias = k.as_str();
+          let alias = (alias != req.name).then(|| alias.to_string());
+          if !filter.should_include(alias.as_deref(), req, DepKind::Npm) {
+            continue;
+          }
+          let id = DepId(deps.len());
+          deps.push(Dep {
+            id,
+            kind: DepKind::Npm,
+            location: DepLocation::PackageJson(
+              package_json.clone(),
+              KeyPath::from_parts([
+                package_dep_kind.into(),
+                KeyPart::String(k.clone()),
+              ]),
+            ),
+            req: req.clone(),
+            alias,
+          })
+        }
+        deno_package_json::PackageJsonDepValue::Workspace(_) => continue,
+      }
+    }
+  }
+
+  iterate(
+    package_json,
+    filter,
+    PackageJsonDepKind::Normal,
+    &package_json_deps.dependencies,
+    deps,
+  );
+  iterate(
+    package_json,
+    filter,
+    PackageJsonDepKind::Dev,
+    &package_json_deps.dev_dependencies,
+    deps,
+  );
 }

 fn deps_from_workspace(
@@ -501,7 +446,7 @@ pub struct DepManager {

   pending_changes: Vec<Change>,

-  dependencies_resolved: AtomicBool,
+  dependencies_resolved: AtomicFlag,
   module_load_preparer: Arc<ModuleLoadPreparer>,
   // TODO(nathanwhit): probably shouldn't be pub
   pub(crate) jsr_fetch_resolver: Arc<JsrFetchResolver>,
@@ -543,7 +488,7 @@ impl DepManager {
       resolved_versions: Vec::new(),
       latest_versions: Vec::new(),
       jsr_fetch_resolver,
-      dependencies_resolved: AtomicBool::new(false),
+      dependencies_resolved: AtomicFlag::lowered(),
       module_load_preparer,
       npm_fetch_resolver,
       npm_resolver,
@@ -584,10 +529,7 @@ impl DepManager {
   }

   async fn run_dependency_resolution(&self) -> Result<(), AnyError> {
-    if self
-      .dependencies_resolved
-      .load(std::sync::atomic::Ordering::Relaxed)
-    {
+    if self.dependencies_resolved.is_raised() {
       return Ok(());
     }

@@ -599,19 +541,8 @@ impl DepManager {
     // populate the information from the lockfile
     if let Some(lockfile) = &self.lockfile {
       let lockfile = lockfile.lock();
-      graph.fill_from_lockfile(FillFromLockfileOptions {
-        redirects: lockfile
-          .content
-          .redirects
-          .iter()
-          .map(|(from, to)| (from.as_str(), to.as_str())),
-        package_specifiers: lockfile
-          .content
-          .packages
-          .specifiers
-          .iter()
-          .map(|(dep, id)| (dep, id.as_str())),
-      });
+      crate::graph_util::fill_graph_from_lockfile(graph, &lockfile);
     }

     let npm_resolver = self.npm_resolver.as_managed().unwrap();
@@ -621,9 +552,7 @@ impl DepManager {
       }
       DepKind::Jsr => graph.packages.mappings().contains_key(&dep.req),
     }) {
-      self
-        .dependencies_resolved
-        .store(true, std::sync::atomic::Ordering::Relaxed);
+      self.dependencies_resolved.raise();
       graph_permit.commit();
       return Ok(());
     }
@@ -678,6 +607,7 @@ impl DepManager {
       )
       .await?;

+    self.dependencies_resolved.raise();
     graph_permit.commit();

     Ok(())
@@ -720,10 +650,6 @@ impl DepManager {
     if self.latest_versions.len() == self.deps.len() {
       return Ok(self.latest_versions.clone());
     }
-    let latest_tag_req = deno_semver::VersionReq::from_raw_text_and_inner(
-      "latest".into(),
-      deno_semver::RangeSetOrTag::Tag("latest".into()),
-    );
     let mut latest_versions = Vec::with_capacity(self.deps.len());

     let npm_sema = Semaphore::new(32);
@@ -735,14 +661,25 @@ impl DepManager {
         DepKind::Npm => futs.push_back(
           async {
             let semver_req = &dep.req;
-            let latest_req = PackageReq {
-              name: dep.req.name.clone(),
-              version_req: latest_tag_req.clone(),
-            };
             let _permit = npm_sema.acquire().await;
             let semver_compatible =
               self.npm_fetch_resolver.req_to_nv(semver_req).await;
-            let latest = self.npm_fetch_resolver.req_to_nv(&latest_req).await;
+            let info =
+              self.npm_fetch_resolver.package_info(&semver_req.name).await;
+            let latest = info
+              .and_then(|info| {
+                let latest_tag = info.dist_tags.get("latest")?;
+                let lower_bound = &semver_compatible.as_ref()?.version;
+                if latest_tag > lower_bound {
+                  Some(latest_tag.clone())
+                } else {
+                  latest_version(Some(latest_tag), info.versions.keys())
+                }
+              })
+              .map(|version| PackageNv {
+                name: semver_req.name.clone(),
+                version,
+              });
             PackageLatestVersion {
               latest,
               semver_compatible,
@@ -753,14 +690,29 @@ impl DepManager {
         DepKind::Jsr => futs.push_back(
           async {
             let semver_req = &dep.req;
-            let latest_req = PackageReq {
-              name: dep.req.name.clone(),
-              version_req: deno_semver::WILDCARD_VERSION_REQ.clone(),
-            };
             let _permit = jsr_sema.acquire().await;
             let semver_compatible =
              self.jsr_fetch_resolver.req_to_nv(semver_req).await;
-            let latest = self.jsr_fetch_resolver.req_to_nv(&latest_req).await;
+            let info =
+              self.jsr_fetch_resolver.package_info(&semver_req.name).await;
+            let latest = info
+              .and_then(|info| {
+                let lower_bound = &semver_compatible.as_ref()?.version;
+                latest_version(
+                  Some(lower_bound),
+                  info.versions.iter().filter_map(|(version, version_info)| {
+                    if !version_info.yanked {
+                      Some(version)
+                    } else {
+                      None
+                    }
+                  }),
+                )
+              })
+              .map(|version| PackageNv {
+                name: semver_req.name.clone(),
+                version,
+              });
             PackageLatestVersion {
               latest,
               semver_compatible,
@@ -825,11 +777,7 @@ impl DepManager {
       let dep = &mut self.deps[dep_id.0];
       dep.req.version_req = version_req.clone();
       match &dep.location {
-        DepLocation::DenoJson(arc, key_path, import_map_kind) => {
-          if matches!(import_map_kind, ImportMapKind::Outline) {
-            // not supported
-            continue;
-          }
+        DepLocation::DenoJson(arc, key_path, _) => {
           let updater =
             get_or_create_updater(&mut config_updaters, &dep.location)?;

@@ -962,3 +910,18 @@ fn parse_req_reference(
     DepKind::Jsr => JsrPackageReqReference::from_str(input)?.into_inner(),
   })
 }
+
+fn latest_version<'a>(
+  start: Option<&Version>,
+  versions: impl IntoIterator<Item = &'a Version>,
+) -> Option<Version> {
+  let mut best = start;
+  for version in versions {
+    match best {
+      Some(best_version) if version > best_version => best = Some(version),
+      None => best = Some(version),
+      _ => {}
+    }
+  }
+  best.cloned()
+}

@@ -3,18 +3,20 @@
 use std::collections::HashSet;
 use std::sync::Arc;

+use deno_cache_dir::file_fetcher::CacheSetting;
+use deno_core::anyhow::bail;
 use deno_core::error::AnyError;
 use deno_semver::package::PackageNv;
 use deno_semver::package::PackageReq;
+use deno_semver::StackString;
 use deno_semver::VersionReq;
 use deno_terminal::colors;

-use crate::args::CacheSetting;
 use crate::args::CliOptions;
 use crate::args::Flags;
 use crate::args::OutdatedFlags;
 use crate::factory::CliFactory;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::jsr::JsrFetchResolver;
 use crate::npm::NpmFetchResolver;
 use crate::tools::registry::pm::deps::DepKind;
@@ -30,7 +32,7 @@ struct OutdatedPackage {
   latest: String,
   semver_compatible: String,
   current: String,
-  name: String,
+  name: StackString,
 }

 #[allow(clippy::print_stdout)]
@@ -100,6 +102,23 @@ fn print_outdated_table(packages: &[OutdatedPackage]) {
   println!("└{package_fill}┴{current_fill}┴{update_fill}┴{latest_fill}┘",);
 }

+fn print_suggestion(compatible: bool) {
+  log::info!("");
+  let (cmd, txt) = if compatible {
+    ("", "compatible")
+  } else {
+    (" --latest", "available")
+  };
+  log::info!(
+    "{}",
+    color_print::cformat!(
+      "<p(245)>Run</> <u>deno outdated --update{}</> <p(245)>to update to the latest {} versions,</>\n<p(245)>or</> <u>deno outdated --help</> <p(245)>for more information.</>",
+      cmd,
+      txt,
+    )
+  );
+}
+
 fn print_outdated(
   deps: &mut DepManager,
   compatible: bool,
@@ -148,6 +167,7 @@ fn print_outdated(
   if !outdated.is_empty() {
     outdated.sort();
     print_outdated_table(&outdated);
+    print_suggestion(compatible);
   }

   Ok(())
@@ -162,15 +182,15 @@ pub async fn outdated(
   let workspace = cli_options.workspace();
   let http_client = factory.http_client_provider();
   let deps_http_cache = factory.global_http_cache()?;
-  let mut file_fetcher = FileFetcher::new(
+  let file_fetcher = CliFileFetcher::new(
     deps_http_cache.clone(),
-    CacheSetting::RespectHeaders,
-    true,
     http_client.clone(),
     Default::default(),
     None,
+    true,
+    CacheSetting::RespectHeaders,
+    log::Level::Trace,
   );
-  file_fetcher.set_download_log_level(log::Level::Trace);
   let file_fetcher = Arc::new(file_fetcher);
   let npm_fetch_resolver = Arc::new(NpmFetchResolver::new(
     file_fetcher.clone(),
@@ -179,6 +199,15 @@ pub async fn outdated(
   let jsr_fetch_resolver =
     Arc::new(JsrFetchResolver::new(file_fetcher.clone()));

+  if !cli_options.start_dir.has_deno_json()
+    && !cli_options.start_dir.has_pkg_json()
+  {
+    bail!(
+      "No deno.json or package.json in \"{}\".",
+      cli_options.initial_cwd().display(),
+    );
+  }
+
   let args = dep_manager_args(
     &factory,
     cli_options,

@@ -1,19 +1,35 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

+use std::borrow::Cow;
+use std::sync::Arc;
+
+use deno_ast::diagnostics::Diagnostic;
+use deno_ast::diagnostics::DiagnosticLevel;
+use deno_ast::diagnostics::DiagnosticLocation;
+use deno_ast::diagnostics::DiagnosticSnippet;
+use deno_ast::diagnostics::DiagnosticSnippetHighlight;
+use deno_ast::diagnostics::DiagnosticSnippetHighlightStyle;
+use deno_ast::diagnostics::DiagnosticSourcePos;
+use deno_ast::diagnostics::DiagnosticSourceRange;
 use deno_ast::ParsedSource;
 use deno_ast::SourceRange;
 use deno_ast::SourceTextInfo;
+use deno_ast::SourceTextProvider;
 use deno_config::workspace::MappedResolution;
 use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::WorkspaceResolver;
+use deno_core::anyhow;
 use deno_core::ModuleSpecifier;
 use deno_graph::DependencyDescriptor;
 use deno_graph::DynamicTemplatePart;
 use deno_graph::ParserModuleAnalyzer;
 use deno_graph::TypeScriptReference;
 use deno_package_json::PackageJsonDepValue;
-use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
+use deno_package_json::PackageJsonDepWorkspaceReq;
+use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
 use deno_runtime::deno_node::is_builtin_node_module;
+use deno_semver::Version;
+use deno_semver::VersionReq;

 use crate::resolver::CliSloppyImportsResolver;

@@ -24,34 +40,163 @@ pub enum SpecifierUnfurlerDiagnostic {
     text_info: SourceTextInfo,
     range: SourceRange,
   },
+  ResolvingNpmWorkspacePackage {
+    specifier: ModuleSpecifier,
+    package_name: String,
+    text_info: SourceTextInfo,
+    range: SourceRange,
+    reason: String,
+  },
 }

-impl SpecifierUnfurlerDiagnostic {
-  pub fn code(&self) -> &'static str {
-    match self {
-      Self::UnanalyzableDynamicImport { .. } => "unanalyzable-dynamic-import",
-    }
-  }
-
-  pub fn message(&self) -> &'static str {
-    match self {
-      Self::UnanalyzableDynamicImport { .. } => {
-        "unable to analyze dynamic import"
-      }
-    }
-  }
-}
+impl Diagnostic for SpecifierUnfurlerDiagnostic {
+  fn level(&self) -> DiagnosticLevel {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => {
+        DiagnosticLevel::Warning
+      }
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => {
+        DiagnosticLevel::Error
+      }
+    }
+  }
+
+  fn code(&self) -> Cow<'_, str> {
+    match self {
+      Self::UnanalyzableDynamicImport { .. } => "unanalyzable-dynamic-import",
+      Self::ResolvingNpmWorkspacePackage { .. } => "npm-workspace-package",
+    }
+    .into()
+  }
+
+  fn message(&self) -> Cow<'_, str> {
+    match self {
+      Self::UnanalyzableDynamicImport { .. } => {
+        "unable to analyze dynamic import".into()
+      }
+      Self::ResolvingNpmWorkspacePackage {
+        package_name,
+        reason,
+        ..
+      } => format!(
+        "failed resolving npm workspace package '{}': {}",
+        package_name, reason
+      )
+      .into(),
+    }
+  }
+
+  fn location(&self) -> deno_ast::diagnostics::DiagnosticLocation {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
+        specifier,
+        text_info,
+        range,
+      } => DiagnosticLocation::ModulePosition {
+        specifier: Cow::Borrowed(specifier),
+        text_info: Cow::Borrowed(text_info),
+        source_pos: DiagnosticSourcePos::SourcePos(range.start),
+      },
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
+        specifier,
+        text_info,
+        range,
+        ..
+      } => DiagnosticLocation::ModulePosition {
+        specifier: Cow::Borrowed(specifier),
+        text_info: Cow::Borrowed(text_info),
+        source_pos: DiagnosticSourcePos::SourcePos(range.start),
+      },
+    }
+  }
+
+  fn snippet(&self) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
+        text_info,
+        range,
+        ..
+      } => Some(DiagnosticSnippet {
+        source: Cow::Borrowed(text_info),
+        highlights: vec![DiagnosticSnippetHighlight {
+          style: DiagnosticSnippetHighlightStyle::Warning,
+          range: DiagnosticSourceRange {
+            start: DiagnosticSourcePos::SourcePos(range.start),
+            end: DiagnosticSourcePos::SourcePos(range.end),
+          },
+          description: Some("the unanalyzable dynamic import".into()),
+        }],
+      }),
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
+        text_info,
+        range,
+        ..
+      } => Some(DiagnosticSnippet {
+        source: Cow::Borrowed(text_info),
+        highlights: vec![DiagnosticSnippetHighlight {
+          style: DiagnosticSnippetHighlightStyle::Warning,
+          range: DiagnosticSourceRange {
+            start: DiagnosticSourcePos::SourcePos(range.start),
+            end: DiagnosticSourcePos::SourcePos(range.end),
+          },
+          description: Some("the unresolved import".into()),
+        }],
+      }),
+    }
+  }
+
+  fn hint(&self) -> Option<Cow<'_, str>> {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => {
+        None
+      }
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => Some(
+        "make sure the npm workspace package is resolvable and has a version field in its package.json".into()
+      ),
+    }
+  }
+
+  fn snippet_fixed(
+    &self,
+  ) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
+    None
+  }
+
+  fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => Cow::Borrowed(&[
+        Cow::Borrowed("after publishing this package, imports from the local import map / package.json do not work"),
+        Cow::Borrowed("dynamic imports that can not be analyzed at publish time will not be rewritten automatically"),
+        Cow::Borrowed("make sure the dynamic import is resolvable at runtime without an import map / package.json")
+      ]),
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => {
+        Cow::Borrowed(&[])
+      },
+    }
+  }
+
+  fn docs_url(&self) -> Option<Cow<'_, str>> {
+    None
+  }
+}
+
+enum UnfurlSpecifierError {
+  Workspace {
+    package_name: String,
+    reason: String,
+  },
+}

 pub struct SpecifierUnfurler {
-  sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
-  workspace_resolver: WorkspaceResolver,
+  sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
+  workspace_resolver: Arc<WorkspaceResolver>,
   bare_node_builtins: bool,
 }

 impl SpecifierUnfurler {
   pub fn new(
-    sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
-    workspace_resolver: WorkspaceResolver,
+    sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
+    workspace_resolver: Arc<WorkspaceResolver>,
     bare_node_builtins: bool,
   ) -> Self {
     debug_assert_eq!(
@@ -65,11 +210,45 @@ impl SpecifierUnfurler {
     }
   }

+  fn unfurl_specifier_reporting_diagnostic(
+    &self,
+    referrer: &ModuleSpecifier,
+    specifier: &str,
+    text_info: &SourceTextInfo,
+    range: &deno_graph::PositionRange,
+    diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
+  ) -> Option<String> {
+    match self.unfurl_specifier(referrer, specifier) {
+      Ok(maybe_unfurled) => maybe_unfurled,
+      Err(diagnostic) => match diagnostic {
+        UnfurlSpecifierError::Workspace {
+          package_name,
+          reason,
+        } => {
+          let range = to_range(text_info, range);
+          diagnostic_reporter(
+            SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
+              specifier: referrer.clone(),
+              package_name,
+              text_info: text_info.clone(),
+              range: SourceRange::new(
+                text_info.start_pos() + range.start,
+                text_info.start_pos() + range.end,
+              ),
+              reason,
+            },
+          );
+          None
+        }
+      },
+    }
+  }
+
   fn unfurl_specifier(
     &self,
     referrer: &ModuleSpecifier,
     specifier: &str,
-  ) -> Option<String> {
+  ) -> Result<Option<String>, UnfurlSpecifierError> {
     let resolved = if let Ok(resolved) =
       self.workspace_resolver.resolve(specifier, referrer)
     {
@@ -120,8 +299,40 @@ impl SpecifierUnfurler {
             ))
             .ok()
           }
-          PackageJsonDepValue::Workspace(version_req) => {
-            // todo(#24612): consider warning or error when this is also a jsr package?
+          PackageJsonDepValue::Workspace(workspace_version_req) => {
+            let version_req = match workspace_version_req {
+              PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
+                Cow::Borrowed(version_req)
+              }
+              PackageJsonDepWorkspaceReq::Caret => {
+                let version = self
+                  .find_workspace_npm_dep_version(alias)
+                  .map_err(|err| UnfurlSpecifierError::Workspace {
+                    package_name: alias.to_string(),
+                    reason: err.to_string(),
+                  })?;
+                // version was validated, so ok to unwrap
+                Cow::Owned(
+                  VersionReq::parse_from_npm(&format!("^{}", version))
+                    .unwrap(),
+                )
+              }
+              PackageJsonDepWorkspaceReq::Tilde => {
+                let version = self
+                  .find_workspace_npm_dep_version(alias)
+                  .map_err(|err| UnfurlSpecifierError::Workspace {
+                    package_name: alias.to_string(),
+                    reason: err.to_string(),
+                  })?;
+                // version was validated, so ok to unwrap
+                Cow::Owned(
+                  VersionReq::parse_from_npm(&format!("~{}", version))
+                    .unwrap(),
+                )
+              }
+            };
+            // todo(#24612): warn when this is also a jsr package telling
+            // people to map the specifiers in the import map
             ModuleSpecifier::parse(&format!(
               "npm:{}@{}{}",
               alias,
@@ -151,10 +362,14 @@ impl SpecifierUnfurler {
       None if self.bare_node_builtins && is_builtin_node_module(specifier) => {
         format!("node:{specifier}").parse().unwrap()
       }
-      None => ModuleSpecifier::options()
+      None => match ModuleSpecifier::options()
         .base_url(Some(referrer))
         .parse(specifier)
-        .ok()?,
+        .ok()
+      {
+        Some(value) => value,
+        None => return Ok(None),
+      },
     };
     // TODO(lucacasonato): this requires integration in deno_graph first
     // let resolved = if let Ok(specifier) =
@@ -180,7 +395,7 @@ impl SpecifierUnfurler {
     let resolved =
       if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
         sloppy_imports_resolver
-          .resolve(&resolved, SloppyImportsResolutionMode::Execution)
+          .resolve(&resolved, SloppyImportsResolutionKind::Execution)
           .map(|res| res.into_specifier())
          .unwrap_or(resolved)
       } else {
@@ -188,7 +403,7 @@ impl SpecifierUnfurler {
     };
     let relative_resolved = relative_url(&resolved, referrer);
     if relative_resolved == specifier {
-      None // nothing to unfurl
+      Ok(None) // nothing to unfurl
     } else {
       log::debug!(
         "Unfurled specifier: {} from {} -> {}",
@@ -196,7 +411,29 @@ impl SpecifierUnfurler {
         referrer,
         relative_resolved
       );
-      Some(relative_resolved)
+      Ok(Some(relative_resolved))
+    }
+  }
+
+  fn find_workspace_npm_dep_version(
+    &self,
+    pkg_name: &str,
+  ) -> Result<Version, anyhow::Error> {
+    // todo(#24612): warn when this is also a jsr package telling
+    // people to map the specifiers in the import map
+    let pkg_json = self
+      .workspace_resolver
+      .package_jsons()
+      .find(|pkg| pkg.name.as_deref() == Some(pkg_name))
+      .ok_or_else(|| {
+        anyhow::anyhow!("unable to find npm package in workspace")
+      })?;
+    if let Some(version) = &pkg_json.version {
+      Ok(Version::parse_from_npm(version)?)
+    } else {
+      Err(anyhow::anyhow!(
+        "missing version in package.json of npm package",
+      ))
     }
   }

@@ -208,6 +445,7 @@ impl SpecifierUnfurler {
     text_info: &SourceTextInfo,
     dep: &deno_graph::DynamicDependencyDescriptor,
     text_changes: &mut Vec<deno_ast::TextChange>,
+    diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
   ) -> bool {
     match &dep.argument {
       deno_graph::DynamicArgument::String(specifier) => {
@@ -217,8 +455,14 @@ impl SpecifierUnfurler {
         let Some(relative_index) = maybe_relative_index else {
           return true; // always say it's analyzable for a string
         };
-        let unfurled = self.unfurl_specifier(module_url, specifier);
-        if let Some(unfurled) = unfurled {
+        let maybe_unfurled = self.unfurl_specifier_reporting_diagnostic(
+          module_url,
+          specifier,
+          text_info,
+          &dep.argument_range,
+          diagnostic_reporter,
+        );
+        if let Some(unfurled) = maybe_unfurled {
           let start = range.start + relative_index;
           text_changes.push(deno_ast::TextChange {
             range: start..start + specifier.len(),
@@ -238,7 +482,13 @@ impl SpecifierUnfurler {
         if !specifier.ends_with('/') {
           return false;
         }
-        let unfurled = self.unfurl_specifier(module_url, specifier);
+        let unfurled = self.unfurl_specifier_reporting_diagnostic(
+          module_url,
+          specifier,
+          text_info,
+          &dep.argument_range,
+          diagnostic_reporter,
+        );
         let Some(unfurled) = unfurled else {
           return true; // nothing to unfurl
         };
@@ -280,8 +530,15 @@ impl SpecifierUnfurler {
     let analyze_specifier =
       |specifier: &str,
        range: &deno_graph::PositionRange,
-       text_changes: &mut Vec<deno_ast::TextChange>| {
-        if let Some(unfurled) = self.unfurl_specifier(url, specifier) {
+       text_changes: &mut Vec<deno_ast::TextChange>,
+       diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic)| {
+        if let Some(unfurled) = self.unfurl_specifier_reporting_diagnostic(
+          url,
+          specifier,
+          text_info,
+          range,
+          diagnostic_reporter,
+        ) {
           text_changes.push(deno_ast::TextChange {
             range: to_range(text_info, range),
             new_text: unfurled,
@@ -295,11 +552,17 @@ impl SpecifierUnfurler {
           &dep.specifier,
           &dep.specifier_range,
           &mut text_changes,
+          diagnostic_reporter,
         );
       }
       DependencyDescriptor::Dynamic(dep) => {
-        let success =
-          self.try_unfurl_dynamic_dep(url, text_info, dep, &mut text_changes);
+        let success = self.try_unfurl_dynamic_dep(
+          url,
+          text_info,
+          dep,
+          &mut text_changes,
+          diagnostic_reporter,
+        );

         if !success {
           let start_pos = text_info.line_start(dep.argument_range.start.line)
@@ -319,20 +582,22 @@ impl SpecifierUnfurler {
     }
     for ts_ref in &module_info.ts_references {
       let specifier_with_range = match ts_ref {
-        TypeScriptReference::Path(range) => range,
-        TypeScriptReference::Types(range) => range,
+        TypeScriptReference::Path(s) => s,
+        TypeScriptReference::Types { specifier, .. } => specifier,
       };
       analyze_specifier(
         &specifier_with_range.text,
         &specifier_with_range.range,
         &mut text_changes,
+        diagnostic_reporter,
       );
     }
-    for specifier_with_range in &module_info.jsdoc_imports {
+    for jsdoc in &module_info.jsdoc_imports {
       analyze_specifier(
-        &specifier_with_range.text,
-        &specifier_with_range.range,
+        &jsdoc.specifier.text,
+        &jsdoc.specifier.range,
         &mut text_changes,
+        diagnostic_reporter,
       );
     }
     if let Some(specifier_with_range) = &module_info.jsx_import_source {
@@ -340,6 +605,7 @@ impl SpecifierUnfurler {
         &specifier_with_range.text,
         &specifier_with_range.range,
         &mut text_changes,
+        diagnostic_reporter,
       );
     }

@@ -458,10 +724,10 @@ mod tests {
     );
     let fs = Arc::new(RealFs);
     let unfurler = SpecifierUnfurler::new(
-      Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
-        fs,
-      ))),
-      workspace_resolver,
+      Some(Arc::new(CliSloppyImportsResolver::new(
+        SloppyImportsCachedFs::new(fs),
+      ))),
+      Arc::new(workspace_resolver),
       true,
     );

@@ -547,4 +813,114 @@ const warn2 = await import(`${expr}`);
     assert_eq!(unfurled_source, expected_source);
     }
   }
+
+  #[test]
+  fn test_unfurling_npm_dep_workspace_specifier() {
+    let cwd = testdata_path().join("unfurl").to_path_buf();
+
+    let pkg_json_add = PackageJson::load_from_value(
+      cwd.join("add/package.json"),
+      json!({ "name": "add", "version": "0.1.0", }),
+    );
+    let pkg_json_subtract = PackageJson::load_from_value(
+      cwd.join("subtract/package.json"),
+      json!({ "name": "subtract", "version": "0.2.0", }),
+    );
+    let pkg_json_publishing = PackageJson::load_from_value(
+      cwd.join("publish/package.json"),
+      json!({
+        "name": "@denotest/main",
+        "version": "1.0.0",
+        "dependencies": {
+          "add": "workspace:~",
+          "subtract": "workspace:^",
+          "non-existent": "workspace:~",
+        }
+      }),
+    );
+    let root_pkg_json = PackageJson::load_from_value(
+      cwd.join("package.json"),
+      json!({ "workspaces": ["./publish", "./subtract", "./add"] }),
+    );
+    let workspace_resolver = WorkspaceResolver::new_raw(
+      Arc::new(ModuleSpecifier::from_directory_path(&cwd).unwrap()),
+      None,
+      vec![ResolverWorkspaceJsrPackage {
+        is_patch: false,
+        base: ModuleSpecifier::from_directory_path(
+          cwd.join("publish/jsr.json"),
+        )
+        .unwrap(),
+        name: "@denotest/main".to_string(),
+        version: Some(Version::parse_standard("1.0.0").unwrap()),
+        exports: IndexMap::from([(".".to_string(), "mod.ts".to_string())]),
+      }],
+      vec![
+        Arc::new(root_pkg_json),
+        Arc::new(pkg_json_add),
+        Arc::new(pkg_json_subtract),
+        Arc::new(pkg_json_publishing),
+      ],
+      deno_config::workspace::PackageJsonDepResolution::Enabled,
+    );
+    let fs = Arc::new(RealFs);
+    let unfurler = SpecifierUnfurler::new(
+      Some(Arc::new(CliSloppyImportsResolver::new(
+        SloppyImportsCachedFs::new(fs),
+      ))),
+      Arc::new(workspace_resolver),
+      true,
+    );
+
+    {
+      let source_code = r#"import add from "add";
+import subtract from "subtract";
+
+console.log(add, subtract);
+"#;
+      let specifier =
+        ModuleSpecifier::from_file_path(cwd.join("publish").join("mod.ts"))
+          .unwrap();
+      let source = parse_ast(&specifier, source_code);
+      let mut d = Vec::new();
+      let mut reporter = |diagnostic| d.push(diagnostic);
+      let unfurled_source = unfurler.unfurl(&specifier, &source, &mut reporter);
+      assert_eq!(d.len(), 0);
+      // it will inline the version
+      let expected_source = r#"import add from "npm:add@~0.1.0";
+import subtract from "npm:subtract@^0.2.0";
+
+console.log(add, subtract);
+"#;
+      assert_eq!(unfurled_source, expected_source);
+    }
+
+    {
+      let source_code = r#"import nonExistent from "non-existent";
+console.log(nonExistent);
+"#;
+      let specifier =
+        ModuleSpecifier::from_file_path(cwd.join("publish").join("other.ts"))
+          .unwrap();
+      let source = parse_ast(&specifier, source_code);
+      let mut d = Vec::new();
+      let mut reporter = |diagnostic| d.push(diagnostic);
+      let unfurled_source = unfurler.unfurl(&specifier, &source, &mut reporter);
+      assert_eq!(d.len(), 1);
+      match &d[0] {
+        SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
+          package_name,
+          reason,
+          ..
+        } => {
+          assert_eq!(package_name, "non-existent");
+          assert_eq!(reason, "unable to find npm package in workspace");
+        }
+        _ => unreachable!(),
+      }
+      // won't make any changes, but the above will be a fatal error
+      assert!(matches!(d[0].level(), DiagnosticLevel::Error));
+      assert_eq!(unfurled_source, source_code);
+    }
+  }
 }

@@ -11,7 +11,8 @@ use crate::args::ReplFlags;
 use crate::cdp;
 use crate::colors;
 use crate::factory::CliFactory;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
+use crate::file_fetcher::TextDecodedFile;
 use deno_core::error::AnyError;
 use deno_core::futures::StreamExt;
 use deno_core::serde_json;
@@ -143,7 +144,7 @@ async fn read_line_and_poll(

 async fn read_eval_file(
   cli_options: &CliOptions,
-  file_fetcher: &FileFetcher,
+  file_fetcher: &CliFileFetcher,
   eval_file: &str,
 ) -> Result<Arc<str>, AnyError> {
   let specifier =
@@ -151,7 +152,7 @@ async fn read_eval_file(

   let file = file_fetcher.fetch_bypass_permissions(&specifier).await?;

-  Ok(file.into_text_decoded()?.source)
+  Ok(TextDecodedFile::decode(file)?.source)
 }

 #[allow(clippy::print_stdout)]

@@ -43,13 +43,13 @@ use deno_core::unsync::spawn;
 use deno_core::url::Url;
 use deno_core::LocalInspectorSession;
 use deno_core::PollEventLoopOptions;
-use deno_graph::source::ResolutionMode;
 use deno_graph::Position;
 use deno_graph::PositionRange;
 use deno_graph::SpecifierWithRange;
 use deno_runtime::worker::MainWorker;
 use deno_semver::npm::NpmPackageReqReference;
-use node_resolver::NodeModuleKind;
+use node_resolver::NodeResolutionKind;
+use node_resolver::ResolutionMode;
 use once_cell::sync::Lazy;
 use regex::Match;
 use regex::Regex;
@@ -701,11 +701,6 @@ impl ReplSession {
     let mut collector = ImportCollector::new();
     program.visit_with(&mut collector);

-    let referrer_range = deno_graph::Range {
-      specifier: self.referrer.clone(),
-      start: deno_graph::Position::zeroed(),
-      end: deno_graph::Position::zeroed(),
-    };
     let resolved_imports = collector
       .imports
       .iter()
@@ -714,9 +709,10 @@ impl ReplSession {
           .resolver
           .resolve(
             i,
-            &referrer_range,
-            NodeModuleKind::Esm,
-            ResolutionMode::Execution,
+            &self.referrer,
+            deno_graph::Position::zeroed(),
+            ResolutionMode::Import,
+            NodeResolutionKind::Execution,
           )
           .ok()
           .or_else(|| ModuleSpecifier::parse(i).ok())
@@ -731,7 +727,9 @@ impl ReplSession {
     let has_node_specifier =
       resolved_imports.iter().any(|url| url.scheme() == "node");
     if !npm_imports.is_empty() || has_node_specifier {
-      npm_resolver.add_package_reqs(&npm_imports).await?;
+      npm_resolver
+        .add_and_cache_package_reqs(&npm_imports)
+        .await?;

       // prevent messages in the repl about @types/node not being cached
       if has_node_specifier {

@ -3,6 +3,7 @@
|
||||||
use std::io::Read;
|
use std::io::Read;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use deno_cache_dir::file_fetcher::File;
|
||||||
use deno_config::deno_json::NodeModulesDirMode;
|
use deno_config::deno_json::NodeModulesDirMode;
|
||||||
use deno_core::error::AnyError;
|
use deno_core::error::AnyError;
|
||||||
use deno_runtime::WorkerExecutionMode;
|
use deno_runtime::WorkerExecutionMode;
|
||||||
|
@ -11,7 +12,6 @@ use crate::args::EvalFlags;
|
||||||
use crate::args::Flags;
|
use crate::args::Flags;
|
||||||
use crate::args::WatchFlagsWithPaths;
|
use crate::args::WatchFlagsWithPaths;
|
||||||
use crate::factory::CliFactory;
|
use crate::factory::CliFactory;
|
||||||
use crate::file_fetcher::File;
|
|
||||||
use crate::util;
|
use crate::util;
|
||||||
use crate::util::file_watcher::WatcherRestartMode;
|
use crate::util::file_watcher::WatcherRestartMode;
|
||||||
|
|
||||||
|
@ -97,7 +97,7 @@ pub async fn run_from_stdin(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
||||||
// Save a fake file into file fetcher cache
|
// Save a fake file into file fetcher cache
|
||||||
// to allow module access by TS compiler
|
// to allow module access by TS compiler
|
||||||
file_fetcher.insert_memory_files(File {
|
file_fetcher.insert_memory_files(File {
|
||||||
specifier: main_module.clone(),
|
url: main_module.clone(),
|
||||||
maybe_headers: None,
|
maybe_headers: None,
|
||||||
source: source.into(),
|
source: source.into(),
|
||||||
});
|
});
|
||||||
|
@ -184,7 +184,7 @@ pub async fn eval_command(
|
||||||
// Save a fake file into file fetcher cache
|
// Save a fake file into file fetcher cache
|
||||||
// to allow module access by TS compiler.
|
// to allow module access by TS compiler.
|
||||||
file_fetcher.insert_memory_files(File {
|
file_fetcher.insert_memory_files(File {
|
||||||
specifier: main_module.clone(),
|
url: main_module.clone(),
|
||||||
maybe_headers: None,
|
maybe_headers: None,
|
||||||
source: source_code.into_bytes().into(),
|
source: source_code.into_bytes().into(),
|
||||||
});
|
});
|
||||||
|
@ -198,13 +198,23 @@ pub async fn eval_command(
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn maybe_npm_install(factory: &CliFactory) -> Result<(), AnyError> {
|
pub async fn maybe_npm_install(factory: &CliFactory) -> Result<(), AnyError> {
|
||||||
|
let cli_options = factory.cli_options()?;
|
||||||
// ensure an "npm install" is done if the user has explicitly
|
// ensure an "npm install" is done if the user has explicitly
|
||||||
// opted into using a managed node_modules directory
|
// opted into using a managed node_modules directory
|
||||||
if factory.cli_options()?.node_modules_dir()?
|
if cli_options.node_modules_dir()? == Some(NodeModulesDirMode::Auto) {
|
||||||
== Some(NodeModulesDirMode::Auto)
|
|
||||||
{
|
|
||||||
if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() {
|
if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() {
|
||||||
npm_resolver.ensure_top_level_package_json_install().await?;
|
let already_done =
|
||||||
|
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||||
|
if !already_done
|
||||||
|
&& matches!(
|
||||||
|
cli_options.default_npm_caching_strategy(),
|
||||||
|
crate::graph_util::NpmCachingStrategy::Eager
|
||||||
|
)
|
||||||
|
{
|
||||||
|
npm_resolver
|
||||||
|
.cache_packages(crate::npm::PackageCaching::All)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
@ -8,6 +8,7 @@ use std::path::PathBuf;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use console_static_text::ansi::strip_ansi_codes;
|
||||||
use deno_config::workspace::FolderConfigs;
|
use deno_config::workspace::FolderConfigs;
|
||||||
use deno_config::workspace::TaskDefinition;
|
use deno_config::workspace::TaskDefinition;
|
||||||
use deno_config::workspace::TaskOrScript;
|
use deno_config::workspace::TaskOrScript;
|
||||||
|
@ -25,6 +26,7 @@ use deno_core::futures::StreamExt;
|
||||||
use deno_core::url::Url;
|
use deno_core::url::Url;
|
||||||
use deno_path_util::normalize_path;
|
use deno_path_util::normalize_path;
|
||||||
use deno_runtime::deno_node::NodeResolver;
|
use deno_runtime::deno_node::NodeResolver;
|
||||||
|
use deno_task_shell::KillSignal;
|
||||||
use deno_task_shell::ShellCommand;
|
use deno_task_shell::ShellCommand;
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
|
@ -36,6 +38,7 @@ use crate::colors;
|
||||||
use crate::factory::CliFactory;
|
use crate::factory::CliFactory;
|
||||||
use crate::npm::CliNpmResolver;
|
use crate::npm::CliNpmResolver;
|
||||||
use crate::task_runner;
|
use crate::task_runner;
|
||||||
|
use crate::task_runner::run_future_forwarding_signals;
|
||||||
use crate::util::fs::canonicalize_path;
|
use crate::util::fs::canonicalize_path;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
@ -75,43 +78,29 @@ pub async fn execute_script(
|
||||||
let packages_task_configs: Vec<PackageTaskInfo> = if let Some(filter) =
|
let packages_task_configs: Vec<PackageTaskInfo> = if let Some(filter) =
|
||||||
&task_flags.filter
|
&task_flags.filter
|
||||||
{
|
{
|
||||||
let task_name = task_flags.task.as_ref().unwrap();
|
|
||||||
|
|
||||||
// Filter based on package name
|
// Filter based on package name
|
||||||
let package_regex = arg_to_regex(filter)?;
|
let package_regex = arg_to_regex(filter)?;
|
||||||
let task_regex = arg_to_regex(task_name)?;
|
let workspace = cli_options.workspace();
|
||||||
|
|
||||||
|
let Some(task_name) = &task_flags.task else {
|
||||||
|
print_available_tasks_workspace(
|
||||||
|
cli_options,
|
||||||
|
&package_regex,
|
||||||
|
filter,
|
||||||
|
force_use_pkg_json,
|
||||||
|
task_flags.recursive,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
return Ok(0);
|
||||||
|
};
|
||||||
|
|
||||||
|
let task_name_filter = arg_to_task_name_filter(task_name)?;
|
||||||
let mut packages_task_info: Vec<PackageTaskInfo> = vec![];
|
let mut packages_task_info: Vec<PackageTaskInfo> = vec![];
|
||||||
|
|
||||||
fn matches_package(
|
|
||||||
config: &FolderConfigs,
|
|
||||||
force_use_pkg_json: bool,
|
|
||||||
regex: &Regex,
|
|
||||||
) -> bool {
|
|
||||||
if !force_use_pkg_json {
|
|
||||||
if let Some(deno_json) = &config.deno_json {
|
|
||||||
if let Some(name) = &deno_json.json.name {
|
|
||||||
if regex.is_match(name) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(package_json) = &config.pkg_json {
|
|
||||||
if let Some(name) = &package_json.name {
|
|
||||||
if regex.is_match(name) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
false
|
|
||||||
}
|
|
||||||
|
|
||||||
let workspace = cli_options.workspace();
|
|
||||||
for folder in workspace.config_folders() {
|
for folder in workspace.config_folders() {
|
||||||
if !matches_package(folder.1, force_use_pkg_json, &package_regex) {
|
if !task_flags.recursive
|
||||||
|
&& !matches_package(folder.1, force_use_pkg_json, &package_regex)
|
||||||
|
{
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -148,12 +137,20 @@ pub async fn execute_script(
|
||||||
|
|
||||||
// Match tasks in deno.json
|
// Match tasks in deno.json
|
||||||
for name in tasks_config.task_names() {
|
for name in tasks_config.task_names() {
|
||||||
if task_regex.is_match(name) && !visited.contains(name) {
|
let matches_filter = match &task_name_filter {
|
||||||
|
TaskNameFilter::Exact(n) => *n == name,
|
||||||
|
TaskNameFilter::Regex(re) => re.is_match(name),
|
||||||
|
};
|
||||||
|
if matches_filter && !visited.contains(name) {
|
||||||
matched.insert(name.to_string());
|
matched.insert(name.to_string());
|
||||||
visit_task(&tasks_config, &mut visited, name);
|
visit_task(&tasks_config, &mut visited, name);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if matched.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
packages_task_info.push(PackageTaskInfo {
|
packages_task_info.push(PackageTaskInfo {
|
||||||
matched_tasks: matched
|
matched_tasks: matched
|
||||||
.iter()
|
.iter()
|
||||||
|
@ -195,6 +192,7 @@ pub async fn execute_script(
|
||||||
&mut std::io::stdout(),
|
&mut std::io::stdout(),
|
||||||
&cli_options.start_dir,
|
&cli_options.start_dir,
|
||||||
&tasks_config,
|
&tasks_config,
|
||||||
|
None,
|
||||||
)?;
|
)?;
|
||||||
return Ok(0);
|
return Ok(0);
|
||||||
};
|
};
|
||||||
|
@ -225,28 +223,36 @@ pub async fn execute_script(
|
||||||
concurrency: no_of_concurrent_tasks.into(),
|
concurrency: no_of_concurrent_tasks.into(),
|
||||||
};
|
};
|
||||||
|
|
||||||
if task_flags.eval {
|
let kill_signal = KillSignal::default();
|
||||||
return task_runner
|
run_future_forwarding_signals(kill_signal.clone(), async {
|
||||||
.run_deno_task(
|
if task_flags.eval {
|
||||||
&Url::from_directory_path(cli_options.initial_cwd()).unwrap(),
|
return task_runner
|
||||||
"",
|
.run_deno_task(
|
||||||
&TaskDefinition {
|
&Url::from_directory_path(cli_options.initial_cwd()).unwrap(),
|
||||||
command: task_flags.task.as_ref().unwrap().to_string(),
|
"",
|
||||||
dependencies: vec![],
|
&TaskDefinition {
|
||||||
description: None,
|
command: Some(task_flags.task.as_ref().unwrap().to_string()),
|
||||||
},
|
dependencies: vec![],
|
||||||
)
|
description: None,
|
||||||
.await;
|
},
|
||||||
}
|
kill_signal,
|
||||||
|
cli_options.argv(),
|
||||||
for task_config in &packages_task_configs {
|
)
|
||||||
let exit_code = task_runner.run_tasks(task_config).await?;
|
.await;
|
||||||
if exit_code > 0 {
|
|
||||||
return Ok(exit_code);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
Ok(0)
|
for task_config in &packages_task_configs {
|
||||||
|
let exit_code = task_runner
|
||||||
|
.run_tasks(task_config, &kill_signal, cli_options.argv())
|
||||||
|
.await?;
|
||||||
|
if exit_code > 0 {
|
||||||
|
return Ok(exit_code);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(0)
|
||||||
|
})
|
||||||
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
struct RunSingleOptions<'a> {
|
struct RunSingleOptions<'a> {
|
||||||
|
@ -254,6 +260,8 @@ struct RunSingleOptions<'a> {
|
||||||
script: &'a str,
|
script: &'a str,
|
||||||
cwd: &'a Path,
|
cwd: &'a Path,
|
||||||
custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
|
custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
|
||||||
|
kill_signal: KillSignal,
|
||||||
|
argv: &'a [String],
|
||||||
}
|
}
|
||||||
|
|
||||||
struct TaskRunner<'a> {
|
struct TaskRunner<'a> {
|
||||||
|
@ -269,9 +277,11 @@ impl<'a> TaskRunner<'a> {
|
||||||
pub async fn run_tasks(
|
pub async fn run_tasks(
|
||||||
&self,
|
&self,
|
||||||
pkg_tasks_config: &PackageTaskInfo,
|
pkg_tasks_config: &PackageTaskInfo,
|
||||||
|
kill_signal: &KillSignal,
|
||||||
|
argv: &[String],
|
||||||
) -> Result<i32, deno_core::anyhow::Error> {
|
) -> Result<i32, deno_core::anyhow::Error> {
|
||||||
match sort_tasks_topo(pkg_tasks_config) {
|
match sort_tasks_topo(pkg_tasks_config) {
|
||||||
Ok(sorted) => self.run_tasks_in_parallel(sorted).await,
|
Ok(sorted) => self.run_tasks_in_parallel(sorted, kill_signal, argv).await,
|
||||||
Err(err) => match err {
|
Err(err) => match err {
|
||||||
TaskError::NotFound(name) => {
|
TaskError::NotFound(name) => {
|
||||||
if self.task_flags.is_run {
|
if self.task_flags.is_run {
|
||||||
|
@ -300,12 +310,15 @@ impl<'a> TaskRunner<'a> {
|
||||||
&mut std::io::stderr(),
|
&mut std::io::stderr(),
|
||||||
&self.cli_options.start_dir,
|
&self.cli_options.start_dir,
|
||||||
tasks_config,
|
tasks_config,
|
||||||
|
None,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn run_tasks_in_parallel(
|
async fn run_tasks_in_parallel(
|
||||||
&self,
|
&self,
|
||||||
tasks: Vec<ResolvedTask<'a>>,
|
tasks: Vec<ResolvedTask<'a>>,
|
||||||
|
kill_signal: &KillSignal,
|
||||||
|
args: &[String],
|
||||||
) -> Result<i32, deno_core::anyhow::Error> {
|
) -> Result<i32, deno_core::anyhow::Error> {
|
||||||
struct PendingTasksContext<'a> {
|
struct PendingTasksContext<'a> {
|
||||||
completed: HashSet<usize>,
|
completed: HashSet<usize>,
|
||||||
|
@ -326,13 +339,22 @@ impl<'a> TaskRunner<'a> {
|
||||||
fn get_next_task<'b>(
|
fn get_next_task<'b>(
|
||||||
&mut self,
|
&mut self,
|
||||||
runner: &'b TaskRunner<'b>,
|
runner: &'b TaskRunner<'b>,
|
||||||
|
kill_signal: &KillSignal,
|
||||||
|
argv: &'a [String],
|
||||||
) -> Option<
|
) -> Option<
|
||||||
LocalBoxFuture<'b, Result<(i32, &'a ResolvedTask<'a>), AnyError>>,
|
LocalBoxFuture<'b, Result<(i32, &'a ResolvedTask<'a>), AnyError>>,
|
||||||
>
|
>
|
||||||
where
|
where
|
||||||
'a: 'b,
|
'a: 'b,
|
||||||
{
|
{
|
||||||
for task in self.tasks.iter() {
|
let mut tasks_iter = self.tasks.iter().peekable();
|
||||||
|
while let Some(task) = tasks_iter.next() {
|
||||||
|
let args = if tasks_iter.peek().is_none() {
|
||||||
|
argv
|
||||||
|
} else {
|
||||||
|
&[]
|
||||||
|
};
|
||||||
|
|
||||||
if self.completed.contains(&task.id)
|
if self.completed.contains(&task.id)
|
||||||
|| self.running.contains(&task.id)
|
|| self.running.contains(&task.id)
|
||||||
{
|
{
|
||||||
|
@ -348,15 +370,30 @@ impl<'a> TaskRunner<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
self.running.insert(task.id);
|
self.running.insert(task.id);
|
||||||
|
let kill_signal = kill_signal.clone();
|
||||||
return Some(
|
return Some(
|
||||||
async move {
|
async move {
|
||||||
match task.task_or_script {
|
match task.task_or_script {
|
||||||
TaskOrScript::Task(_, def) => {
|
TaskOrScript::Task(_, def) => {
|
||||||
runner.run_deno_task(task.folder_url, task.name, def).await
|
runner
|
||||||
|
.run_deno_task(
|
||||||
|
task.folder_url,
|
||||||
|
task.name,
|
||||||
|
def,
|
||||||
|
kill_signal,
|
||||||
|
args,
|
||||||
|
)
|
||||||
|
.await
|
||||||
}
|
}
|
||||||
TaskOrScript::Script(scripts, _) => {
|
TaskOrScript::Script(scripts, _) => {
|
||||||
runner
|
runner
|
||||||
.run_npm_script(task.folder_url, task.name, scripts)
|
.run_npm_script(
|
||||||
|
task.folder_url,
|
||||||
|
task.name,
|
||||||
|
scripts,
|
||||||
|
kill_signal,
|
||||||
|
args,
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -379,7 +416,7 @@ impl<'a> TaskRunner<'a> {
|
||||||
|
|
||||||
while context.has_remaining_tasks() {
|
while context.has_remaining_tasks() {
|
||||||
while queue.len() < self.concurrency {
|
while queue.len() < self.concurrency {
|
||||||
if let Some(task) = context.get_next_task(self) {
|
if let Some(task) = context.get_next_task(self, kill_signal, args) {
|
||||||
queue.push(task);
|
queue.push(task);
|
||||||
} else {
|
} else {
|
||||||
break;
|
break;
|
||||||
|
@ -408,7 +445,26 @@ impl<'a> TaskRunner<'a> {
|
||||||
dir_url: &Url,
|
dir_url: &Url,
|
||||||
task_name: &str,
|
task_name: &str,
|
||||||
definition: &TaskDefinition,
|
definition: &TaskDefinition,
|
||||||
|
kill_signal: KillSignal,
|
||||||
|
argv: &'a [String],
|
||||||
) -> Result<i32, deno_core::anyhow::Error> {
|
) -> Result<i32, deno_core::anyhow::Error> {
|
||||||
|
let Some(command) = &definition.command else {
|
||||||
|
log::info!(
|
||||||
|
"{} {} {}",
|
||||||
|
colors::green("Task"),
|
||||||
|
colors::cyan(task_name),
|
||||||
|
colors::gray("(no command)")
|
||||||
|
);
|
||||||
|
return Ok(0);
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
|
||||||
|
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||||
|
npm_resolver
|
||||||
|
.cache_packages(crate::npm::PackageCaching::All)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
let cwd = match &self.task_flags.cwd {
|
let cwd = match &self.task_flags.cwd {
|
||||||
Some(path) => canonicalize_path(&PathBuf::from(path))
|
Some(path) => canonicalize_path(&PathBuf::from(path))
|
||||||
.context("failed canonicalizing --cwd")?,
|
.context("failed canonicalizing --cwd")?,
|
||||||
|
@ -419,12 +475,15 @@ impl<'a> TaskRunner<'a> {
|
||||||
self.npm_resolver,
|
self.npm_resolver,
|
||||||
self.node_resolver,
|
self.node_resolver,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
self
|
self
|
||||||
.run_single(RunSingleOptions {
|
.run_single(RunSingleOptions {
|
||||||
task_name,
|
task_name,
|
||||||
script: &definition.command,
|
script: command,
|
||||||
cwd: &cwd,
|
cwd: &cwd,
|
||||||
custom_commands,
|
custom_commands,
|
||||||
|
kill_signal,
|
||||||
|
argv,
|
||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
@ -434,10 +493,15 @@ impl<'a> TaskRunner<'a> {
|
||||||
dir_url: &Url,
|
dir_url: &Url,
|
||||||
task_name: &str,
|
task_name: &str,
|
||||||
scripts: &IndexMap<String, String>,
|
scripts: &IndexMap<String, String>,
|
||||||
|
kill_signal: KillSignal,
|
||||||
|
argv: &[String],
|
||||||
) -> Result<i32, deno_core::anyhow::Error> {
|
) -> Result<i32, deno_core::anyhow::Error> {
|
||||||
// ensure the npm packages are installed if using a managed resolver
|
// ensure the npm packages are installed if using a managed resolver
|
||||||
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
|
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
|
||||||
npm_resolver.ensure_top_level_package_json_install().await?;
|
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||||
|
npm_resolver
|
||||||
|
.cache_packages(crate::npm::PackageCaching::All)
|
||||||
|
.await?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let cwd = match &self.task_flags.cwd {
|
let cwd = match &self.task_flags.cwd {
|
||||||
|
@ -457,6 +521,7 @@ impl<'a> TaskRunner<'a> {
|
||||||
self.npm_resolver,
|
self.npm_resolver,
|
||||||
self.node_resolver,
|
self.node_resolver,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
for task_name in &task_names {
|
for task_name in &task_names {
|
||||||
if let Some(script) = scripts.get(task_name) {
|
if let Some(script) = scripts.get(task_name) {
|
||||||
let exit_code = self
|
let exit_code = self
|
||||||
|
@ -465,6 +530,8 @@ impl<'a> TaskRunner<'a> {
|
||||||
script,
|
script,
|
||||||
cwd: &cwd,
|
cwd: &cwd,
|
||||||
custom_commands: custom_commands.clone(),
|
custom_commands: custom_commands.clone(),
|
||||||
|
kill_signal: kill_signal.clone(),
|
||||||
|
argv,
|
||||||
})
|
})
|
||||||
.await?;
|
.await?;
|
||||||
if exit_code > 0 {
|
if exit_code > 0 {
|
||||||
|
@ -485,11 +552,13 @@ impl<'a> TaskRunner<'a> {
|
||||||
script,
|
script,
|
||||||
cwd,
|
cwd,
|
||||||
custom_commands,
|
custom_commands,
|
||||||
|
kill_signal,
|
||||||
|
argv,
|
||||||
} = opts;
|
} = opts;
|
||||||
|
|
||||||
output_task(
|
output_task(
|
||||||
opts.task_name,
|
opts.task_name,
|
||||||
&task_runner::get_script_with_args(script, self.cli_options.argv()),
|
&task_runner::get_script_with_args(script, argv),
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(
|
Ok(
|
||||||
|
@ -500,9 +569,10 @@ impl<'a> TaskRunner<'a> {
|
||||||
env_vars: self.env_vars.clone(),
|
env_vars: self.env_vars.clone(),
|
||||||
custom_commands,
|
custom_commands,
|
||||||
init_cwd: self.cli_options.initial_cwd(),
|
init_cwd: self.cli_options.initial_cwd(),
|
||||||
argv: self.cli_options.argv(),
|
argv,
|
||||||
root_node_modules_dir: self.npm_resolver.root_node_modules_path(),
|
root_node_modules_dir: self.npm_resolver.root_node_modules_path(),
|
||||||
stdio: None,
|
stdio: None,
|
||||||
|
kill_signal,
|
||||||
})
|
})
|
||||||
.await?
|
.await?
|
||||||
.exit_code,
|
.exit_code,
|
||||||
|
@ -623,6 +693,32 @@ fn sort_tasks_topo<'a>(
|
||||||
Ok(sorted)
|
Ok(sorted)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn matches_package(
|
||||||
|
config: &FolderConfigs,
|
||||||
|
force_use_pkg_json: bool,
|
||||||
|
regex: &Regex,
|
||||||
|
) -> bool {
|
||||||
|
if !force_use_pkg_json {
|
||||||
|
if let Some(deno_json) = &config.deno_json {
|
||||||
|
if let Some(name) = &deno_json.json.name {
|
||||||
|
if regex.is_match(name) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(package_json) = &config.pkg_json {
|
||||||
|
if let Some(name) = &package_json.name {
|
||||||
|
if regex.is_match(name) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
fn output_task(task_name: &str, script: &str) {
|
fn output_task(task_name: &str, script: &str) {
|
||||||
log::info!(
|
log::info!(
|
||||||
"{} {} {}",
|
"{} {} {}",
|
||||||
|
@ -632,12 +728,70 @@ fn output_task(task_name: &str, script: &str) {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn print_available_tasks_workspace(
|
||||||
|
cli_options: &Arc<CliOptions>,
|
||||||
|
package_regex: &Regex,
|
||||||
|
filter: &str,
|
||||||
|
force_use_pkg_json: bool,
|
||||||
|
recursive: bool,
|
||||||
|
) -> Result<(), AnyError> {
|
||||||
|
let workspace = cli_options.workspace();
|
||||||
|
|
||||||
|
let mut matched = false;
|
||||||
|
for folder in workspace.config_folders() {
|
||||||
|
if !recursive
|
||||||
|
&& !matches_package(folder.1, force_use_pkg_json, package_regex)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
matched = true;
|
||||||
|
|
||||||
|
let member_dir = workspace.resolve_member_dir(folder.0);
|
||||||
|
let mut tasks_config = member_dir.to_tasks_config()?;
|
||||||
|
|
||||||
|
let mut pkg_name = folder
|
||||||
|
.1
|
||||||
|
.deno_json
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|deno| deno.json.name.clone())
|
||||||
|
.or(folder.1.pkg_json.as_ref().and_then(|pkg| pkg.name.clone()));
|
||||||
|
|
||||||
|
if force_use_pkg_json {
|
||||||
|
tasks_config = tasks_config.with_only_pkg_json();
|
||||||
|
pkg_name = folder.1.pkg_json.as_ref().and_then(|pkg| pkg.name.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
print_available_tasks(
|
||||||
|
&mut std::io::stdout(),
|
||||||
|
&cli_options.start_dir,
|
||||||
|
&tasks_config,
|
||||||
|
pkg_name,
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
if !matched {
|
||||||
|
log::warn!(
|
||||||
|
"{}",
|
||||||
|
colors::red(format!("No package name matched the filter '{}' in available 'deno.json' or 'package.json' files.", filter))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
fn print_available_tasks(
|
fn print_available_tasks(
|
||||||
writer: &mut dyn std::io::Write,
|
writer: &mut dyn std::io::Write,
|
||||||
workspace_dir: &Arc<WorkspaceDirectory>,
|
workspace_dir: &Arc<WorkspaceDirectory>,
|
||||||
tasks_config: &WorkspaceTasksConfig,
|
tasks_config: &WorkspaceTasksConfig,
|
||||||
|
pkg_name: Option<String>,
|
||||||
) -> Result<(), std::io::Error> {
|
) -> Result<(), std::io::Error> {
|
||||||
writeln!(writer, "{}", colors::green("Available tasks:"))?;
|
let heading = if let Some(s) = pkg_name {
|
||||||
|
format!("Available tasks ({}):", colors::cyan(s))
|
||||||
|
} else {
|
||||||
|
"Available tasks:".to_string()
|
||||||
|
};
|
||||||
|
|
||||||
|
writeln!(writer, "{}", colors::green(heading))?;
|
||||||
let is_cwd_root_dir = tasks_config.root.is_none();
|
let is_cwd_root_dir = tasks_config.root.is_none();
|
||||||
|
|
||||||
if tasks_config.is_empty() {
|
if tasks_config.is_empty() {
|
||||||
|
@ -693,7 +847,7 @@ fn print_available_tasks(
|
||||||
is_deno: false,
|
is_deno: false,
|
||||||
name: name.to_string(),
|
name: name.to_string(),
|
||||||
task: deno_config::deno_json::TaskDefinition {
|
task: deno_config::deno_json::TaskDefinition {
|
||||||
command: script.to_string(),
|
command: Some(script.to_string()),
|
||||||
dependencies: vec![],
|
dependencies: vec![],
|
||||||
description: None,
|
description: None,
|
||||||
},
|
},
|
||||||
|
@ -721,22 +875,88 @@ fn print_available_tasks(
|
||||||
)?;
|
)?;
|
||||||
if let Some(description) = &desc.task.description {
|
if let Some(description) = &desc.task.description {
|
||||||
let slash_slash = colors::italic_gray("//");
|
let slash_slash = colors::italic_gray("//");
|
||||||
|
for line in description.lines() {
|
||||||
|
writeln!(
|
||||||
|
writer,
|
||||||
|
" {slash_slash} {}",
|
||||||
|
colors::italic_gray(strip_ansi_codes_and_escape_control_chars(line))
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(command) = &desc.task.command {
|
||||||
writeln!(
|
writeln!(
|
||||||
writer,
|
writer,
|
||||||
" {slash_slash} {}",
|
" {}",
|
||||||
colors::italic_gray(description)
|
strip_ansi_codes_and_escape_control_chars(command)
|
||||||
)?;
|
)?;
|
||||||
}
|
};
|
||||||
writeln!(writer, " {}", desc.task.command)?;
|
|
||||||
if !desc.task.dependencies.is_empty() {
|
if !desc.task.dependencies.is_empty() {
|
||||||
|
let dependencies = desc
|
||||||
|
.task
|
||||||
|
.dependencies
|
||||||
|
.into_iter()
|
||||||
|
.map(|d| strip_ansi_codes_and_escape_control_chars(&d))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ");
|
||||||
writeln!(
|
writeln!(
|
||||||
writer,
|
writer,
|
||||||
" {} {}",
|
" {} {}",
|
||||||
colors::gray("depends on:"),
|
colors::gray("depends on:"),
|
||||||
colors::cyan(desc.task.dependencies.join(", "))
|
colors::cyan(dependencies)
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn strip_ansi_codes_and_escape_control_chars(s: &str) -> String {
|
||||||
|
strip_ansi_codes(s)
|
||||||
|
.chars()
|
||||||
|
.map(|c| match c {
|
||||||
|
'\n' => "\\n".to_string(),
|
||||||
|
'\r' => "\\r".to_string(),
|
||||||
|
'\t' => "\\t".to_string(),
|
||||||
|
c if c.is_control() => format!("\\x{:02x}", c as u8),
|
||||||
|
c => c.to_string(),
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn arg_to_task_name_filter(input: &str) -> Result<TaskNameFilter, AnyError> {
|
||||||
|
if !input.contains("*") {
|
||||||
|
return Ok(TaskNameFilter::Exact(input));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut regex_str = regex::escape(input);
|
||||||
|
regex_str = regex_str.replace("\\*", ".*");
|
||||||
|
let re = Regex::new(®ex_str)?;
|
||||||
|
Ok(TaskNameFilter::Regex(re))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
enum TaskNameFilter<'s> {
|
||||||
|
Exact(&'s str),
|
||||||
|
Regex(regex::Regex),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_arg_to_task_name_filter() {
|
||||||
|
assert!(matches!(
|
||||||
|
arg_to_task_name_filter("test").unwrap(),
|
||||||
|
TaskNameFilter::Exact("test")
|
||||||
|
));
|
||||||
|
assert!(matches!(
|
||||||
|
arg_to_task_name_filter("test-").unwrap(),
|
||||||
|
TaskNameFilter::Exact("test-")
|
||||||
|
));
|
||||||
|
assert!(matches!(
|
||||||
|
arg_to_task_name_filter("test*").unwrap(),
|
||||||
|
TaskNameFilter::Regex(_)
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -7,8 +7,7 @@ use crate::args::TestReporterConfig;
|
||||||
use crate::colors;
|
use crate::colors;
|
||||||
use crate::display;
|
use crate::display;
|
||||||
use crate::factory::CliFactory;
|
use crate::factory::CliFactory;
|
||||||
use crate::file_fetcher::File;
|
use crate::file_fetcher::CliFileFetcher;
|
||||||
use crate::file_fetcher::FileFetcher;
|
|
||||||
use crate::graph_util::has_graph_root_local_dependent_changed;
|
use crate::graph_util::has_graph_root_local_dependent_changed;
|
||||||
use crate::ops;
|
use crate::ops;
|
||||||
use crate::util::extract::extract_doc_tests;
|
use crate::util::extract::extract_doc_tests;
|
||||||
|
@ -21,6 +20,7 @@ use crate::worker::CliMainWorkerFactory;
|
||||||
use crate::worker::CoverageCollector;
|
use crate::worker::CoverageCollector;
|
||||||
|
|
||||||
use deno_ast::MediaType;
|
use deno_ast::MediaType;
|
||||||
|
use deno_cache_dir::file_fetcher::File;
|
||||||
use deno_config::glob::FilePatterns;
|
use deno_config::glob::FilePatterns;
|
||||||
use deno_config::glob::WalkEntry;
|
use deno_config::glob::WalkEntry;
|
||||||
use deno_core::anyhow;
|
use deno_core::anyhow;
|
||||||
|
@ -616,7 +616,10 @@ async fn configure_main_worker(
|
||||||
WorkerExecutionMode::Test,
|
WorkerExecutionMode::Test,
|
||||||
specifier.clone(),
|
specifier.clone(),
|
||||||
permissions_container,
|
permissions_container,
|
||||||
vec![ops::testing::deno_test::init_ops(worker_sender.sender)],
|
vec![
|
||||||
|
ops::testing::deno_test::init_ops(worker_sender.sender),
|
||||||
|
ops::lint::deno_lint::init_ops(),
|
||||||
|
],
|
||||||
Stdio {
|
Stdio {
|
||||||
stdin: StdioPipe::inherit(),
|
stdin: StdioPipe::inherit(),
|
||||||
stdout: StdioPipe::file(worker_sender.stdout),
|
stdout: StdioPipe::file(worker_sender.stdout),
|
||||||
|
@ -1514,7 +1517,7 @@ fn collect_specifiers_with_test_mode(
|
||||||
/// as well.
|
/// as well.
|
||||||
async fn fetch_specifiers_with_test_mode(
|
async fn fetch_specifiers_with_test_mode(
|
||||||
cli_options: &CliOptions,
|
cli_options: &CliOptions,
|
||||||
file_fetcher: &FileFetcher,
|
file_fetcher: &CliFileFetcher,
|
||||||
member_patterns: impl Iterator<Item = FilePatterns>,
|
member_patterns: impl Iterator<Item = FilePatterns>,
|
||||||
doc: &bool,
|
doc: &bool,
|
||||||
) -> Result<Vec<(ModuleSpecifier, TestMode)>, AnyError> {
|
) -> Result<Vec<(ModuleSpecifier, TestMode)>, AnyError> {
|
||||||
|
@ -1716,7 +1719,11 @@ pub async fn run_tests_with_watch(
|
||||||
&cli_options.permissions_options(),
|
&cli_options.permissions_options(),
|
||||||
)?;
|
)?;
|
||||||
let graph = module_graph_creator
|
let graph = module_graph_creator
|
||||||
.create_graph(graph_kind, test_modules)
|
.create_graph(
|
||||||
|
graph_kind,
|
||||||
|
test_modules,
|
||||||
|
crate::graph_util::NpmCachingStrategy::Eager,
|
||||||
|
)
|
||||||
.await?;
|
.await?;
|
||||||
module_graph_creator.graph_valid(&graph)?;
|
module_graph_creator.graph_valid(&graph)?;
|
||||||
let test_modules = &graph.roots;
|
let test_modules = &graph.roots;
|
||||||
|
@ -1818,7 +1825,7 @@ pub async fn run_tests_with_watch(
|
||||||
/// Extracts doc tests from files specified by the given specifiers.
|
/// Extracts doc tests from files specified by the given specifiers.
|
||||||
async fn get_doc_tests(
|
async fn get_doc_tests(
|
||||||
specifiers_with_mode: &[(Url, TestMode)],
|
specifiers_with_mode: &[(Url, TestMode)],
|
||||||
file_fetcher: &FileFetcher,
|
file_fetcher: &CliFileFetcher,
|
||||||
) -> Result<Vec<File>, AnyError> {
|
) -> Result<Vec<File>, AnyError> {
|
||||||
let specifiers_needing_extraction = specifiers_with_mode
|
let specifiers_needing_extraction = specifiers_with_mode
|
||||||
.iter()
|
.iter()
|
||||||
|
@ -1843,7 +1850,7 @@ fn get_target_specifiers(
|
||||||
specifiers_with_mode
|
specifiers_with_mode
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|(s, mode)| mode.needs_test_run().then_some(s))
|
.filter_map(|(s, mode)| mode.needs_test_run().then_some(s))
|
||||||
.chain(doc_tests.iter().map(|d| d.specifier.clone()))
|
.chain(doc_tests.iter().map(|d| d.url.clone()))
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -21,6 +21,7 @@ use deno_core::anyhow::Context;
|
||||||
use deno_core::error::AnyError;
|
use deno_core::error::AnyError;
|
||||||
use deno_core::unsync::spawn;
|
use deno_core::unsync::spawn;
|
||||||
use deno_core::url::Url;
|
use deno_core::url::Url;
|
||||||
|
use deno_semver::SmallStackString;
|
||||||
use deno_semver::Version;
|
use deno_semver::Version;
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
|
@ -255,7 +256,7 @@ async fn print_release_notes(
|
||||||
let is_deno_2_rc = new_semver.major == 2
|
let is_deno_2_rc = new_semver.major == 2
|
||||||
&& new_semver.minor == 0
|
&& new_semver.minor == 0
|
||||||
&& new_semver.patch == 0
|
&& new_semver.patch == 0
|
||||||
&& new_semver.pre.first() == Some(&"rc".to_string());
|
&& new_semver.pre.first().map(|s| s.as_str()) == Some("rc");
|
||||||
|
|
||||||
if is_deno_2_rc || is_switching_from_deno1_to_deno2 {
|
if is_deno_2_rc || is_switching_from_deno1_to_deno2 {
|
||||||
log::info!(
|
log::info!(
|
||||||
|
@ -674,7 +675,7 @@ impl RequestedVersion {
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
if semver.pre.contains(&"rc".to_string()) {
|
if semver.pre.contains(&SmallStackString::from_static("rc")) {
|
||||||
(ReleaseChannel::Rc, passed_version)
|
(ReleaseChannel::Rc, passed_version)
|
||||||
} else {
|
} else {
|
||||||
(ReleaseChannel::Stable, passed_version)
|
(ReleaseChannel::Stable, passed_version)
|
||||||
|
|