Mirror of https://github.com/denoland/deno.git (synced 2025-02-07 23:06:50 -05:00)

Merge remote-tracking branch 'upstream/main' into feat/ffi-feature-flag

Commit e525058e3d: 2087 changed files with 90495 additions and 28200 deletions
@@ -13,7 +13,7 @@
   },
   "exec": {
     "commands": [{
-      "command": "rustfmt --config imports_granularity=item",
+      "command": "rustfmt --config imports_granularity=item --config group_imports=StdExternalCrate",
       "exts": ["rs"]
     }]
   },
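For context, here is a minimal sketch (a hypothetical file, not one from this diff) of what the newly added `group_imports=StdExternalCrate` rustfmt option does on top of `imports_granularity=item`: each imported item gets its own `use` line, and the imports are kept in three blocks in the order std, external crates, then `crate::`/`super::` paths.

```rust
// Illustrative only: after `rustfmt --config imports_granularity=item
// --config group_imports=StdExternalCrate`, std imports come first and
// crate-local imports last (there happens to be no external-crate group here).
use std::collections::HashMap;
use std::path::PathBuf;

use crate::config::Settings;

mod config {
  pub struct Settings {
    pub name: &'static str,
  }
}

fn main() {
  let mut cache: HashMap<PathBuf, Settings> = HashMap::new();
  cache.insert(PathBuf::from("deno.json"), Settings { name: "example" });
  println!("{} cached entries", cache.len());
}
```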
.github/mtime_cache/action.js (vendored): 2 changes

@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 // This file contains the implementation of a Github Action. Github uses
 // Node.js v20.x to run actions, so this is Node code and not Deno code.
.github/workflows/cargo_publish.yml (vendored): 2 changes

@@ -35,7 +35,7 @@ jobs:
       - name: Install deno
         uses: denoland/setup-deno@v2
         with:
-          deno-version: v1.x
+          deno-version: v2.x

       - name: Publish
         env:
.github/workflows/ci.generate.ts (vendored): 89 changes

@@ -1,11 +1,11 @@
 #!/usr/bin/env -S deno run --allow-write=. --lock=./tools/deno.lock.json
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 import { stringify } from "jsr:@std/yaml@^0.221/stringify";

 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 28;
+const cacheVersion = 37;

 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
@@ -14,7 +14,7 @@ const windowsX86Runner = "windows-2022";
 const windowsX86XlRunner = "windows-2022-xl";
 const macosX86Runner = "macos-13";
 const macosArmRunner = "macos-14";
-const selfHostedMacosArmRunner = "self-hosted";
+const selfHostedMacosArmRunner = "ghcr.io/cirruslabs/macos-runner:sonoma";

 const Runners = {
   linuxX86: {
@@ -41,8 +41,14 @@ const Runners = {
   macosArm: {
     os: "macos",
     arch: "aarch64",
+    runner: macosArmRunner,
+  },
+  macosArmSelfHosted: {
+    os: "macos",
+    arch: "aarch64",
+    // Actually use self-hosted runner only in denoland/deno on `main` branch and for tags (release) builds.
     runner:
-      `\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
+      `\${{ github.repository == 'denoland/deno' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
   },
   windowsX86: {
     os: "windows",
@@ -59,6 +65,15 @@ const Runners = {

 const prCacheKeyPrefix =
   `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;
+const prCacheKey = `${prCacheKeyPrefix}\${{ github.sha }}`;
+const prCachePath = [
+  // this must match for save and restore (https://github.com/actions/cache/issues/1444)
+  "./target",
+  "!./target/*/gn_out",
+  "!./target/*/gn_root",
+  "!./target/*/*.zip",
+  "!./target/*/*.tar.gz",
+].join("\n");

 // Note that you may need to add more version to the `apt-get remove` line below if you change this
 const llvmVersion = 19;
@@ -115,9 +130,7 @@ cat /sysroot/.env
 # to build because the object formats are not compatible.
 echo "
-CARGO_PROFILE_BENCH_INCREMENTAL=false
-CARGO_PROFILE_BENCH_LTO=false
 CARGO_PROFILE_RELEASE_INCREMENTAL=false
 CARGO_PROFILE_RELEASE_LTO=false
 RUSTFLAGS<<__1
 -C linker-plugin-lto=true
 -C linker=clang-${llvmVersion}
@@ -141,7 +154,7 @@ RUSTDOCFLAGS<<__1
 $RUSTFLAGS
 __1
 CC=/usr/bin/clang-${llvmVersion}
-CFLAGS=-flto=thin $CFLAGS
+CFLAGS=$CFLAGS
 " > $GITHUB_ENV`,
 };

@@ -196,7 +209,7 @@ const installNodeStep = {
 const installDenoStep = {
   name: "Install Deno",
   uses: "denoland/setup-deno@v2",
-  with: { "deno-version": "v1.x" },
+  with: { "deno-version": "v2.x" },
 };

 const authenticateWithGoogleCloud = {
@@ -351,7 +364,7 @@ const ci = {
     needs: ["pre_build"],
     if: "${{ needs.pre_build.outputs.skip_build != 'true' }}",
     "runs-on": "${{ matrix.runner }}",
-    "timeout-minutes": 180,
+    "timeout-minutes": 240,
     defaults: {
       run: {
         // GH actions does not fail fast by default on
@@ -375,7 +388,7 @@ const ci = {
         job: "test",
         profile: "debug",
       }, {
-        ...Runners.macosArm,
+        ...Runners.macosArmSelfHosted,
         job: "test",
         profile: "release",
         skip_pr: true,
@@ -475,6 +488,27 @@ const ci = {
           " -czvf target/release/deno_src.tar.gz -C .. deno",
         ].join("\n"),
       },
+      {
+        name: "Cache Cargo home",
+        uses: "cirruslabs/cache@v4",
+        with: {
+          // See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
+          // Note that with the new sparse registry format, we no longer have to cache a `.git` dir
+          path: [
+            "~/.cargo/.crates.toml",
+            "~/.cargo/.crates2.json",
+            "~/.cargo/bin",
+            "~/.cargo/registry/index",
+            "~/.cargo/registry/cache",
+            "~/.cargo/git/db",
+          ].join("\n"),
+          key:
+            `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ hashFiles('Cargo.lock') }}`,
+          // We will try to restore from the closest cargo-home we can find
+          "restore-keys":
+            `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-`,
+        },
+      },
       installRustStep,
       {
         if:
@@ -598,23 +632,6 @@ const ci = {
           installBenchTools,
         ].join("\n"),
       },
-      {
-        name: "Cache Cargo home",
-        uses: "actions/cache@v4",
-        with: {
-          // See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
-          // Note that with the new sparse registry format, we no longer have to cache a `.git` dir
-          path: [
-            "~/.cargo/registry/index",
-            "~/.cargo/registry/cache",
-          ].join("\n"),
-          key:
-            `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ hashFiles('Cargo.lock') }}`,
-          // We will try to restore from the closest cargo-home we can find
-          "restore-keys":
-            `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}`,
-        },
-      },
       {
         // Restore cache from the latest 'main' branch build.
         name: "Restore cache build output (PR)",
@@ -622,13 +639,7 @@ const ci = {
         if:
           "github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/')",
         with: {
-          path: [
-            "./target",
-            "!./target/*/gn_out",
-            "!./target/*/gn_root",
-            "!./target/*/*.zip",
-            "!./target/*/*.tar.gz",
-          ].join("\n"),
+          path: prCachePath,
           key: "never_saved",
           "restore-keys": prCacheKeyPrefix,
         },
@@ -1080,14 +1091,8 @@ const ci = {
         if:
           "(matrix.job == 'test' || matrix.job == 'lint') && github.ref == 'refs/heads/main'",
         with: {
-          path: [
-            "./target",
-            "!./target/*/gn_out",
-            "!./target/*/*.zip",
-            "!./target/*/*.sha256sum",
-            "!./target/*/*.tar.gz",
-          ].join("\n"),
-          key: prCacheKeyPrefix + "${{ github.sha }}",
+          path: prCachePath,
+          key: prCacheKey,
         },
       },
     ]),
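A quick sketch of how the cache keys introduced above compose (all values below are illustrative, not read from real CI): builds on `main` save the full key `<cacheVersion>-cargo-target-<os>-<arch>-<profile>-<job>-<sha>`, while pull-request builds restore by prefix via `restore-keys`, so they reuse the most recent `main` build for the same matrix entry, and bumping `cacheVersion` invalidates everything at once.

```rust
// Minimal sketch of the key scheme; inputs are example values only.
fn pr_cache_key_prefix(cache_version: u32, os: &str, arch: &str, profile: &str, job: &str) -> String {
  format!("{cache_version}-cargo-target-{os}-{arch}-{profile}-{job}-")
}

fn main() {
  let prefix = pr_cache_key_prefix(37, "macos", "aarch64", "release", "test");
  // Exact key saved on `main` for a hypothetical commit sha "abc123":
  let saved = format!("{prefix}abc123");
  // A PR build restores by prefix match, so any saved key starting with
  // `prefix` is a candidate.
  println!("saved:   {saved}");
  println!("restore: {prefix}*");
}
```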
.github/workflows/ci.yml (vendored): 40 changes

@@ -48,7 +48,7 @@ jobs:
       - pre_build
     if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}'
     runs-on: '${{ matrix.runner }}'
-    timeout-minutes: 180
+    timeout-minutes: 240
     defaults:
       run:
         shell: bash
@@ -68,12 +68,12 @@ jobs:
             skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
           - os: macos
             arch: aarch64
-            runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
+            runner: macos-14
            job: test
            profile: debug
          - os: macos
            arch: aarch64
-            runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
+            runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && (github.ref == ''refs/heads/main'' || startsWith(github.ref, ''refs/tags/'')) && ''ghcr.io/cirruslabs/macos-runner:sonoma'' || ''macos-14'' }}'
            job: test
            profile: release
            skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
@@ -174,13 +174,26 @@ jobs:
          mkdir -p target/release
          tar --exclude=".git*" --exclude=target --exclude=third_party/prebuilt \
           -czvf target/release/deno_src.tar.gz -C .. deno
+      - name: Cache Cargo home
+        uses: cirruslabs/cache@v4
+        with:
+          path: |-
+            ~/.cargo/.crates.toml
+            ~/.cargo/.crates2.json
+            ~/.cargo/bin
+            ~/.cargo/registry/index
+            ~/.cargo/registry/cache
+            ~/.cargo/git/db
+          key: '37-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '37-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
+        if: '!(matrix.skip)'
      - uses: dsherret/rust-toolchain-file@v1
        if: '!(matrix.skip)'
      - if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
        name: Install Deno
        uses: denoland/setup-deno@v2
        with:
-          deno-version: v1.x
+          deno-version: v2.x
      - name: Install Python
        uses: actions/setup-python@v5
        with:
@@ -294,9 +307,7 @@ jobs:
          # to build because the object formats are not compatible.
          echo "
-          CARGO_PROFILE_BENCH_INCREMENTAL=false
-          CARGO_PROFILE_BENCH_LTO=false
          CARGO_PROFILE_RELEASE_INCREMENTAL=false
          CARGO_PROFILE_RELEASE_LTO=false
          RUSTFLAGS<<__1
          -C linker-plugin-lto=true
          -C linker=clang-19
@@ -320,7 +331,7 @@ jobs:
          $RUSTFLAGS
          __1
          CC=/usr/bin/clang-19
-          CFLAGS=-flto=thin $CFLAGS
+          CFLAGS=$CFLAGS
          " > $GITHUB_ENV
      - name: Remove macOS cURL --ipv4 flag
        run: |-
@@ -355,15 +366,6 @@ jobs:
      - name: Install benchmark tools
        if: '!(matrix.skip) && (matrix.job == ''bench'')'
        run: ./tools/install_prebuilt.js wrk hyperfine
-      - name: Cache Cargo home
-        uses: actions/cache@v4
-        with:
-          path: |-
-            ~/.cargo/registry/index
-            ~/.cargo/registry/cache
-          key: '28-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '28-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
-        if: '!(matrix.skip)'
      - name: Restore cache build output (PR)
        uses: actions/cache/restore@v4
        if: '!(matrix.skip) && (github.ref != ''refs/heads/main'' && !startsWith(github.ref, ''refs/tags/''))'
@@ -375,7 +377,7 @@ jobs:
            !./target/*/*.zip
            !./target/*/*.tar.gz
          key: never_saved
-          restore-keys: '28-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '37-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
      - name: Apply and update mtime cache
        if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
        uses: ./.github/mtime_cache
@@ -682,10 +684,10 @@ jobs:
          path: |-
            ./target
            !./target/*/gn_out
+            !./target/*/gn_root
            !./target/*/*.zip
-            !./target/*/*.sha256sum
            !./target/*/*.tar.gz
-          key: '28-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '37-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
  publish-canary:
    name: publish canary
    runs-on: ubuntu-24.04
.github/workflows/npm_publish.yml (vendored, new file): 45 changes

@@ -0,0 +1,45 @@
+name: npm_publish
+
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'Version'
+        type: string
+  release:
+    types: [published]
+
+permissions:
+  id-token: write
+
+jobs:
+  build:
+    name: npm publish
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+
+    steps:
+      - name: Configure git
+        run: |
+          git config --global core.symlinks true
+          git config --global fetch.parallel 32
+
+      - name: Clone repository
+        uses: actions/checkout@v4
+        with:
+          submodules: recursive
+
+      - name: Install Deno
+        uses: denoland/setup-deno@v2
+        with:
+          deno-version: v2.x
+      - name: Install Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22.x'
+          registry-url: 'https://registry.npmjs.org'
+
+      - name: Publish
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+        run: ./tools/release/npm/build.ts ${{ github.event.inputs.version }} --publish
.github/workflows/promote_to_release.yml (vendored): 2 changes

@@ -42,7 +42,7 @@ jobs:
       - name: Install deno
         uses: denoland/setup-deno@v2
         with:
-          deno-version: v1.x
+          deno-version: v2.x

       - name: Install rust-codesign
         run: |-
.github/workflows/start_release.yml (vendored): 2 changes

@@ -36,7 +36,7 @@ jobs:
       - name: Install deno
         uses: denoland/setup-deno@v2
         with:
-          deno-version: v1.x
+          deno-version: v2.x

       - name: Create Gist URL
         env:
.github/workflows/version_bump.yml (vendored): 2 changes

@@ -41,7 +41,7 @@ jobs:
       - name: Install deno
         uses: denoland/setup-deno@v2
         with:
-          deno-version: v1.x
+          deno-version: v2.x

       - name: Run version bump
         run: |
Cargo.lock (generated): 1147 changes. File diff suppressed because it is too large.
Cargo.toml: 120 changes

@@ -1,10 +1,13 @@
-# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+# Copyright 2018-2025 the Deno authors. MIT license.

 [workspace]
 resolver = "2"
 members = [
   "bench_util",
   "cli",
+  "cli/lib",
+  "cli/rt",
+  "cli/snapshot",
   "ext/broadcast_channel",
   "ext/cache",
   "ext/canvas",
@@ -48,66 +51,72 @@ repository = "https://github.com/denoland/deno"

 [workspace.dependencies]
 deno_ast = { version = "=0.44.0", features = ["transpiling"] }
-deno_core = { version = "0.323.0" }
+deno_core = { version = "0.331.0" }

-deno_bench_util = { version = "0.174.0", path = "./bench_util" }
-deno_config = { version = "=0.39.3", features = ["workspace", "sync"] }
-deno_lockfile = "=0.23.2"
-deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
-deno_npm = "=0.26.0"
-deno_path_util = "=0.2.1"
-deno_permissions = { version = "0.40.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.189.0", path = "./runtime" }
-deno_semver = "=0.6.0"
+deno_bench_util = { version = "0.181.0", path = "./bench_util" }
+deno_config = { version = "=0.45.0", features = ["workspace"] }
+deno_lockfile = "=0.24.0"
+deno_media_type = { version = "=0.2.5", features = ["module_specifier"] }
+deno_npm = "=0.27.2"
+deno_path_util = "=0.3.1"
+deno_permissions = { version = "0.46.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.195.0", path = "./runtime" }
+deno_semver = "=0.7.1"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.110.0", path = "./ext/napi/sym" }
+napi_sym = { version = "0.117.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }

-denokv_proto = "0.8.4"
-denokv_remote = "0.8.4"
+denokv_proto = "0.9.0"
+denokv_remote = "0.9.0"
 # denokv_sqlite brings in bundled sqlite if we don't disable the default features
-denokv_sqlite = { default-features = false, version = "0.8.4" }
+denokv_sqlite = { default-features = false, version = "0.9.0" }

 # exts
-deno_broadcast_channel = { version = "0.174.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.112.0", path = "./ext/cache" }
-deno_canvas = { version = "0.49.0", path = "./ext/canvas" }
-deno_console = { version = "0.180.0", path = "./ext/console" }
-deno_cron = { version = "0.60.0", path = "./ext/cron" }
-deno_crypto = { version = "0.194.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.204.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.167.0", path = "./ext/ffi" }
-deno_fs = { version = "0.90.0", path = "./ext/fs" }
-deno_http = { version = "0.178.0", path = "./ext/http" }
-deno_io = { version = "0.90.0", path = "./ext/io" }
-deno_kv = { version = "0.88.0", path = "./ext/kv" }
-deno_napi = { version = "0.111.0", path = "./ext/napi" }
-deno_net = { version = "0.172.0", path = "./ext/net" }
-deno_node = { version = "0.117.0", path = "./ext/node" }
-deno_telemetry = { version = "0.2.0", path = "./ext/telemetry" }
-deno_tls = { version = "0.167.0", path = "./ext/tls" }
-deno_url = { version = "0.180.0", path = "./ext/url" }
-deno_web = { version = "0.211.0", path = "./ext/web" }
-deno_webgpu = { version = "0.147.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.180.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.185.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.175.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.181.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.119.0", path = "./ext/cache" }
+deno_canvas = { version = "0.56.0", path = "./ext/canvas" }
+deno_console = { version = "0.187.0", path = "./ext/console" }
+deno_cron = { version = "0.67.0", path = "./ext/cron" }
+deno_crypto = { version = "0.201.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.211.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.174.0", path = "./ext/ffi" }
+deno_fs = { version = "0.97.0", path = "./ext/fs" }
+deno_http = { version = "0.185.0", path = "./ext/http" }
+deno_io = { version = "0.97.0", path = "./ext/io" }
+deno_kv = { version = "0.95.0", path = "./ext/kv" }
+deno_napi = { version = "0.118.0", path = "./ext/napi" }
+deno_net = { version = "0.179.0", path = "./ext/net" }
+deno_node = { version = "0.125.0", path = "./ext/node" }
+deno_os = { version = "0.4.0", path = "./ext/os" }
+deno_process = { version = "0.2.0", path = "./ext/process" }
+deno_telemetry = { version = "0.9.0", path = "./ext/telemetry" }
+deno_tls = { version = "0.174.0", path = "./ext/tls" }
+deno_url = { version = "0.187.0", path = "./ext/url" }
+deno_web = { version = "0.218.0", path = "./ext/web" }
+deno_webgpu = { version = "0.154.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.187.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.192.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.182.0", path = "./ext/webstorage" }

-# resolvers
-deno_npm_cache = { version = "0.0.1", path = "./resolvers/npm_cache" }
-deno_resolver = { version = "0.12.0", path = "./resolvers/deno" }
-node_resolver = { version = "0.19.0", path = "./resolvers/node" }
+# workspace libraries
+deno_lib = { version = "0.3.0", path = "./cli/lib" }
+deno_npm_cache = { version = "0.6.0", path = "./resolvers/npm_cache" }
+deno_resolver = { version = "0.18.0", path = "./resolvers/deno" }
+deno_snapshots = { version = "0.2.0", path = "./cli/snapshot" }
+node_resolver = { version = "0.25.0", path = "./resolvers/node" }

 aes = "=0.8.3"
 anyhow = "1.0.57"
 async-once-cell = "0.5.4"
 async-trait = "0.1.73"
 base32 = "=0.5.1"
 base64 = "0.21.7"
 bencher = "0.1"
-boxed_error = "0.2.2"
+boxed_error = "0.2.3"
 brotli = "6.0.0"
 bytes = "1.4.0"
 cache_control = "=0.2.0"
+capacity_builder = "0.5.0"
 cbc = { version = "=0.1.2", features = ["alloc"] }
 # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
 # Instead use util::time::utc_now()
@@ -116,9 +125,10 @@ color-print = "0.3.5"
 console_static_text = "=0.8.1"
 dashmap = "5.5.3"
 data-encoding = "2.3.3"
-data-url = "=0.3.0"
-deno_cache_dir = "=0.14.0"
-deno_package_json = { version = "0.2.1", default-features = false }
+data-url = "=0.3.1"
+deno_cache_dir = "=0.17.0"
+deno_error = "=0.5.5"
+deno_package_json = { version = "0.4.0", default-features = false }
 deno_unsync = "0.4.2"
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
@@ -133,7 +143,7 @@ fs3 = "0.5.0"
 futures = "0.3.21"
 glob = "0.3.1"
 h2 = "0.4.4"
-hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
+hickory-resolver = { version = "0.25.0-alpha.4", features = ["tokio-runtime", "serde"] }
 http = "1.0"
 http-body = "1.0"
 http-body-util = "0.1.2"
@@ -141,13 +151,14 @@ http_v02 = { package = "http", version = "0.2.9" }
 httparse = "1.8.0"
 hyper = { version = "1.4.1", features = ["full"] }
 hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
-hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
+hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
 hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
 indexmap = { version = "2", features = ["serde"] }
 ipnet = "2.3"
 jsonc-parser = { version = "=0.26.2", features = ["serde"] }
 lazy-regex = "3"
-libc = "0.2.126"
+libc = "0.2.168"
+libsui = "0.5.0"
 libz-sys = { version = "1.1.20", default-features = false }
 log = { version = "0.4.20", features = ["kv"] }
 lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
@@ -191,16 +202,17 @@ slab = "0.4"
 smallvec = "1.8"
 socket2 = { version = "0.5.3", features = ["all"] }
 spki = "0.7.2"
+sys_traits = "=0.1.7"
 tar = "=0.4.40"
 tempfile = "3.4.0"
 termcolor = "1.1.3"
-thiserror = "1.0.61"
+thiserror = "2.0.3"
 tokio = { version = "1.36.0", features = ["full"] }
 tokio-metrics = { version = "0.3.0", features = ["rt"] }
 tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
 tokio-socks = "0.5.1"
 tokio-util = "0.7.4"
-tower = { version = "0.4.13", default-features = false, features = ["util"] }
+tower = { version = "0.5.2", default-features = false, features = ["retry", "util"] }
 tower-http = { version = "0.6.1", features = ["decompression-br", "decompression-gzip"] }
 tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
 tower-service = "0.3.2"
@@ -209,7 +221,7 @@ url = { version = "2.5", features = ["serde", "expose_internals"] }
 uuid = { version = "1.3.0", features = ["v4"] }
 webpki-root-certs = "0.26.5"
 webpki-roots = "0.26"
-which = "4.2.5"
+which = "6"
 yoke = { version = "0.7.4", features = ["derive"] }
 zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
 zstd = "=0.12.4"
@@ -237,9 +249,9 @@ syn = { version = "2", features = ["full", "extra-traits"] }
 nix = "=0.27.1"

 # windows deps
-junction = "=0.2.0"
+junction = "=1.2.0"
 winapi = "=0.3.9"
-windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel"] }
+windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel", "Win32_System_Threading", "Win32_UI", "Win32_UI_Shell"] }
 winres = "=0.1.12"

 [profile.release]
@@ -1,6 +1,6 @@
 MIT License

-Copyright 2018-2024 the Deno authors
+Copyright 2018-2025 the Deno authors

 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
@@ -6,8 +6,8 @@

 <img align="right" src="https://deno.land/logo.svg" height="150px" alt="the deno mascot dinosaur standing in the rain">

-[Deno](https://www.deno.com)
-([/ˈdiːnoʊ/](http://ipa-reader.xyz/?text=%CB%88di%CB%90no%CA%8A), pronounced
+[Deno](https://deno.com)
+([/ˈdiːnoʊ/](https://ipa-reader.com/?text=%CB%88di%CB%90no%CA%8A), pronounced
 `dee-no`) is a JavaScript, TypeScript, and WebAssembly runtime with secure
 defaults and a great developer experience. It's built on [V8](https://v8.dev/),
 [Rust](https://www.rust-lang.org/), and [Tokio](https://tokio.rs/).
Releases.md: 158 changes

@@ -6,6 +6,164 @@ https://github.com/denoland/deno/releases
 We also have one-line install commands at:
 https://github.com/denoland/deno_install

+### 2.1.7 / 2025.01.21
+
+- fix(deps): update yanked crates (#27512)
+- fix(ext/node): GCM auth tag check on DechiperIv#final (#27733)
+- fix(ext/node): add FileHandle#sync (#27677)
+- fix(ext/node): propagate socket error to client request object (#27678)
+- fix(ext/node): tls.connect regression (#27707)
+- fix(ext/os): pass SignalState to web worker (#27741)
+- fix(install/global): remove importMap field from specified config file
+  (#27744)
+- fix: use 'getrandom' feature for 'sys_traits' crate
+- perf(compile): remove swc from denort (#27721)
+
+### 2.1.6 / 2025.01.16
+
+- fix(check/lsp): correctly resolve compilerOptions.types (#27686)
+- fix(check/lsp): fix bugs with tsc type resolution, allow npm packages to
+  augment `ImportMeta` (#27690)
+- fix(compile): store embedded fs case sensitivity (#27653)
+- fix(compile/windows): better handling of deno_dir on different drive letter
+  than code (#27654)
+- fix(ext/console): change Temporal color (#27684)
+- fix(ext/node): add `writev` method to `FileHandle` (#27563)
+- fix(ext/node): add chown method to FileHandle class (#27638)
+- fix(ext/node): apply `@npmcli/agent` workaround to `npm-check-updates`
+  (#27639)
+- fix(ext/node): fix playwright http client (#27662)
+- fix(ext/node): show bare-node-builtin hint when using an import map (#27632)
+- fix(ext/node): use primordials in `ext/node/polyfills/_fs_common.ts` (#27589)
+- fix(lsp): handle pathless untitled URIs (#27637)
+- fix(lsp/check): don't resolve unknown media types to a `.js` extension
+  (#27631)
+- fix(node): Prevent node:child_process from always inheriting the parent
+  environment (#27343) (#27340)
+- fix(node/fs): add utimes method to the FileHandle class (#27582)
+- fix(outdated): Use `latest` tag even when it's the same as the current version
+  (#27699)
+- fix(outdated): retain strict semver specifier when updating (#27701)
+
+### 2.1.5 / 2025.01.09
+
+- feat(unstable): implement QUIC (#21942)
+- feat(unstable): add JS linting plugin infrastructure (#27416)
+- feat(unstable): add OTEL MeterProvider (#27240)
+- feat(unstable): no config npm:@opentelemetry/api integration (#27541)
+- feat(unstable): replace SpanExporter with TracerProvider (#27473)
+- feat(unstable): support selectors in JS lint plugins (#27452)
+- fix(check): line-break between diagnostic message chain entries (#27543)
+- fix(check): move module not found errors to typescript diagnostics (#27533)
+- fix(compile): analyze modules in directory specified in --include (#27296)
+- fix(compile): be more deterministic when compiling the same code in different
+  directories (#27395)
+- fix(compile): display embedded file sizes and total (#27360)
+- fix(compile): output contents of embedded file system (#27302)
+- fix(ext/fetch): better error message when body resource is unavailable
+  (#27429)
+- fix(ext/fetch): retry some http/2 errors (#27417)
+- fix(ext/fs): do not throw for bigint ctime/mtime/atime (#27453)
+- fix(ext/http): improve error message when underlying resource of request body
+  unavailable (#27463)
+- fix(ext/net): update moka cache to avoid potential panic in `Deno.resolveDns`
+  on some laptops with Ryzen CPU (#27572)
+- fix(ext/node): fix `fs.access`/`fs.promises.access` with `X_OK` mode parameter
+  on Windows (#27407)
+- fix(ext/node): fix `os.cpus()` on Linux (#27592)
+- fix(ext/node): RangeError timingSafeEqual with different byteLength (#27470)
+- fix(ext/node): add `truncate` method to the `FileHandle` class (#27389)
+- fix(ext/node): add support of any length IV for aes-(128|256)-gcm ciphers
+  (#27476)
+- fix(ext/node): convert brotli chunks with proper byte offset (#27455)
+- fix(ext/node): do not exit worker thread when there is pending async op
+  (#27378)
+- fix(ext/node): have `process` global available in Node context (#27562)
+- fix(ext/node): make getCiphers return supported ciphers (#27466)
+- fix(ext/node): sort list of built-in modules alphabetically (#27410)
+- fix(ext/node): support createConnection option in node:http.request() (#25470)
+- fix(ext/node): support private key export in JWK format (#27325)
+- fix(ext/web): add `[[ErrorData]]` slot to `DOMException` (#27342)
+- fix(ext/websocket): Fix close code without reason (#27578)
+- fix(jsr): Wasm imports fail to load (#27594)
+- fix(kv): improve backoff error message and inline documentation (#27537)
+- fix(lint): fix single char selectors being ignored (#27576)
+- fix(lockfile): include dependencies listed in external import map in lockfile
+  (#27337)
+- fix(lsp): css preprocessor formatting (#27526)
+- fix(lsp): don't skip dirs with enabled subdirs (#27580)
+- fix(lsp): include "node:" prefix for node builtin auto-imports (#27404)
+- fix(lsp): respect "typescript.suggestionActions.enabled" setting (#27373)
+- fix(lsp): rewrite imports for 'Move to a new file' action (#27427)
+- fix(lsp): sql and component file formatting (#27350)
+- fix(lsp): use verbatim specifier for URL auto-imports (#27605)
+- fix(no-slow-types): handle rest param with internal assignments (#27581)
+- fix(node/fs): add a chmod method to the FileHandle class (#27522)
+- fix(node): add missing `inspector/promises` (#27491)
+- fix(node): handle cjs exports with escaped chars (#27438)
+- fix(npm): deterministically output tags to initialized file (#27514)
+- fix(npm): search node_modules folder for package matching npm specifier
+  (#27345)
+- fix(outdated): ensure "Latest" version is greater than "Update" version
+  (#27390)
+- fix(outdated): support updating dependencies in external import maps (#27339)
+- fix(permissions): implicit `--allow-import` when using `--cached-only`
+  (#27530)
+- fix(publish): infer literal types in const contexts (#27425)
+- fix(task): properly handle task name wildcards with --recursive (#27396)
+- fix(task): support tasks without commands (#27191)
+- fix(unstable): don't error on non-existing attrs or type attr (#27456)
+- fix: FastString v8_string() should error when cannot allocated (#27375)
+- fix: deno_resolver crate without 'sync' feature (#27403)
+- fix: incorrect memory info free/available bytes on mac (#27460)
+- fix: upgrade deno_doc to 0.161.3 (#27377)
+- perf(fs/windows): stat - only open file once (#27487)
+- perf(node/fs/copy): reduce metadata lookups copying directory (#27495)
+- perf: don't store duplicate info for ops in the snapshot (#27430)
+- perf: remove now needless canonicalization getting closest package.json
+  (#27437)
+- perf: upgrade to deno_semver 0.7 (#27426)
+
+### 2.1.4 / 2024.12.11
+
+- feat(unstable): support caching npm dependencies only as they're needed
+  (#27300)
+- fix(compile): correct read length for transpiled typescript files (#27301)
+- fix(ext/node): accept file descriptor in fs.readFile(Sync) (#27252)
+- fix(ext/node): handle Float16Array in node:v8 module (#27285)
+- fix(lint): do not error providing --allow-import (#27321)
+- fix(node): update list of builtin node modules, add missing export to
+  _http_common (#27294)
+- fix(outdated): error when there are no config files (#27306)
+- fix(outdated): respect --quiet flag for hints (#27317)
+- fix(outdated): show a suggestion for updating (#27304)
+- fix(task): do not always kill child on ctrl+c on windows (#27269)
+- fix(unstable): don't unwrap optional state in otel (#27292)
+- fix: do not error when subpath has an @ symbol (#27290)
+- fix: do not panic when fetching invalid file url on Windows (#27259)
+- fix: replace the @deno-types with @ts-types (#27310)
+- perf(compile): improve FileBackedVfsFile (#27299)
+
+### 2.1.3 / 2024.12.05
+
+- feat(unstable): add metrics to otel (#27143)
+- fix(fmt): stable formatting of HTML files with JS (#27164)
+- fix(install): use locked version of jsr package when fetching exports (#27237)
+- fix(node/fs): support `recursive` option in readdir (#27179)
+- fix(node/worker_threads): data url not encoded properly with eval (#27184)
+- fix(outdated): allow `--latest` without `--update` (#27227)
+- fix(task): `--recursive` option not working (#27183)
+- fix(task): don't panic with filter on missing task argument (#27180)
+- fix(task): forward signals to spawned sub-processes on unix (#27141)
+- fix(task): kill descendants when killing task process on Windows (#27163)
+- fix(task): only pass args to root task (#27213)
+- fix(unstable): otel context with multiple keys (#27230)
+- fix(unstable/temporal): respect locale in `Duration.prototype.toLocaleString`
+  (#27000)
+- fix: clear dep analysis when module loading is done (#27204)
+- fix: improve auto-imports for npm packages (#27224)
+- fix: support `workspace:^` and `workspace:~` version constraints (#27096)
+
 ### 2.1.2 / 2024.11.28

 - feat(unstable): Instrument Deno.serve (#26964)
@@ -1,8 +1,8 @@
-# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+# Copyright 2018-2025 the Deno authors. MIT license.

 [package]
 name = "deno_bench_util"
-version = "0.174.0"
+version = "0.181.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -7,7 +7,6 @@ use deno_bench_util::bench_js_sync;
 use deno_bench_util::bench_or_profile;
 use deno_bench_util::bencher::benchmark_group;
 use deno_bench_util::bencher::Bencher;
-
 use deno_core::Extension;

 #[op2]
@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.

 use deno_bench_util::bench_js_sync_with;
 use deno_bench_util::bench_or_profile;
@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 use bencher::Bencher;
 use deno_core::v8;
 use deno_core::Extension;
@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 mod js_runtime;
 mod profiling;
@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 use bencher::DynBenchFn;
 use bencher::StaticBenchFn;
 use bencher::TestDescAndFn;
@@ -1,8 +1,8 @@
-# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+# Copyright 2018-2025 the Deno authors. MIT license.

 [package]
 name = "deno"
-version = "2.1.2"
+version = "2.1.7"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -16,11 +16,6 @@ name = "deno"
 path = "main.rs"
 doc = false

-[[bin]]
-name = "denort"
-path = "mainrt.rs"
-doc = false
-
 [[test]]
 name = "integration"
 path = "integration_tests_runner.rs"
@@ -49,7 +44,7 @@ dhat-heap = ["dhat"]
 upgrade = []
 # A dev feature to disable creations and loading of snapshots in favor of
 # loading JS sources at runtime.
-hmr = ["deno_runtime/hmr"]
+hmr = ["deno_runtime/hmr", "deno_snapshots/disable"]
 # Vendor zlib as zlib-ng
 __vendored_zlib_ng = ["flate2/zlib-ng-compat", "libz-sys/zlib-ng"]

@@ -60,8 +55,11 @@ lazy-regex.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 zstd.workspace = true
-glibc_version = "0.1.2"
 flate2 = { workspace = true, features = ["default"] }
+deno_error.workspace = true

+[target.'cfg(unix)'.build-dependencies]
+glibc_version = "0.1.2"
+
 [target.'cfg(windows)'.build-dependencies]
 winapi.workspace = true
@@ -69,32 +67,38 @@ winres.workspace = true

 [dependencies]
 deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
-deno_cache_dir.workspace = true
-deno_config.workspace = true
+deno_cache_dir = { workspace = true, features = ["sync"] }
+deno_config = { workspace = true, features = ["sync", "workspace"] }
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "=0.161.2", features = ["rust", "comrak"] }
-deno_graph = { version = "=0.86.3" }
-deno_lint = { version = "=0.68.2", features = ["docs"] }
+deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] }
+deno_error.workspace = true
+deno_graph = { version = "=0.87.0" }
+deno_lib.workspace = true
+deno_lint = { version = "=0.68.2" }
 deno_lockfile.workspace = true
 deno_media_type = { workspace = true, features = ["data_url", "decoding", "module_specifier"] }
 deno_npm.workspace = true
 deno_npm_cache.workspace = true
-deno_package_json.workspace = true
+deno_package_json = { workspace = true, features = ["sync"] }
 deno_path_util.workspace = true
-deno_resolver.workspace = true
+deno_resolver = { workspace = true, features = ["sync"] }
 deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
 deno_semver.workspace = true
-deno_task_shell = "=0.20.1"
+deno_snapshots = { workspace = true }
+deno_task_shell = "=0.20.2"
 deno_telemetry.workspace = true
 deno_terminal.workspace = true
-libsui = "0.5.0"
+libsui.workspace = true
 node_resolver.workspace = true

 anstream = "0.6.14"
 async-trait.workspace = true
 base64.workspace = true
 bincode = "=1.3.3"
 boxed_error.workspace = true
 bytes.workspace = true
 cache_control.workspace = true
+capacity_builder.workspace = true
 chrono = { workspace = true, features = ["now"] }
 clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
 clap_complete = "=4.5.24"
@@ -110,7 +114,6 @@ dprint-plugin-json = "=0.19.4"
 dprint-plugin-jupyter = "=0.1.5"
 dprint-plugin-markdown = "=0.17.8"
 dprint-plugin-typescript = "=0.93.3"
 env_logger = "=0.10.0"
-fancy-regex = "=0.10.0"
 faster-hex.workspace = true
 # If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`.
@@ -121,7 +124,7 @@ http.workspace = true
 http-body.workspace = true
 http-body-util.workspace = true
 hyper-util.workspace = true
-import_map = { version = "=0.20.1", features = ["ext"] }
+import_map = { version = "=0.21.0", features = ["ext"] }
 indexmap.workspace = true
 jsonc-parser = { workspace = true, features = ["cst", "serde"] }
 jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] }
@@ -151,10 +154,10 @@ rustyline-derive = "=0.7.0"
 serde.workspace = true
 serde_repr.workspace = true
 sha2.workspace = true
-shell-escape = "=0.1.5"
 spki = { version = "0.7", features = ["pem"] }
 sqlformat = "=0.3.2"
 strsim = "0.11.1"
+sys_traits = { workspace = true, features = ["getrandom", "filetime", "libc", "real", "strip_unc", "winapi"] }
 tar.workspace = true
 tempfile.workspace = true
 text-size = "=1.1.0"
@@ -179,10 +182,12 @@ winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "

 [target.'cfg(unix)'.dependencies]
 nix.workspace = true
+shell-escape = "=0.1.5"

 [dev-dependencies]
 deno_bench_util.workspace = true
 pretty_assertions.workspace = true
+sys_traits = { workspace = true, features = ["memory"] }
 test_util.workspace = true

 [package.metadata.winres]
@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.

 use std::collections::HashSet;

@@ -8,60 +8,13 @@ use deno_semver::jsr::JsrDepPackageReq;
 use deno_semver::jsr::JsrPackageReqReference;
 use deno_semver::npm::NpmPackageReqReference;

-#[cfg(test)] // happens to only be used by the tests at the moment
-pub struct DenoConfigFsAdapter<'a>(
-  pub &'a dyn deno_runtime::deno_fs::FileSystem,
-);
-
-#[cfg(test)]
-impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
-  fn read_to_string_lossy(
-    &self,
-    path: &std::path::Path,
-  ) -> Result<std::borrow::Cow<'static, str>, std::io::Error> {
-    self
-      .0
-      .read_text_file_lossy_sync(path, None)
-      .map_err(|err| err.into_io_error())
-  }
-
-  fn stat_sync(
-    &self,
-    path: &std::path::Path,
-  ) -> Result<deno_config::fs::FsMetadata, std::io::Error> {
-    self
-      .0
-      .stat_sync(path)
-      .map(|stat| deno_config::fs::FsMetadata {
-        is_file: stat.is_file,
-        is_directory: stat.is_directory,
-        is_symlink: stat.is_symlink,
-      })
-      .map_err(|err| err.into_io_error())
-  }
-
-  fn read_dir(
-    &self,
-    path: &std::path::Path,
-  ) -> Result<Vec<deno_config::fs::FsDirEntry>, std::io::Error> {
-    self
-      .0
-      .read_dir_sync(path)
-      .map_err(|err| err.into_io_error())
-      .map(|entries| {
-        entries
-          .into_iter()
-          .map(|e| deno_config::fs::FsDirEntry {
-            path: path.join(e.name),
-            metadata: deno_config::fs::FsMetadata {
-              is_file: e.is_file,
-              is_directory: e.is_directory,
-              is_symlink: e.is_symlink,
-            },
-          })
-          .collect()
-      })
-  }
-}
+pub fn import_map_deps(
+  import_map: &serde_json::Value,
+) -> HashSet<JsrDepPackageReq> {
+  let values = imports_values(import_map.get("imports"))
+    .into_iter()
+    .chain(scope_values(import_map.get("scopes")));
+  values_to_set(values)
+}

 pub fn deno_json_deps(
@@ -1,6 +1,5 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.

-use std::borrow::Cow;
 use std::collections::HashSet;
 use std::env;
 use std::ffi::OsString;
@@ -32,21 +31,22 @@ use deno_core::error::AnyError;
 use deno_core::resolve_url_or_path;
 use deno_core::url::Url;
 use deno_graph::GraphKind;
+use deno_lib::args::CaData;
+use deno_lib::args::UnstableConfig;
+use deno_lib::version::DENO_VERSION_INFO;
+use deno_npm::NpmSystemInfo;
 use deno_path_util::normalize_path;
 use deno_path_util::url_to_file_path;
 use deno_runtime::deno_permissions::PermissionsOptions;
 use deno_runtime::deno_permissions::SysDescriptor;
 use deno_telemetry::OtelConfig;
+use deno_telemetry::OtelConsoleConfig;
 use log::debug;
 use log::Level;
 use serde::Deserialize;
 use serde::Serialize;

-use crate::args::resolve_no_prompt;
-use crate::util::fs::canonicalize_path;
-
 use super::flags_net;
 use super::jsr_url;
+use crate::util::fs::canonicalize_path;

 #[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub enum ConfigFlag {
@@ -245,7 +245,7 @@ pub struct InstallFlagsGlobal {
 }

 #[derive(Clone, Debug, Eq, PartialEq)]
-pub enum InstallKind {
+pub enum InstallFlags {
   Local(InstallFlagsLocal),
   Global(InstallFlagsGlobal),
 }
@@ -257,11 +257,6 @@ pub enum InstallFlagsLocal {
   Entrypoints(Vec<String>),
 }

-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct InstallFlags {
-  pub kind: InstallKind,
-}
-
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct JSONReferenceFlags {
   pub json: deno_core::serde_json::Value,
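The hunk above folds the old wrapper struct (`InstallFlags { kind: InstallKind }`) into a single enum that reuses the `InstallFlags` name. A standalone sketch of how call sites match on the flattened shape; the field and variant bodies below are trimmed-down stand-ins, not the real structs:

```rust
// Simplified stand-ins for the real flag types in cli/args/flags.rs.
struct InstallFlagsGlobal {
  module_url: String,
}

enum InstallFlagsLocal {
  Add(Vec<String>),
  TopLevel,
}

// After the refactor there is no `kind` field to reach through; the enum is
// matched directly.
enum InstallFlags {
  Local(InstallFlagsLocal),
  Global(InstallFlagsGlobal),
}

fn describe(flags: &InstallFlags) -> String {
  match flags {
    InstallFlags::Local(InstallFlagsLocal::Add(pkgs)) => format!("add {pkgs:?}"),
    InstallFlags::Local(InstallFlagsLocal::TopLevel) => "install from config".to_string(),
    InstallFlags::Global(g) => format!("global install of {}", g.module_url),
  }
}

fn main() {
  let flags = InstallFlags::Global(InstallFlagsGlobal {
    module_url: "jsr:@std/http/file-server".to_string(),
  });
  println!("{}", describe(&flags));
}
```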
@@ -506,6 +501,52 @@ impl DenoSubcommand {
         | Self::Lsp
     )
   }
+
+  pub fn npm_system_info(&self) -> NpmSystemInfo {
+    match self {
+      DenoSubcommand::Compile(CompileFlags {
+        target: Some(target),
+        ..
+      }) => {
+        // the values of NpmSystemInfo align with the possible values for the
+        // `arch` and `platform` fields of Node.js' `process` global:
+        // https://nodejs.org/api/process.html
+        match target.as_str() {
+          "aarch64-apple-darwin" => NpmSystemInfo {
+            os: "darwin".into(),
+            cpu: "arm64".into(),
+          },
+          "aarch64-unknown-linux-gnu" => NpmSystemInfo {
+            os: "linux".into(),
+            cpu: "arm64".into(),
+          },
+          "x86_64-apple-darwin" => NpmSystemInfo {
+            os: "darwin".into(),
+            cpu: "x64".into(),
+          },
+          "x86_64-unknown-linux-gnu" => NpmSystemInfo {
+            os: "linux".into(),
+            cpu: "x64".into(),
+          },
+          "x86_64-pc-windows-msvc" => NpmSystemInfo {
+            os: "win32".into(),
+            cpu: "x64".into(),
+          },
+          value => {
+            log::warn!(
+              concat!(
+                "Not implemented npm system info for target '{}'. Using current ",
+                "system default. This may impact architecture specific dependencies."
+              ),
+              value,
+            );
+            NpmSystemInfo::default()
+          }
+        }
+      }
+      _ => NpmSystemInfo::default(),
+    }
+  }
 }

 impl Default for DenoSubcommand {
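The new `npm_system_info` helper above maps a `deno compile --target` triple onto the `os`/`cpu` naming used by npm and Node's `process` global, so platform-specific npm packages for the target platform can be selected. A standalone, simplified sketch of the same mapping (plain tuples in place of the real `NpmSystemInfo` struct), runnable outside the CLI:

```rust
// Returns (os, cpu) in Node.js `process.platform` / `process.arch` terms for a
// Rust target triple, or None for triples the CLI does not special-case.
fn node_platform_for_target(target: &str) -> Option<(&'static str, &'static str)> {
  match target {
    "aarch64-apple-darwin" => Some(("darwin", "arm64")),
    "aarch64-unknown-linux-gnu" => Some(("linux", "arm64")),
    "x86_64-apple-darwin" => Some(("darwin", "x64")),
    "x86_64-unknown-linux-gnu" => Some(("linux", "x64")),
    "x86_64-pc-windows-msvc" => Some(("win32", "x64")),
    _ => None, // the real code logs a warning and falls back to the host's values
  }
}

fn main() {
  for target in ["aarch64-apple-darwin", "x86_64-pc-windows-msvc", "riscv64gc-unknown-linux-gnu"] {
    match node_platform_for_target(target) {
      Some((os, cpu)) => println!("{target} -> os={os}, cpu={cpu}"),
      None => println!("{target} -> not special-cased, host defaults would be used"),
    }
  }
}
```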
@@ -555,15 +596,6 @@ impl Default for TypeCheckMode {
   }
 }

-#[derive(Clone, Debug, Eq, PartialEq)]
-pub enum CaData {
-  /// The string is a file path
-  File(String),
-  /// This variant is not exposed as an option in the CLI, it is used internally
-  /// for standalone binaries.
-  Bytes(Vec<u8>),
-}
-
 // Info needed to run NPM lifecycle scripts
 #[derive(Clone, Debug, Eq, PartialEq, Default)]
 pub struct LifecycleScriptsConfig {
@@ -591,18 +623,6 @@ fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
   }
 }

-#[derive(
-  Clone, Default, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize,
-)]
-pub struct UnstableConfig {
-  // TODO(bartlomieju): remove in Deno 2.5
-  pub legacy_flag_enabled: bool, // --unstable
-  pub bare_node_builtins: bool,
-  pub detect_cjs: bool,
-  pub sloppy_imports: bool,
-  pub features: Vec<String>, // --unstabe-kv --unstable-cron
-}
-
 #[derive(Clone, Debug, Eq, PartialEq, Default)]
 pub struct InternalFlags {
   /// Used when the language server is configured with an
@@ -696,97 +716,6 @@ impl PermissionFlags {
       || self.deny_write.is_some()
       || self.allow_import.is_some()
   }
-
-  pub fn to_options(&self, cli_arg_urls: &[Cow<Url>]) -> PermissionsOptions {
-    fn handle_allow<T: Default>(
-      allow_all: bool,
-      value: Option<T>,
-    ) -> Option<T> {
-      if allow_all {
-        assert!(value.is_none());
-        Some(T::default())
-      } else {
-        value
-      }
-    }
-
-    fn handle_imports(
-      cli_arg_urls: &[Cow<Url>],
-      imports: Option<Vec<String>>,
-    ) -> Option<Vec<String>> {
-      if imports.is_some() {
-        return imports;
-      }
-
-      let builtin_allowed_import_hosts = [
-        "jsr.io:443",
-        "deno.land:443",
-        "esm.sh:443",
-        "cdn.jsdelivr.net:443",
-        "raw.githubusercontent.com:443",
-        "gist.githubusercontent.com:443",
-      ];
-
-      let mut imports =
-        Vec::with_capacity(builtin_allowed_import_hosts.len() + 1);
-      imports
-        .extend(builtin_allowed_import_hosts.iter().map(|s| s.to_string()));
-
-      // also add the JSR_URL env var
-      if let Some(jsr_host) = allow_import_host_from_url(jsr_url()) {
-        imports.push(jsr_host);
-      }
-      // include the cli arg urls
-      for url in cli_arg_urls {
-        if let Some(host) = allow_import_host_from_url(url) {
-          imports.push(host);
-        }
-      }
-
-      Some(imports)
-    }
-
-    PermissionsOptions {
-      allow_all: self.allow_all,
-      allow_env: handle_allow(self.allow_all, self.allow_env.clone()),
-      deny_env: self.deny_env.clone(),
-      allow_net: handle_allow(self.allow_all, self.allow_net.clone()),
-      deny_net: self.deny_net.clone(),
-      allow_ffi: handle_allow(self.allow_all, self.allow_ffi.clone()),
-      deny_ffi: self.deny_ffi.clone(),
-      allow_read: handle_allow(self.allow_all, self.allow_read.clone()),
-      deny_read: self.deny_read.clone(),
-      allow_run: handle_allow(self.allow_all, self.allow_run.clone()),
-      deny_run: self.deny_run.clone(),
-      allow_sys: handle_allow(self.allow_all, self.allow_sys.clone()),
-      deny_sys: self.deny_sys.clone(),
-      allow_write: handle_allow(self.allow_all, self.allow_write.clone()),
-      deny_write: self.deny_write.clone(),
-      allow_import: handle_imports(
-        cli_arg_urls,
-        handle_allow(self.allow_all, self.allow_import.clone()),
-      ),
-      prompt: !resolve_no_prompt(self),
-    }
-  }
 }

-/// Gets the --allow-import host from the provided url
-fn allow_import_host_from_url(url: &Url) -> Option<String> {
-  let host = url.host()?;
-  if let Some(port) = url.port() {
-    Some(format!("{}:{}", host, port))
-  } else {
-    use deno_core::url::Host::*;
-    match host {
-      Domain(domain) if domain == "jsr.io" && url.scheme() == "https" => None,
-      _ => match url.scheme() {
-        "https" => Some(format!("{}:443", host)),
-        "http" => Some(format!("{}:80", host)),
-        _ => None,
-      },
-    }
-  }
-}
-
 fn join_paths(allowlist: &[String], d: &str) -> String {
@@ -990,21 +919,43 @@ impl Flags {
     args
   }

-  pub fn otel_config(&self) -> Option<OtelConfig> {
-    if self
+  pub fn otel_config(&self) -> OtelConfig {
+    let has_unstable_flag = self
       .unstable_config
       .features
-      .contains(&String::from("otel"))
-    {
-      Some(OtelConfig {
-        runtime_name: Cow::Borrowed("deno"),
-        runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
-        deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC")
-          .is_ok(),
-        ..Default::default()
-      })
-    } else {
-      None
+      .contains(&String::from("otel"));
+
+    let otel_var = |name| match std::env::var(name) {
+      Ok(s) if s.to_lowercase() == "true" => Some(true),
+      Ok(s) if s.to_lowercase() == "false" => Some(false),
+      _ => None,
+    };
+
+    let disabled =
+      !has_unstable_flag || otel_var("OTEL_SDK_DISABLED").unwrap_or(false);
+    let default = !disabled && otel_var("OTEL_DENO").unwrap_or(false);
+
+    OtelConfig {
+      tracing_enabled: !disabled
+        && otel_var("OTEL_DENO_TRACING").unwrap_or(default),
+      metrics_enabled: !disabled
+        && otel_var("OTEL_DENO_METRICS").unwrap_or(default),
+      console: match std::env::var("OTEL_DENO_CONSOLE").as_deref() {
+        Ok(_) if disabled => OtelConsoleConfig::Ignore,
+        Ok("ignore") => OtelConsoleConfig::Ignore,
+        Ok("capture") => OtelConsoleConfig::Capture,
+        Ok("replace") => OtelConsoleConfig::Replace,
+        _ => {
+          if default {
+            OtelConsoleConfig::Capture
+          } else {
+            OtelConsoleConfig::Ignore
+          }
+        }
+      },
+      deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC")
+        .as_deref()
+        == Ok("1"),
     }
   }

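A standalone sketch of the precedence the new `otel_config` implements, with a plain map standing in for real environment variables: the `otel` unstable feature must be enabled and `OTEL_SDK_DISABLED` must not be set to true, `OTEL_DENO` then supplies the default for both signals, and `OTEL_DENO_TRACING` / `OTEL_DENO_METRICS` override per signal.

```rust
use std::collections::HashMap;

// `env` stands in for std::env::var; only "true"/"false" (any case) are
// honored, matching the helper closure in the diff above.
fn otel_flag(env: &HashMap<&str, &str>, name: &str) -> Option<bool> {
  match env.get(name).map(|s| s.to_lowercase()) {
    Some(s) if s == "true" => Some(true),
    Some(s) if s == "false" => Some(false),
    _ => None,
  }
}

fn tracing_and_metrics(env: &HashMap<&str, &str>, has_unstable_otel: bool) -> (bool, bool) {
  let disabled = !has_unstable_otel || otel_flag(env, "OTEL_SDK_DISABLED").unwrap_or(false);
  let default = !disabled && otel_flag(env, "OTEL_DENO").unwrap_or(false);
  (
    !disabled && otel_flag(env, "OTEL_DENO_TRACING").unwrap_or(default),
    !disabled && otel_flag(env, "OTEL_DENO_METRICS").unwrap_or(default),
  )
}

fn main() {
  let env = HashMap::from([("OTEL_DENO", "true"), ("OTEL_DENO_METRICS", "false")]);
  // With the unstable otel feature: tracing falls back to the OTEL_DENO
  // default (true), metrics is explicitly turned off.
  assert_eq!(tracing_and_metrics(&env, true), (true, false));
  // Without the unstable feature everything stays off regardless of env vars.
  assert_eq!(tracing_and_metrics(&env, false), (false, false));
  println!("otel precedence checks passed");
}
```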
@@ -1561,14 +1512,15 @@ fn handle_repl_flags(flags: &mut Flags, repl_flags: ReplFlags) {
}

pub fn clap_root() -> Command {
  debug_assert_eq!(DENO_VERSION_INFO.typescript, deno_snapshots::TS_VERSION);
  let long_version = format!(
    "{} ({}, {}, {})\nv8 {}\ntypescript {}",
    crate::version::DENO_VERSION_INFO.deno,
    crate::version::DENO_VERSION_INFO.release_channel.name(),
    DENO_VERSION_INFO.deno,
    DENO_VERSION_INFO.release_channel.name(),
    env!("PROFILE"),
    env!("TARGET"),
    deno_core::v8::VERSION_STRING,
    crate::version::DENO_VERSION_INFO.typescript
    DENO_VERSION_INFO.typescript
  );

  run_args(Command::new("deno"), true)

@@ -1584,7 +1536,7 @@ pub fn clap_root() -> Command {
    )
    .color(ColorChoice::Auto)
    .term_width(800)
    .version(crate::version::DENO_VERSION_INFO.deno)
    .version(DENO_VERSION_INFO.deno)
    .long_version(long_version)
    .disable_version_flag(true)
    .disable_help_flag(true)
@@ -2664,10 +2616,10 @@ Display outdated dependencies:
  <p(245)>deno outdated</>
  <p(245)>deno outdated --compatible</>

Update dependencies:
Update dependencies to latest semver compatible versions:
  <p(245)>deno outdated --update</>
Update dependencies to latest versions, ignoring semver requirements:
  <p(245)>deno outdated --update --latest</>
  <p(245)>deno outdated --update</>

Filters can be used to select which packages to act on. Filters can include wildcards (*) to match multiple packages.
  <p(245)>deno outdated --update --latest \"@std/*\"</>

@@ -2703,7 +2655,6 @@ Specific version requirements to update to can be specified:
        .help(
          "Update to the latest version, regardless of semver constraints",
        )
        .requires("update")
        .conflicts_with("compatible"),
    )
    .arg(
@@ -2905,6 +2856,7 @@ To ignore linting on an entire file, you can add an ignore comment at the top of
      .arg(watch_arg(false))
      .arg(watch_exclude_arg())
      .arg(no_clear_screen_arg())
      .arg(allow_import_arg())
  })
}

@@ -4351,7 +4303,7 @@ impl CommandExt for Command {
    let mut cmd = self.arg(
      Arg::new("unstable")
        .long("unstable")
        .help(cstr!("Enable all unstable features and APIs. Instead of using this flag, consider enabling individual unstable features
        .help(cstr!("The `--unstable` flag has been deprecated. Use granular `--unstable-*` flags instead
  <p(245)>To view the list of individual unstable feature flags, run this command again with --help=unstable</>"))
        .action(ArgAction::SetTrue)
        .hide(matches!(cfg, UnstableArgsConfig::None))

@@ -4407,6 +4359,16 @@ impl CommandExt for Command {
      })
      .help_heading(UNSTABLE_HEADING)
      .display_order(next_display_order())
    ).arg(
      Arg::new("unstable-npm-lazy-caching")
        .long("unstable-npm-lazy-caching")
        .help("Enable unstable lazy caching of npm dependencies, downloading them only as needed (disabled: all npm packages in package.json are installed on startup; enabled: only npm packages that are actually referenced in an import are installed")
        .env("DENO_UNSTABLE_NPM_LAZY_CACHING")
        .value_parser(FalseyValueParser::new())
        .action(ArgAction::SetTrue)
        .hide(true)
        .help_heading(UNSTABLE_HEADING)
        .display_order(next_display_order()),
    );

    for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS.iter() {

@@ -4920,15 +4882,14 @@ fn install_parse(
    let module_url = cmd_values.next().unwrap();
    let args = cmd_values.collect();

    flags.subcommand = DenoSubcommand::Install(InstallFlags {
      kind: InstallKind::Global(InstallFlagsGlobal {
    flags.subcommand =
      DenoSubcommand::Install(InstallFlags::Global(InstallFlagsGlobal {
        name,
        module_url,
        args,
        root,
        force,
      }),
    });
      }));

    return Ok(());
  }

@@ -4937,22 +4898,19 @@ fn install_parse(
  allow_scripts_arg_parse(flags, matches)?;
  if matches.get_flag("entrypoint") {
    let entrypoints = matches.remove_many::<String>("cmd").unwrap_or_default();
    flags.subcommand = DenoSubcommand::Install(InstallFlags {
      kind: InstallKind::Local(InstallFlagsLocal::Entrypoints(
        entrypoints.collect(),
      )),
    });
    flags.subcommand = DenoSubcommand::Install(InstallFlags::Local(
      InstallFlagsLocal::Entrypoints(entrypoints.collect()),
    ));
  } else if let Some(add_files) = matches
    .remove_many("cmd")
    .map(|packages| add_parse_inner(matches, Some(packages)))
  {
    flags.subcommand = DenoSubcommand::Install(InstallFlags {
      kind: InstallKind::Local(InstallFlagsLocal::Add(add_files)),
    })
    flags.subcommand = DenoSubcommand::Install(InstallFlags::Local(
      InstallFlagsLocal::Add(add_files),
    ))
  } else {
    flags.subcommand = DenoSubcommand::Install(InstallFlags {
      kind: InstallKind::Local(InstallFlagsLocal::TopLevel),
    });
    flags.subcommand =
      DenoSubcommand::Install(InstallFlags::Local(InstallFlagsLocal::TopLevel));
  }
  Ok(())
}

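The install_parse changes above track the refactor of InstallFlags from a struct carrying an InstallKind into an enum. A hedged sketch of the shapes this implies follows; field lists are abridged and AddFlags stands in for the real add-subcommand flags type, the actual definitions live elsewhere in this commit.

// Illustrative sketch only; not the authoritative definitions.
struct AddFlags; // placeholder for the add-subcommand flags

struct InstallFlagsGlobal {
  name: Option<String>,
  module_url: String,
  args: Vec<String>,
  root: Option<String>,
  force: bool,
}

enum InstallFlagsLocal {
  Add(AddFlags),
  Entrypoints(Vec<String>),
  TopLevel,
}

// Previously: InstallFlags { kind: InstallKind::Global(..) | InstallKind::Local(..) }
enum InstallFlags {
  Global(InstallFlagsGlobal),
  Local(InstallFlagsLocal),
}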
@@ -5084,6 +5042,7 @@ fn lint_parse(
  unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionOnly);
  ext_arg_parse(flags, matches);
  config_args_parse(flags, matches);
  allow_import_parse(flags, matches);

  let files = match matches.remove_many::<String>("files") {
    Some(f) => f.collect(),
@@ -5278,8 +5237,15 @@ fn task_parse(
  unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
  node_modules_arg_parse(flags, matches);

  let filter = matches.remove_one::<String>("filter");
  let recursive = matches.get_flag("recursive") || filter.is_some();
  let mut recursive = matches.get_flag("recursive");
  let filter = if let Some(filter) = matches.remove_one::<String>("filter") {
    recursive = false;
    Some(filter)
  } else if recursive {
    Some("*".to_string())
  } else {
    None
  };

  let mut task_flags = TaskFlags {
    cwd: matches.remove_one::<String>("cwd"),
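In the new task_parse, an explicit --filter wins and clears the recursive flag, while a bare --recursive is rewritten to the wildcard filter. A small standalone sketch of that resolution, illustrative only:

// Illustrative sketch, not from the diff: the (filter, recursive) resolution
// performed by the new task_parse.
fn resolve_task_selection(
  filter_arg: Option<String>,
  recursive_flag: bool,
) -> (Option<String>, bool) {
  let mut recursive = recursive_flag;
  let filter = match filter_arg {
    Some(filter) => {
      // An explicit --filter implies workspace selection on its own,
      // so the recursive flag is cleared.
      recursive = false;
      Some(filter)
    }
    // A bare --recursive behaves like --filter "*".
    None if recursive => Some("*".to_string()),
    None => None,
  };
  (filter, recursive)
}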
@@ -5990,6 +5956,8 @@ fn unstable_args_parse(
  flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs");
  flags.unstable_config.sloppy_imports =
    matches.get_flag("unstable-sloppy-imports");
  flags.unstable_config.npm_lazy_caching =
    matches.get_flag("unstable-npm-lazy-caching");

  if matches!(cfg, UnstableArgsConfig::ResolutionAndRuntime) {
    for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
@@ -6024,9 +5992,10 @@ pub fn resolve_urls(urls: Vec<String>) -> Vec<String> {

#[cfg(test)]
mod tests {
  use super::*;
  use pretty_assertions::assert_eq;

  use super::*;

  /// Creates vector of strings, Vec<String>
  macro_rules! svec {
    ($($x:expr),* $(,)?) => (vec![$($x.to_string().into()),*]);
@@ -7135,6 +7104,7 @@ mod tests {
    let r = flags_from_vec(svec![
      "deno",
      "lint",
      "--allow-import",
      "--watch",
      "script_1.ts",
      "script_2.ts"

@@ -7156,6 +7126,10 @@ mod tests {
          compact: false,
          watch: Some(Default::default()),
        }),
        permissions: PermissionFlags {
          allow_import: Some(vec![]),
          ..Default::default()
        },
        ..Flags::default()
      }
    );
@ -8593,15 +8567,15 @@ mod tests {
|
|||
assert_eq!(
|
||||
r.unwrap(),
|
||||
Flags {
|
||||
subcommand: DenoSubcommand::Install(InstallFlags {
|
||||
kind: InstallKind::Global(InstallFlagsGlobal {
|
||||
subcommand: DenoSubcommand::Install(InstallFlags::Global(
|
||||
InstallFlagsGlobal {
|
||||
name: None,
|
||||
module_url: "jsr:@std/http/file-server".to_string(),
|
||||
args: vec![],
|
||||
root: None,
|
||||
force: false,
|
||||
}),
|
||||
}),
|
||||
}
|
||||
),),
|
||||
..Flags::default()
|
||||
}
|
||||
);
|
||||
|
@ -8615,15 +8589,15 @@ mod tests {
|
|||
assert_eq!(
|
||||
r.unwrap(),
|
||||
Flags {
|
||||
subcommand: DenoSubcommand::Install(InstallFlags {
|
||||
kind: InstallKind::Global(InstallFlagsGlobal {
|
||||
subcommand: DenoSubcommand::Install(InstallFlags::Global(
|
||||
InstallFlagsGlobal {
|
||||
name: None,
|
||||
module_url: "jsr:@std/http/file-server".to_string(),
|
||||
args: vec![],
|
||||
root: None,
|
||||
force: false,
|
||||
}),
|
||||
}),
|
||||
}
|
||||
),),
|
||||
..Flags::default()
|
||||
}
|
||||
);
|
||||
|
@ -8636,15 +8610,15 @@ mod tests {
|
|||
assert_eq!(
|
||||
r.unwrap(),
|
||||
Flags {
|
||||
subcommand: DenoSubcommand::Install(InstallFlags {
|
||||
kind: InstallKind::Global(InstallFlagsGlobal {
|
||||
subcommand: DenoSubcommand::Install(InstallFlags::Global(
|
||||
InstallFlagsGlobal {
|
||||
name: Some("file_server".to_string()),
|
||||
module_url: "jsr:@std/http/file-server".to_string(),
|
||||
args: svec!["foo", "bar"],
|
||||
root: Some("/foo".to_string()),
|
||||
force: true,
|
||||
}),
|
||||
}),
|
||||
}
|
||||
),),
|
||||
import_map_path: Some("import_map.json".to_string()),
|
||||
no_remote: true,
|
||||
config_flag: ConfigFlag::Path("tsconfig.json".to_owned()),
|
||||
|
@ -10539,7 +10513,7 @@ mod tests {
|
|||
cwd: None,
|
||||
task: Some("build".to_string()),
|
||||
is_run: false,
|
||||
recursive: true,
|
||||
recursive: false,
|
||||
filter: Some("*".to_string()),
|
||||
eval: false,
|
||||
}),
|
||||
|
@ -10556,7 +10530,7 @@ mod tests {
|
|||
task: Some("build".to_string()),
|
||||
is_run: false,
|
||||
recursive: true,
|
||||
filter: None,
|
||||
filter: Some("*".to_string()),
|
||||
eval: false,
|
||||
}),
|
||||
..Flags::default()
|
||||
|
@ -10572,7 +10546,7 @@ mod tests {
|
|||
task: Some("build".to_string()),
|
||||
is_run: false,
|
||||
recursive: true,
|
||||
filter: None,
|
||||
filter: Some("*".to_string()),
|
||||
eval: false,
|
||||
}),
|
||||
..Flags::default()
|
||||
|
@ -11198,9 +11172,9 @@ mod tests {
|
|||
..Flags::default()
|
||||
},
|
||||
"install" => Flags {
|
||||
subcommand: DenoSubcommand::Install(InstallFlags {
|
||||
kind: InstallKind::Local(InstallFlagsLocal::Add(flags)),
|
||||
}),
|
||||
subcommand: DenoSubcommand::Install(InstallFlags::Local(
|
||||
InstallFlagsLocal::Add(flags),
|
||||
)),
|
||||
..Flags::default()
|
||||
},
|
||||
_ => unreachable!(),
|
||||
|
@ -11509,8 +11483,6 @@ mod tests {
|
|||
..Default::default()
|
||||
}
|
||||
);
|
||||
// just make sure this doesn't panic
|
||||
let _ = flags.permissions.to_options(&[]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -11586,29 +11558,6 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_allow_import_host_from_url() {
|
||||
fn parse(text: &str) -> Option<String> {
|
||||
allow_import_host_from_url(&Url::parse(text).unwrap())
|
||||
}
|
||||
|
||||
assert_eq!(parse("https://jsr.io"), None);
|
||||
assert_eq!(
|
||||
parse("http://127.0.0.1:4250"),
|
||||
Some("127.0.0.1:4250".to_string())
|
||||
);
|
||||
assert_eq!(parse("http://jsr.io"), Some("jsr.io:80".to_string()));
|
||||
assert_eq!(
|
||||
parse("https://example.com"),
|
||||
Some("example.com:443".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
parse("http://example.com"),
|
||||
Some("example.com:80".to_string())
|
||||
);
|
||||
assert_eq!(parse("file:///example.com"), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn allow_all_conflicts_allow_perms() {
|
||||
let flags = [
|
||||
|
@ -11687,6 +11636,14 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
|
|||
recursive: false,
|
||||
},
|
||||
),
|
||||
(
|
||||
svec!["--latest"],
|
||||
OutdatedFlags {
|
||||
filters: svec![],
|
||||
kind: OutdatedKind::PrintOutdated { compatible: false },
|
||||
recursive: false,
|
||||
},
|
||||
),
|
||||
];
|
||||
for (input, expected) in cases {
|
||||
let mut args = svec!["deno", "outdated"];
|
||||
|

@@ -1,9 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::net::IpAddr;
use std::str::FromStr;

use deno_core::url::Url;
use deno_runtime::deno_permissions::NetDescriptor;
use std::net::IpAddr;
use std::str::FromStr;

#[derive(Debug, PartialEq, Eq)]
pub struct ParsePortError(String);
@@ -1,24 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;

use crate::file_fetcher::FileFetcher;

pub async fn resolve_import_map_value_from_specifier(
  specifier: &Url,
  file_fetcher: &FileFetcher,
) -> Result<serde_json::Value, AnyError> {
  if specifier.scheme() == "data" {
    let data_url_text =
      deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
    Ok(serde_json::from_str(&data_url_text)?)
  } else {
    let file = file_fetcher
      .fetch_bypass_permissions(specifier)
      .await?
      .into_text_decoded()?;
    Ok(serde_json::from_str(&file.source)?)
  }
}
@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::collections::HashSet;
use std::path::PathBuf;

@@ -9,20 +9,21 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_core::serde_json;
use deno_error::JsErrorBox;
use deno_lockfile::Lockfile;
use deno_lockfile::WorkspaceMemberConfig;
use deno_package_json::PackageJsonDepValue;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_runtime::deno_node::PackageJson;
use deno_semver::jsr::JsrDepPackageReq;

use crate::cache;
use crate::util::fs::atomic_write_file_with_retries;
use crate::Flags;

use crate::args::deno_json::import_map_deps;
use crate::args::DenoSubcommand;
use crate::args::InstallFlags;
use crate::args::InstallKind;

use deno_lockfile::Lockfile;
use crate::cache;
use crate::sys::CliSys;
use crate::Flags;

#[derive(Debug)]
pub struct CliLockfileReadFromPathOptions {

@@ -34,6 +35,7 @@ pub struct CliLockfileReadFromPathOptions {

#[derive(Debug)]
pub struct CliLockfile {
  sys: CliSys,
  lockfile: Mutex<Lockfile>,
  pub filename: PathBuf,
  frozen: bool,

@@ -58,6 +60,16 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
  }
}

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum AtomicWriteFileWithRetriesError {
  #[class(inherit)]
  #[error(transparent)]
  Changed(JsErrorBox),
  #[class(inherit)]
  #[error("Failed writing lockfile")]
  Io(#[source] std::io::Error),
}

impl CliLockfile {
  /// Get the inner deno_lockfile::Lockfile.
  pub fn lock(&self) -> Guard<Lockfile> {
@@ -77,12 +89,16 @@ impl CliLockfile {
    self.lockfile.lock().overwrite
  }

  pub fn write_if_changed(&self) -> Result<(), AnyError> {
  pub fn write_if_changed(
    &self,
  ) -> Result<(), AtomicWriteFileWithRetriesError> {
    if self.skip_write {
      return Ok(());
    }

    self.error_if_changed()?;
    self
      .error_if_changed()
      .map_err(AtomicWriteFileWithRetriesError::Changed)?;
    let mut lockfile = self.lockfile.lock();
    let Some(bytes) = lockfile.resolve_write_bytes() else {
      return Ok(()); // nothing to do

@@ -90,18 +106,21 @@ impl CliLockfile {
    // do an atomic write to reduce the chance of multiple deno
    // processes corrupting the file
    atomic_write_file_with_retries(
      &self.sys,
      &lockfile.filename,
      bytes,
      &bytes,
      cache::CACHE_PERM,
    )
    .context("Failed writing lockfile.")?;
    .map_err(AtomicWriteFileWithRetriesError::Io)?;
    lockfile.has_content_changed = false;
    Ok(())
  }

  pub fn discover(
    sys: &CliSys,
    flags: &Flags,
    workspace: &Workspace,
    maybe_external_import_map: Option<&serde_json::Value>,
  ) -> Result<Option<CliLockfile>, AnyError> {
    fn pkg_json_deps(
      maybe_pkg_json: Option<&PackageJson>,

@@ -136,10 +155,8 @@ impl CliLockfile {
    if flags.no_lock
      || matches!(
        flags.subcommand,
        DenoSubcommand::Install(InstallFlags {
          kind: InstallKind::Global(..),
          ..
        }) | DenoSubcommand::Uninstall(_)
        DenoSubcommand::Install(InstallFlags::Global(..))
          | DenoSubcommand::Uninstall(_)
      )
    {
      return Ok(None);
@@ -163,18 +180,25 @@ impl CliLockfile {
        .unwrap_or(false)
    });

    let lockfile = Self::read_from_path(CliLockfileReadFromPathOptions {
      file_path,
      frozen,
      skip_write: flags.internal.lockfile_skip_write,
    })?;
    let lockfile = Self::read_from_path(
      sys,
      CliLockfileReadFromPathOptions {
        file_path,
        frozen,
        skip_write: flags.internal.lockfile_skip_write,
      },
    )?;

    // initialize the lockfile with the workspace's configuration
    let root_url = workspace.root_dir();
    let config = deno_lockfile::WorkspaceConfig {
      root: WorkspaceMemberConfig {
        package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
        dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
        dependencies: if let Some(map) = maybe_external_import_map {
          import_map_deps(map)
        } else {
          deno_json_deps(root_folder.deno_json.as_deref())
        },
      },
      members: workspace
        .config_folders()

@@ -219,6 +243,7 @@ impl CliLockfile {
  }

  pub fn read_from_path(
    sys: &CliSys,
    opts: CliLockfileReadFromPathOptions,
  ) -> Result<CliLockfile, AnyError> {
    let lockfile = match std::fs::read_to_string(&opts.file_path) {

@@ -237,6 +262,7 @@ impl CliLockfile {
      }
    };
    Ok(CliLockfile {
      sys: sys.clone(),
      filename: lockfile.filename.clone(),
      lockfile: Mutex::new(lockfile),
      frozen: opts.frozen,

@@ -244,7 +270,7 @@ impl CliLockfile {
    })
  }

  pub fn error_if_changed(&self) -> Result<(), AnyError> {
  pub fn error_if_changed(&self) -> Result<(), JsErrorBox> {
    if !self.frozen {
      return Ok(());
    }

@@ -256,9 +282,7 @@ impl CliLockfile {
      let diff = crate::util::diff::diff(&contents, &new_contents);
      // has an extra newline at the end
      let diff = diff.trim_end();
      Err(deno_core::anyhow::anyhow!(
        "The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
      ))
      Err(JsErrorBox::generic(format!("The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}")))
    } else {
      Ok(())
    }
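With the new AtomicWriteFileWithRetriesError, callers of write_if_changed can tell a frozen-lockfile mismatch apart from an I/O failure. A hedged sketch of handling the two variants follows; it is illustrative only and assumes the error types implement Display as their derives suggest.

// Illustrative sketch, not from the diff: distinguishing the two error
// variants returned by the new `write_if_changed`.
fn report_lockfile_write(result: Result<(), AtomicWriteFileWithRetriesError>) {
  match result {
    Ok(()) => {}
    Err(AtomicWriteFileWithRetriesError::Changed(err)) => {
      // the lockfile was frozen and out of date
      eprintln!("{}", err);
    }
    Err(AtomicWriteFileWithRetriesError::Io(err)) => {
      eprintln!("Failed writing lockfile: {}", err);
    }
  }
}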
cli/args/mod.rs (1179 changes): diff suppressed because it is too large.
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
@ -11,19 +11,20 @@ use deno_package_json::PackageJsonDepValueParseError;
|
|||
use deno_package_json::PackageJsonDepWorkspaceReq;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::StackString;
|
||||
use deno_semver::VersionReq;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct InstallNpmRemotePkg {
|
||||
pub alias: Option<String>,
|
||||
pub alias: Option<StackString>,
|
||||
pub base_dir: PathBuf,
|
||||
pub req: PackageReq,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct InstallNpmWorkspacePkg {
|
||||
pub alias: Option<String>,
|
||||
pub alias: Option<StackString>,
|
||||
pub target_dir: PathBuf,
|
||||
}
|
||||
|
||||
|
@ -31,7 +32,7 @@ pub struct InstallNpmWorkspacePkg {
|
|||
#[error("Failed to install '{}'\n at {}", alias, location)]
|
||||
pub struct PackageJsonDepValueParseWithLocationError {
|
||||
pub location: Url,
|
||||
pub alias: String,
|
||||
pub alias: StackString,
|
||||
#[source]
|
||||
pub source: PackageJsonDepValueParseError,
|
||||
}
|
||||
|
@ -100,10 +101,8 @@ impl NpmInstallDepsProvider {
|
|||
let mut pkg_pkgs = Vec::with_capacity(
|
||||
deps.dependencies.len() + deps.dev_dependencies.len(),
|
||||
);
|
||||
for (alias, dep) in deps
|
||||
.dependencies
|
||||
.into_iter()
|
||||
.chain(deps.dev_dependencies.into_iter())
|
||||
for (alias, dep) in
|
||||
deps.dependencies.iter().chain(deps.dev_dependencies.iter())
|
||||
{
|
||||
let dep = match dep {
|
||||
Ok(dep) => dep,
|
||||
|
@ -111,8 +110,8 @@ impl NpmInstallDepsProvider {
|
|||
pkg_json_dep_errors.push(
|
||||
PackageJsonDepValueParseWithLocationError {
|
||||
location: pkg_json.specifier(),
|
||||
alias,
|
||||
source: err,
|
||||
alias: alias.clone(),
|
||||
source: err.clone(),
|
||||
},
|
||||
);
|
||||
continue;
|
||||
|
@ -121,28 +120,28 @@ impl NpmInstallDepsProvider {
|
|||
match dep {
|
||||
PackageJsonDepValue::Req(pkg_req) => {
|
||||
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
|
||||
pkg.matches_req(&pkg_req)
|
||||
pkg.matches_req(pkg_req)
|
||||
// do not resolve to the current package
|
||||
&& pkg.pkg_json.path != pkg_json.path
|
||||
});
|
||||
|
||||
if let Some(pkg) = workspace_pkg {
|
||||
workspace_pkgs.push(InstallNpmWorkspacePkg {
|
||||
alias: Some(alias),
|
||||
alias: Some(alias.clone()),
|
||||
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
|
||||
});
|
||||
} else {
|
||||
pkg_pkgs.push(InstallNpmRemotePkg {
|
||||
alias: Some(alias),
|
||||
alias: Some(alias.clone()),
|
||||
base_dir: pkg_json.dir_path().to_path_buf(),
|
||||
req: pkg_req,
|
||||
req: pkg_req.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
PackageJsonDepValue::Workspace(workspace_version_req) => {
|
||||
let version_req = match workspace_version_req {
|
||||
PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
|
||||
version_req
|
||||
version_req.clone()
|
||||
}
|
||||
PackageJsonDepWorkspaceReq::Tilde
|
||||
| PackageJsonDepWorkspaceReq::Caret => {
|
||||
|
@ -150,10 +149,10 @@ impl NpmInstallDepsProvider {
|
|||
}
|
||||
};
|
||||
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
|
||||
pkg.matches_name_and_version_req(&alias, &version_req)
|
||||
pkg.matches_name_and_version_req(alias, &version_req)
|
||||
}) {
|
||||
workspace_pkgs.push(InstallNpmWorkspacePkg {
|
||||
alias: Some(alias),
|
||||
alias: Some(alias.clone()),
|
||||
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,369 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use base64::prelude::BASE64_STANDARD;
|
||||
use base64::Engine;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use log::debug;
|
||||
use log::error;
|
||||
use std::borrow::Cow;
|
||||
use std::fmt;
|
||||
use std::net::IpAddr;
|
||||
use std::net::Ipv4Addr;
|
||||
use std::net::Ipv6Addr;
|
||||
use std::net::SocketAddr;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum AuthTokenData {
|
||||
Bearer(String),
|
||||
Basic { username: String, password: String },
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct AuthToken {
|
||||
host: AuthDomain,
|
||||
token: AuthTokenData,
|
||||
}
|
||||
|
||||
impl fmt::Display for AuthToken {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match &self.token {
|
||||
AuthTokenData::Bearer(token) => write!(f, "Bearer {token}"),
|
||||
AuthTokenData::Basic { username, password } => {
|
||||
let credentials = format!("{username}:{password}");
|
||||
write!(f, "Basic {}", BASE64_STANDARD.encode(credentials))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A structure which contains bearer tokens that can be used when sending
|
||||
/// requests to websites, intended to authorize access to private resources
|
||||
/// such as remote modules.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AuthTokens(Vec<AuthToken>);
|
||||
|
||||
/// An authorization domain, either an exact or suffix match.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum AuthDomain {
|
||||
Ip(IpAddr),
|
||||
IpPort(SocketAddr),
|
||||
/// Suffix match, no dot. May include a port.
|
||||
Suffix(Cow<'static, str>),
|
||||
}
|
||||
|
||||
impl<T: ToString> From<T> for AuthDomain {
|
||||
fn from(value: T) -> Self {
|
||||
let s = value.to_string().to_lowercase();
|
||||
if let Ok(ip) = SocketAddr::from_str(&s) {
|
||||
return AuthDomain::IpPort(ip);
|
||||
};
|
||||
if s.starts_with('[') && s.ends_with(']') {
|
||||
if let Ok(ip) = Ipv6Addr::from_str(&s[1..s.len() - 1]) {
|
||||
return AuthDomain::Ip(ip.into());
|
||||
}
|
||||
} else if let Ok(ip) = Ipv4Addr::from_str(&s) {
|
||||
return AuthDomain::Ip(ip.into());
|
||||
}
|
||||
if let Some(s) = s.strip_prefix('.') {
|
||||
AuthDomain::Suffix(Cow::Owned(s.to_owned()))
|
||||
} else {
|
||||
AuthDomain::Suffix(Cow::Owned(s))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AuthDomain {
|
||||
pub fn matches(&self, specifier: &ModuleSpecifier) -> bool {
|
||||
let Some(host) = specifier.host_str() else {
|
||||
return false;
|
||||
};
|
||||
match *self {
|
||||
Self::Ip(ip) => {
|
||||
let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
|
||||
return false;
|
||||
};
|
||||
ip == parsed && specifier.port().is_none()
|
||||
}
|
||||
Self::IpPort(ip) => {
|
||||
let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
|
||||
return false;
|
||||
};
|
||||
ip.ip() == parsed && specifier.port() == Some(ip.port())
|
||||
}
|
||||
Self::Suffix(ref suffix) => {
|
||||
let hostname = if let Some(port) = specifier.port() {
|
||||
Cow::Owned(format!("{}:{}", host, port))
|
||||
} else {
|
||||
Cow::Borrowed(host)
|
||||
};
|
||||
|
||||
if suffix.len() == hostname.len() {
|
||||
return suffix == &hostname;
|
||||
}
|
||||
|
||||
// If it's a suffix match, ensure a dot
|
||||
if hostname.ends_with(suffix.as_ref())
|
||||
&& hostname.ends_with(&format!(".{suffix}"))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AuthTokens {
|
||||
/// Create a new set of tokens based on the provided string. It is intended
|
||||
/// that the string be the value of an environment variable and the string is
|
||||
/// parsed for token values. The string is expected to be a semi-colon
|
||||
/// separated string, where each value is `{token}@{hostname}`.
|
||||
pub fn new(maybe_tokens_str: Option<String>) -> Self {
|
||||
let mut tokens = Vec::new();
|
||||
if let Some(tokens_str) = maybe_tokens_str {
|
||||
for token_str in tokens_str.trim().split(';') {
|
||||
if token_str.contains('@') {
|
||||
let mut iter = token_str.rsplitn(2, '@');
|
||||
let host = AuthDomain::from(iter.next().unwrap());
|
||||
let token = iter.next().unwrap();
|
||||
if token.contains(':') {
|
||||
let mut iter = token.rsplitn(2, ':');
|
||||
let password = iter.next().unwrap().to_owned();
|
||||
let username = iter.next().unwrap().to_owned();
|
||||
tokens.push(AuthToken {
|
||||
host,
|
||||
token: AuthTokenData::Basic { username, password },
|
||||
});
|
||||
} else {
|
||||
tokens.push(AuthToken {
|
||||
host,
|
||||
token: AuthTokenData::Bearer(token.to_string()),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
error!("Badly formed auth token discarded.");
|
||||
}
|
||||
}
|
||||
debug!("Parsed {} auth token(s).", tokens.len());
|
||||
}
|
||||
|
||||
Self(tokens)
|
||||
}
|
||||
|
||||
/// Attempt to match the provided specifier to the tokens in the set. The
|
||||
/// matching occurs from the right of the hostname plus port, irrespective of
|
||||
/// scheme. For example `https://www.deno.land:8080/` would match a token
|
||||
/// with a host value of `deno.land:8080` but not match `www.deno.land`. The
|
||||
/// matching is case insensitive.
|
||||
pub fn get(&self, specifier: &ModuleSpecifier) -> Option<AuthToken> {
|
||||
self.0.iter().find_map(|t| {
|
||||
if t.host.matches(specifier) {
|
||||
Some(t.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use deno_core::resolve_url;
|
||||
|
||||
#[test]
|
||||
fn test_auth_token() {
|
||||
let auth_tokens = AuthTokens::new(Some("abc123@deno.land".to_string()));
|
||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer abc123"
|
||||
);
|
||||
let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer abc123".to_string()
|
||||
);
|
||||
let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap();
|
||||
assert_eq!(auth_tokens.get(&fixture), None);
|
||||
let fixture =
|
||||
resolve_url("https://deno.land.example.com/x/mod.ts").unwrap();
|
||||
assert_eq!(auth_tokens.get(&fixture), None);
|
||||
let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
|
||||
assert_eq!(auth_tokens.get(&fixture), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_auth_tokens_multiple() {
|
||||
let auth_tokens =
|
||||
AuthTokens::new(Some("abc123@deno.land;def456@example.com".to_string()));
|
||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer abc123".to_string()
|
||||
);
|
||||
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer def456".to_string()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_auth_tokens_space() {
|
||||
let auth_tokens = AuthTokens::new(Some(
|
||||
" abc123@deno.land;def456@example.com\t".to_string(),
|
||||
));
|
||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer abc123".to_string()
|
||||
);
|
||||
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer def456".to_string()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_auth_tokens_newline() {
|
||||
let auth_tokens = AuthTokens::new(Some(
|
||||
"\nabc123@deno.land;def456@example.com\n".to_string(),
|
||||
));
|
||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer abc123".to_string()
|
||||
);
|
||||
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer def456".to_string()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_auth_tokens_port() {
|
||||
let auth_tokens =
|
||||
AuthTokens::new(Some("abc123@deno.land:8080".to_string()));
|
||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(auth_tokens.get(&fixture), None);
|
||||
let fixture = resolve_url("http://deno.land:8080/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer abc123".to_string()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_auth_tokens_contain_at() {
|
||||
let auth_tokens = AuthTokens::new(Some("abc@123@deno.land".to_string()));
|
||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Bearer abc@123".to_string()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_auth_token_basic() {
|
||||
let auth_tokens = AuthTokens::new(Some("abc:123@deno.land".to_string()));
|
||||
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Basic YWJjOjEyMw=="
|
||||
);
|
||||
let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap();
|
||||
assert_eq!(
|
||||
auth_tokens.get(&fixture).unwrap().to_string(),
|
||||
"Basic YWJjOjEyMw==".to_string()
|
||||
);
|
||||
let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap();
|
||||
assert_eq!(auth_tokens.get(&fixture), None);
|
||||
let fixture =
|
||||
resolve_url("https://deno.land.example.com/x/mod.ts").unwrap();
|
||||
assert_eq!(auth_tokens.get(&fixture), None);
|
||||
let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
|
||||
assert_eq!(auth_tokens.get(&fixture), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ip() {
|
||||
let ip = AuthDomain::from("[2001:db8:a::123]");
|
||||
assert_eq!("Ip(2001:db8:a::123)", format!("{ip:?}"));
|
||||
let ip = AuthDomain::from("[2001:db8:a::123]:8080");
|
||||
assert_eq!("IpPort([2001:db8:a::123]:8080)", format!("{ip:?}"));
|
||||
let ip = AuthDomain::from("1.1.1.1");
|
||||
assert_eq!("Ip(1.1.1.1)", format!("{ip:?}"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_case_insensitive() {
|
||||
let domain = AuthDomain::from("EXAMPLE.com");
|
||||
assert!(
|
||||
domain.matches(&ModuleSpecifier::parse("http://example.com").unwrap())
|
||||
);
|
||||
assert!(
|
||||
domain.matches(&ModuleSpecifier::parse("http://example.COM").unwrap())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_matches() {
|
||||
let candidates = [
|
||||
"example.com",
|
||||
"www.example.com",
|
||||
"1.1.1.1",
|
||||
"[2001:db8:a::123]",
|
||||
// These will never match
|
||||
"example.com.evil.com",
|
||||
"1.1.1.1.evil.com",
|
||||
"notexample.com",
|
||||
"www.notexample.com",
|
||||
];
|
||||
let domains = [
|
||||
("example.com", vec!["example.com", "www.example.com"]),
|
||||
(".example.com", vec!["example.com", "www.example.com"]),
|
||||
("www.example.com", vec!["www.example.com"]),
|
||||
("1.1.1.1", vec!["1.1.1.1"]),
|
||||
("[2001:db8:a::123]", vec!["[2001:db8:a::123]"]),
|
||||
];
|
||||
let url = |c: &str| ModuleSpecifier::parse(&format!("http://{c}")).unwrap();
|
||||
let url_port =
|
||||
|c: &str| ModuleSpecifier::parse(&format!("http://{c}:8080")).unwrap();
|
||||
|
||||
// Generate each candidate with and without a port
|
||||
let candidates = candidates
|
||||
.into_iter()
|
||||
.flat_map(|c| [url(c), url_port(c)])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for (domain, expected_domain) in domains {
|
||||
// Test without a port -- all candidates return without a port
|
||||
let auth_domain = AuthDomain::from(domain);
|
||||
let actual = candidates
|
||||
.iter()
|
||||
.filter(|c| auth_domain.matches(c))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let expected = expected_domain.iter().map(|u| url(u)).collect::<Vec<_>>();
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
// Test with a port, all candidates return with a port
|
||||
let auth_domain = AuthDomain::from(&format!("{domain}:8080"));
|
||||
let actual = candidates
|
||||
.iter()
|
||||
.filter(|c| auth_domain.matches(c))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let expected = expected_domain
|
||||
.iter()
|
||||
.map(|u| url_port(u))
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
const cacheName = "cache-v1";
|
||||
const cache = await caches.open(cacheName);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
Deno.bench("echo deno", async () => {
|
||||
await new Deno.Command("echo", { args: ["deno"] }).output();
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
// v8 builtin that's close to the upper bound non-NOPs
|
||||
Deno.bench("date_now", { n: 5e5 }, () => {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
let total = 5;
|
||||
let current = "";
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
/** @jsx h */
|
||||
import results from "./deno.json" assert { type: "json" };
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
use std::time::Duration;
|
||||
|
||||
use deno_core::serde::Deserialize;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::serde_json::Value;
|
||||
use lsp_types::Uri;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
use std::time::Duration;
|
||||
use test_util::lsp::LspClientBuilder;
|
||||
use test_util::PathRef;
|
||||
use tower_lsp::lsp_types as lsp;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use deno_bench_util::bencher::benchmark_group;
|
||||
use deno_bench_util::bencher::benchmark_main;
|
||||
|
|
|
@ -1,11 +1,8 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
#![allow(clippy::print_stdout)]
|
||||
#![allow(clippy::print_stderr)]
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::convert::From;
|
||||
use std::env;
|
||||
|
@ -15,6 +12,10 @@ use std::path::PathBuf;
|
|||
use std::process::Command;
|
||||
use std::process::Stdio;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::Value;
|
||||
use test_util::PathRef;
|
||||
|
||||
mod lsp;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
import { loadTestLibrary } from "../../../tests/napi/common.js";
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
import { createRequire } from "module";
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
// From https://github.com/just-js/benchmarks/tree/main/01-stdio
|
||||
|
||||
#include <stdlib.h>
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
//
|
||||
// From https://github.com/just-js/benchmarks/tree/main/01-stdio
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
const listener = Deno.listen({ port: 4500 });
|
||||
const response = new TextEncoder().encode(
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
|
||||
|
|
cli/build.rs (128 changes)
|
@ -1,24 +1,24 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_core::snapshot::*;
|
||||
use deno_runtime::*;
|
||||
mod shared;
|
||||
|
||||
mod ts {
|
||||
use super::*;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::op2;
|
||||
use deno_core::OpState;
|
||||
use serde::Serialize;
|
||||
use std::collections::HashMap;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_core::op2;
|
||||
use deno_core::OpState;
|
||||
use deno_error::JsErrorBox;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct BuildInfoResponse {
|
||||
|
@ -51,7 +51,7 @@ mod ts {
|
|||
fn op_script_version(
|
||||
_state: &mut OpState,
|
||||
#[string] _arg: &str,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
) -> Result<Option<String>, JsErrorBox> {
|
||||
Ok(Some("1".to_string()))
|
||||
}
|
||||
|
||||
|
@ -70,7 +70,7 @@ mod ts {
|
|||
fn op_load(
|
||||
state: &mut OpState,
|
||||
#[string] load_specifier: &str,
|
||||
) -> Result<LoadResponse, AnyError> {
|
||||
) -> Result<LoadResponse, JsErrorBox> {
|
||||
let op_crate_libs = state.borrow::<HashMap<&str, PathBuf>>();
|
||||
let path_dts = state.borrow::<PathBuf>();
|
||||
let re_asset = lazy_regex::regex!(r"asset:/{3}lib\.(\S+)\.d\.ts");
|
||||
|
@ -91,12 +91,15 @@ mod ts {
|
|||
// if it comes from an op crate, we were supplied with the path to the
|
||||
// file.
|
||||
let path = if let Some(op_crate_lib) = op_crate_libs.get(lib) {
|
||||
PathBuf::from(op_crate_lib).canonicalize()?
|
||||
PathBuf::from(op_crate_lib)
|
||||
.canonicalize()
|
||||
.map_err(JsErrorBox::from_err)?
|
||||
// otherwise we will generate the path ourself
|
||||
} else {
|
||||
path_dts.join(format!("lib.{lib}.d.ts"))
|
||||
};
|
||||
let data = std::fs::read_to_string(path)?;
|
||||
let data =
|
||||
std::fs::read_to_string(path).map_err(JsErrorBox::from_err)?;
|
||||
Ok(LoadResponse {
|
||||
data,
|
||||
version: "1".to_string(),
|
||||
|
@ -104,13 +107,13 @@ mod ts {
|
|||
script_kind: 3,
|
||||
})
|
||||
} else {
|
||||
Err(custom_error(
|
||||
Err(JsErrorBox::new(
|
||||
"InvalidSpecifier",
|
||||
format!("An invalid specifier was requested: {}", load_specifier),
|
||||
))
|
||||
}
|
||||
} else {
|
||||
Err(custom_error(
|
||||
Err(JsErrorBox::new(
|
||||
"InvalidSpecifier",
|
||||
format!("An invalid specifier was requested: {}", load_specifier),
|
||||
))
|
||||
|
@ -119,10 +122,16 @@ mod ts {
|
|||
|
||||
deno_core::extension!(deno_tsc,
|
||||
ops = [op_build_info, op_is_node_file, op_load, op_script_version],
|
||||
esm_entry_point = "ext:deno_tsc/99_main_compiler.js",
|
||||
esm = [
|
||||
dir "tsc",
|
||||
"97_ts_host.js",
|
||||
"98_lsp.js",
|
||||
"99_main_compiler.js",
|
||||
],
|
||||
js = [
|
||||
dir "tsc",
|
||||
"00_typescript.js",
|
||||
"99_main_compiler.js",
|
||||
],
|
||||
options = {
|
||||
op_crate_libs: HashMap<&'static str, PathBuf>,
|
||||
|
@ -306,57 +315,6 @@ mod ts {
|
|||
println!("cargo:rerun-if-changed={}", path.display());
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn version() -> String {
|
||||
let file_text = std::fs::read_to_string("tsc/00_typescript.js").unwrap();
|
||||
let version_text = " version = \"";
|
||||
for line in file_text.lines() {
|
||||
if let Some(index) = line.find(version_text) {
|
||||
let remaining_line = &line[index + version_text.len()..];
|
||||
return remaining_line[..remaining_line.find('"').unwrap()].to_string();
|
||||
}
|
||||
}
|
||||
panic!("Could not find ts version.")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "hmr"))]
|
||||
fn create_cli_snapshot(snapshot_path: PathBuf) {
|
||||
use deno_runtime::ops::bootstrap::SnapshotOptions;
|
||||
|
||||
let snapshot_options = SnapshotOptions {
|
||||
ts_version: ts::version(),
|
||||
v8_version: deno_core::v8::VERSION_STRING,
|
||||
target: std::env::var("TARGET").unwrap(),
|
||||
};
|
||||
|
||||
deno_runtime::snapshot::create_runtime_snapshot(
|
||||
snapshot_path,
|
||||
snapshot_options,
|
||||
vec![],
|
||||
);
|
||||
}
|
||||
|
||||
fn git_commit_hash() -> String {
|
||||
if let Ok(output) = std::process::Command::new("git")
|
||||
.arg("rev-list")
|
||||
.arg("-1")
|
||||
.arg("HEAD")
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
std::str::from_utf8(&output.stdout[..40])
|
||||
.unwrap()
|
||||
.to_string()
|
||||
} else {
|
||||
// When not in git repository
|
||||
// (e.g. when the user install by `cargo install deno`)
|
||||
"UNKNOWN".to_string()
|
||||
}
|
||||
} else {
|
||||
// When there is no git command for some reason
|
||||
"UNKNOWN".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
|
@ -366,7 +324,7 @@ fn main() {
|
|||
}
|
||||
|
||||
deno_napi::print_linker_flags("deno");
|
||||
deno_napi::print_linker_flags("denort");
|
||||
deno_webgpu::print_linker_flags("deno");
|
||||
|
||||
// Host snapshots won't work when cross compiling.
|
||||
let target = env::var("TARGET").unwrap();
|
||||
|
@ -385,51 +343,15 @@ fn main() {
|
|||
}
|
||||
println!("cargo:rerun-if-env-changed=DENO_CANARY");
|
||||
|
||||
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash());
|
||||
println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
|
||||
println!(
|
||||
"cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
|
||||
&git_commit_hash()[..7]
|
||||
);
|
||||
|
||||
let ts_version = ts::version();
|
||||
debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes
|
||||
println!("cargo:rustc-env=TS_VERSION={}", ts_version);
|
||||
println!("cargo:rerun-if-env-changed=TS_VERSION");
|
||||
|
||||
println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
|
||||
println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap());
|
||||
|
||||
if cfg!(windows) {
|
||||
// these dls load slowly, so delay loading them
|
||||
let dlls = [
|
||||
// webgpu
|
||||
"d3dcompiler_47",
|
||||
"OPENGL32",
|
||||
// network related functions
|
||||
"iphlpapi",
|
||||
];
|
||||
for dll in dlls {
|
||||
println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll");
|
||||
println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll");
|
||||
}
|
||||
// enable delay loading
|
||||
println!("cargo:rustc-link-arg-bin=deno=delayimp.lib");
|
||||
println!("cargo:rustc-link-arg-bin=denort=delayimp.lib");
|
||||
}
|
||||
|
||||
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
|
||||
let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());
|
||||
|
||||
let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin");
|
||||
ts::create_compiler_snapshot(compiler_snapshot_path, &c);
|
||||
|
||||
#[cfg(not(feature = "hmr"))]
|
||||
{
|
||||
let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
|
||||
create_cli_snapshot(cli_snapshot_path);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
let mut res = winres::WindowsResource::new();
|
||||
|
|
cli/cache/cache_db.rs (90 changes, vendored)
|
@ -1,20 +1,20 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::parking_lot::MutexGuard;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_runtime::deno_webstorage::rusqlite;
|
||||
use deno_runtime::deno_webstorage::rusqlite::Connection;
|
||||
use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
|
||||
use deno_runtime::deno_webstorage::rusqlite::Params;
|
||||
use once_cell::sync::OnceCell;
|
||||
use std::io::IsTerminal;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::FastInsecureHasher;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::parking_lot::MutexGuard;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_lib::util::hash::FastInsecureHasher;
|
||||
use deno_runtime::deno_webstorage::rusqlite;
|
||||
use deno_runtime::deno_webstorage::rusqlite::Connection;
|
||||
use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
|
||||
use deno_runtime::deno_webstorage::rusqlite::Params;
|
||||
use once_cell::sync::OnceCell;
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct CacheDBHash(u64);
|
||||
|
@ -24,12 +24,12 @@ impl CacheDBHash {
|
|||
Self(hash)
|
||||
}
|
||||
|
||||
pub fn from_source(source: impl std::hash::Hash) -> Self {
|
||||
pub fn from_hashable(hashable: impl std::hash::Hash) -> Self {
|
||||
Self::new(
|
||||
// always write in the deno version just in case
|
||||
// the clearing on deno version change doesn't work
|
||||
FastInsecureHasher::new_deno_versioned()
|
||||
.write_hashable(source)
|
||||
.write_hashable(hashable)
|
||||
.finish(),
|
||||
)
|
||||
}
|
||||
|
@ -232,7 +232,7 @@ impl CacheDB {
|
|||
config: &CacheDBConfiguration,
|
||||
conn: &Connection,
|
||||
version: &str,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), rusqlite::Error> {
|
||||
let sql = config.create_combined_sql();
|
||||
conn.execute_batch(&sql)?;
|
||||
|
||||
|
@ -265,7 +265,7 @@ impl CacheDB {
|
|||
fn open_connection_and_init(
|
||||
&self,
|
||||
path: Option<&Path>,
|
||||
) -> Result<Connection, AnyError> {
|
||||
) -> Result<Connection, rusqlite::Error> {
|
||||
let conn = self.actually_open_connection(path)?;
|
||||
Self::initialize_connection(self.config, &conn, self.version)?;
|
||||
Ok(conn)
|
||||
|
@ -368,7 +368,9 @@ impl CacheDB {
|
|||
fn open_connection(
|
||||
config: &CacheDBConfiguration,
|
||||
path: Option<&Path>,
|
||||
open_connection_and_init: impl Fn(Option<&Path>) -> Result<Connection, AnyError>,
|
||||
open_connection_and_init: impl Fn(
|
||||
Option<&Path>,
|
||||
) -> Result<Connection, rusqlite::Error>,
|
||||
) -> Result<ConnectionState, AnyError> {
|
||||
// Success on first try? We hope that this is the case.
|
||||
let err = match open_connection_and_init(path) {
|
||||
|
@@ -379,9 +381,20 @@ fn open_connection(
let Some(path) = path.as_ref() else {
// If an in-memory DB fails, that's game over
log::error!("Failed to initialize in-memory cache database.");
return Err(err);
return Err(err.into());
};

// reduce logging for readonly file system
if let rusqlite::Error::SqliteFailure(ffi_err, _) = &err {
if ffi_err.code == rusqlite::ErrorCode::ReadOnly {
log::debug!(
"Failed creating cache db. Folder readonly: {}",
path.display()
);
return handle_failure_mode(config, err, open_connection_and_init);
}
}

// ensure the parent directory exists
if let Some(parent) = path.parent() {
match std::fs::create_dir_all(parent) {
@@ -410,10 +423,11 @@ fn open_connection(
// Failed, try deleting it
let is_tty = std::io::stderr().is_terminal();
log::log!(
if is_tty { log::Level::Warn } else { log::Level::Trace },
"Could not initialize cache database '{}', deleting and retrying... ({err:?})",
path.to_string_lossy()
);

if std::fs::remove_file(path).is_ok() {
// Try a third time if we successfully deleted it
let res = open_connection_and_init(Some(path));
@@ -422,6 +436,11 @@ fn open_connection(
};
}

log_failure_mode(path, is_tty, config);
handle_failure_mode(config, err, open_connection_and_init)
}

fn log_failure_mode(path: &Path, is_tty: bool, config: &CacheDBConfiguration) {
match config.on_failure {
CacheFailure::InMemory => {
log::log!(
@@ -431,9 +450,8 @@ fn open_connection(
log::Level::Trace
},
"Failed to open cache file '{}', opening in-memory cache.",
path.to_string_lossy()
path.display()
);
Ok(ConnectionState::Connected(open_connection_and_init(None)?))
}
CacheFailure::Blackhole => {
log::log!(
@@ -443,23 +461,36 @@ fn open_connection(
log::Level::Trace
},
"Failed to open cache file '{}', performance may be degraded.",
path.to_string_lossy()
path.display()
);
Ok(ConnectionState::Blackhole)
}
CacheFailure::Error => {
log::error!(
"Failed to open cache file '{}', expect further errors.",
path.to_string_lossy()
path.display()
);
Err(err)
}
}
}

fn handle_failure_mode(
config: &CacheDBConfiguration,
err: rusqlite::Error,
open_connection_and_init: impl Fn(
Option<&Path>,
) -> Result<Connection, rusqlite::Error>,
) -> Result<ConnectionState, AnyError> {
match config.on_failure {
CacheFailure::InMemory => {
Ok(ConnectionState::Connected(open_connection_and_init(None)?))
}
CacheFailure::Blackhole => Ok(ConnectionState::Blackhole),
CacheFailure::Error => Err(err.into()),
}
}

#[cfg(test)]
mod tests {
use deno_core::anyhow::anyhow;
use test_util::TempDir;

use super::*;
@@ -520,7 +551,8 @@ mod tests {
let path = temp_dir.path().join("data").to_path_buf();
let state = open_connection(&TEST_DB, Some(path.as_path()), |maybe_path| {
match maybe_path {
Some(_) => Err(anyhow!("fail")),
// this error was chosen because it was an error easy to construct
Some(_) => Err(rusqlite::Error::SqliteSingleThreadedMode),
None => Ok(Connection::open_in_memory().unwrap()),
}
})
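The retry-then-fall-back flow above can be hard to follow from hunks alone. The sketch below mirrors the on-failure dispatch with plain std types; CacheFailure, ConnectionState and the connection type are local stand-ins for the real cache_db.rs items, not the actual API.

// Minimal sketch of the failure-mode dispatch; names mirror cache_db.rs but are stand-ins.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum CacheFailure {
  InMemory,
  Blackhole,
  Error,
}

#[allow(dead_code)]
enum ConnectionState {
  Connected(String), // stands in for a rusqlite::Connection
  Blackhole,
}

fn handle_failure_mode(
  on_failure: CacheFailure,
  err: std::io::Error,
  open_in_memory: impl Fn() -> Result<String, std::io::Error>,
) -> Result<ConnectionState, std::io::Error> {
  match on_failure {
    // fall back to an in-memory database and keep going
    CacheFailure::InMemory => Ok(ConnectionState::Connected(open_in_memory()?)),
    // silently discard cache writes; performance degrades but nothing fails
    CacheFailure::Blackhole => Ok(ConnectionState::Blackhole),
    // propagate the original error to the caller
    CacheFailure::Error => Err(err),
  }
}

fn main() {
  let err = std::io::Error::new(std::io::ErrorKind::Other, "disk is readonly");
  let state = handle_failure_mode(CacheFailure::Blackhole, err, || {
    Ok("in-memory connection".to_string())
  });
  assert!(matches!(state, Ok(ConnectionState::Blackhole)));
}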
cli/cache/caches.rs (13 changes, vendored)
@@ -1,19 +1,20 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::path::PathBuf;
use std::sync::Arc;

use deno_lib::version::DENO_VERSION_INFO;
use once_cell::sync::OnceCell;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::check::TYPE_CHECK_CACHE_DB;
use super::code_cache::CODE_CACHE_DB;
use super::deno_dir::DenoDirProvider;
use super::fast_check::FAST_CHECK_CACHE_DB;
use super::incremental::INCREMENTAL_CACHE_DB;
use super::module_info::MODULE_INFO_CACHE_DB;
use super::node::NODE_ANALYSIS_CACHE_DB;
use crate::cache::DenoDirProvider;

pub struct Caches {
dir_provider: Arc<DenoDirProvider>,
@@ -48,13 +49,9 @@ impl Caches {
cell
.get_or_init(|| {
if let Some(path) = path {
CacheDB::from_path(
config,
path,
crate::version::DENO_VERSION_INFO.deno,
)
CacheDB::from_path(config, path, DENO_VERSION_INFO.deno)
} else {
CacheDB::in_memory(config, crate::version::DENO_VERSION_INFO.deno)
CacheDB::in_memory(config, DENO_VERSION_INFO.deno)
}
})
.clone()
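Caches initializes each database lazily through a cell, so the version string is only consulted once per cache. Below is a hedged, std-only sketch of that get-or-init pattern; it uses std::sync::OnceLock instead of the once_cell crate used above, and CacheDB plus the "2.0.0" version string are stand-ins.

use std::sync::OnceLock;

// Stand-in for CacheDB; the real type wraps a rusqlite connection.
#[derive(Clone, Debug)]
struct CacheDB {
  version: &'static str,
  path: Option<String>,
}

struct Caches {
  emit_cache_db: OnceLock<CacheDB>,
}

impl Caches {
  fn emit_cache_db(&self, path: Option<String>) -> CacheDB {
    self
      .emit_cache_db
      .get_or_init(|| match path {
        Some(path) => CacheDB { version: "2.0.0", path: Some(path) },
        None => CacheDB { version: "2.0.0", path: None },
      })
      .clone()
  }
}

fn main() {
  let caches = Caches { emit_cache_db: OnceLock::new() };
  let a = caches.emit_cache_db(Some("/tmp/deno/emit.db".to_string()));
  let b = caches.emit_cache_db(None); // second call reuses the already-initialized DB
  assert_eq!(a.path, b.path);
  assert_eq!(a.version, b.version);
}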
cli/cache/check.rs (9 changes, vendored)
@@ -1,12 +1,13 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::deno_webstorage::rusqlite::params;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::deno_webstorage::rusqlite::params;

pub static TYPE_CHECK_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
table_initializer: concat!(
cli/cache/code_cache.rs (12 changes, vendored)
@@ -1,14 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::sync::Arc;
// Copyright 2018-2025 the Deno authors. MIT license.

use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::code_cache;
use deno_runtime::deno_webstorage::rusqlite::params;

use crate::worker::CliCodeCache;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
@@ -86,12 +82,6 @@ impl CodeCache {
}
}

impl CliCodeCache for CodeCache {
fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> {
self
}
}

impl code_cache::CodeCache for CodeCache {
fn get_sync(
&self,
cli/cache/deno_dir.rs (176 changes, vendored)
@@ -1,33 +1,40 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use once_cell::sync::OnceCell;

use super::DiskCache;
// Copyright 2018-2025 the Deno authors. MIT license.

use std::env;
use std::path::PathBuf;
use std::sync::Arc;

use deno_cache_dir::DenoDirResolutionError;

use super::DiskCache;
use crate::factory::CliDenoDirPathProvider;
use crate::sys::CliSys;

/// Lazily creates the deno dir which might be useful in scenarios
/// where functionality wants to continue if the DENO_DIR can't be created.
pub struct DenoDirProvider {
maybe_custom_root: Option<PathBuf>,
deno_dir: OnceCell<std::io::Result<DenoDir>>,
deno_dir_path_provider: Arc<CliDenoDirPathProvider>,
sys: CliSys,
deno_dir: once_cell::sync::OnceCell<DenoDir>,
}

impl DenoDirProvider {
pub fn new(maybe_custom_root: Option<PathBuf>) -> Self {
pub fn new(
sys: CliSys,
deno_dir_path_provider: Arc<CliDenoDirPathProvider>,
) -> Self {
Self {
maybe_custom_root,
sys,
deno_dir_path_provider,
deno_dir: Default::default(),
}
}

pub fn get_or_create(&self) -> Result<&DenoDir, std::io::Error> {
self
.deno_dir
.get_or_init(|| DenoDir::new(self.maybe_custom_root.clone()))
.as_ref()
.map_err(|err| std::io::Error::new(err.kind(), err.to_string()))
pub fn get_or_create(&self) -> Result<&DenoDir, DenoDirResolutionError> {
self.deno_dir.get_or_try_init(|| {
let path = self.deno_dir_path_provider.get_or_create()?;
Ok(DenoDir::new(self.sys.clone(), path.clone()))
})
}
}

@@ -42,36 +49,14 @@ pub struct DenoDir {
}

impl DenoDir {
pub fn new(maybe_custom_root: Option<PathBuf>) -> std::io::Result<Self> {
let maybe_custom_root =
maybe_custom_root.or_else(|| env::var("DENO_DIR").map(String::into).ok());
let root: PathBuf = if let Some(root) = maybe_custom_root {
root
} else if let Some(cache_dir) = dirs::cache_dir() {
// We use the OS cache dir because all files deno writes are cache files
// Once that changes we need to start using different roots if DENO_DIR
// is not set, and keep a single one if it is.
cache_dir.join("deno")
} else if let Some(home_dir) = dirs::home_dir() {
// fallback path
home_dir.join(".deno")
} else {
panic!("Could not set the Deno root directory")
};
let root = if root.is_absolute() {
root
} else {
std::env::current_dir()?.join(root)
};
pub fn new(sys: CliSys, root: PathBuf) -> Self {
assert!(root.is_absolute());
let gen_path = root.join("gen");

let deno_dir = Self {
Self {
root,
gen_cache: DiskCache::new(&gen_path),
};

Ok(deno_dir)
gen_cache: DiskCache::new(sys, gen_path),
}
}

/// The root directory of the DENO_DIR for display purposes only.
@@ -166,112 +151,3 @@ impl DenoDir {
self.root.join("dl")
}
}

/// To avoid the poorly managed dirs crate
#[cfg(not(windows))]
pub mod dirs {
use std::path::PathBuf;

pub fn cache_dir() -> Option<PathBuf> {
if cfg!(target_os = "macos") {
home_dir().map(|h| h.join("Library/Caches"))
} else {
std::env::var_os("XDG_CACHE_HOME")
.map(PathBuf::from)
.or_else(|| home_dir().map(|h| h.join(".cache")))
}
}

pub fn home_dir() -> Option<PathBuf> {
std::env::var_os("HOME")
.and_then(|h| if h.is_empty() { None } else { Some(h) })
.or_else(|| {
// TODO(bartlomieju):
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe {
fallback()
}
})
.map(PathBuf::from)
}

// This piece of code is taken from the deprecated home_dir() function in Rust's standard library: https://github.com/rust-lang/rust/blob/master/src/libstd/sys/unix/os.rs#L579
// The same code is used by the dirs crate
unsafe fn fallback() -> Option<std::ffi::OsString> {
let amt = match libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) {
n if n < 0 => 512_usize,
n => n as usize,
};
let mut buf = Vec::with_capacity(amt);
let mut passwd: libc::passwd = std::mem::zeroed();
let mut result = std::ptr::null_mut();
match libc::getpwuid_r(
libc::getuid(),
&mut passwd,
buf.as_mut_ptr(),
buf.capacity(),
&mut result,
) {
0 if !result.is_null() => {
let ptr = passwd.pw_dir as *const _;
let bytes = std::ffi::CStr::from_ptr(ptr).to_bytes().to_vec();
Some(std::os::unix::ffi::OsStringExt::from_vec(bytes))
}
_ => None,
}
}
}

/// To avoid the poorly managed dirs crate
// Copied from
// https://github.com/dirs-dev/dirs-sys-rs/blob/ec7cee0b3e8685573d847f0a0f60aae3d9e07fa2/src/lib.rs#L140-L164
// MIT license. Copyright (c) 2018-2019 dirs-rs contributors
#[cfg(windows)]
pub mod dirs {
use std::ffi::OsString;
use std::os::windows::ffi::OsStringExt;
use std::path::PathBuf;
use winapi::shared::winerror;
use winapi::um::combaseapi;
use winapi::um::knownfolders;
use winapi::um::shlobj;
use winapi::um::shtypes;
use winapi::um::winbase;
use winapi::um::winnt;

fn known_folder(folder_id: shtypes::REFKNOWNFOLDERID) -> Option<PathBuf> {
// SAFETY: winapi calls
unsafe {
let mut path_ptr: winnt::PWSTR = std::ptr::null_mut();
let result = shlobj::SHGetKnownFolderPath(
folder_id,
0,
std::ptr::null_mut(),
&mut path_ptr,
);
if result == winerror::S_OK {
let len = winbase::lstrlenW(path_ptr) as usize;
let path = std::slice::from_raw_parts(path_ptr, len);
let ostr: OsString = OsStringExt::from_wide(path);
combaseapi::CoTaskMemFree(path_ptr as *mut winapi::ctypes::c_void);
Some(PathBuf::from(ostr))
} else {
None
}
}
}

pub fn cache_dir() -> Option<PathBuf> {
known_folder(&knownfolders::FOLDERID_LocalAppData)
}

pub fn home_dir() -> Option<PathBuf> {
if let Some(userprofile) = std::env::var_os("USERPROFILE") {
if !userprofile.is_empty() {
return Some(PathBuf::from(userprofile));
}
}

known_folder(&knownfolders::FOLDERID_Profile)
}
}
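The removed dirs helpers above implemented the DENO_DIR resolution order (explicit root, then DENO_DIR, then the OS cache dir, then a home-dir fallback); judging by the new imports, that resolution now goes through CliDenoDirPathProvider and deno_cache_dir. A rough std-only sketch of the same precedence, with the OS cache dir lookup left as an assumption rather than the real platform logic:

use std::env;
use std::path::PathBuf;

// Placeholder for platform-specific lookup (XDG_CACHE_HOME, ~/Library/Caches, %LocalAppData%, ...).
fn os_cache_dir() -> Option<PathBuf> {
  env::var_os("XDG_CACHE_HOME").map(PathBuf::from)
}

fn resolve_deno_dir(maybe_custom_root: Option<PathBuf>) -> Option<PathBuf> {
  maybe_custom_root
    .or_else(|| env::var_os("DENO_DIR").map(PathBuf::from))
    .or_else(|| os_cache_dir().map(|d| d.join("deno")))
    .or_else(|| env::var_os("HOME").map(|h| PathBuf::from(h).join(".deno")))
}

fn main() {
  println!("{:?}", resolve_deno_dir(None));
}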
cli/cache/disk_cache.rs (43 changes, vendored)
@@ -1,31 +1,32 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use super::CACHE_PERM;
use crate::util::fs::atomic_write_file_with_retries;

use deno_cache_dir::url_to_filename;
use deno_core::url::Host;
use deno_core::url::Url;
use std::ffi::OsStr;
use std::fs;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::path::Prefix;
use std::str;

use deno_cache_dir::url_to_filename;
use deno_cache_dir::CACHE_PERM;
use deno_core::url::Host;
use deno_core::url::Url;
use deno_path_util::fs::atomic_write_file_with_retries;
use sys_traits::FsRead;

use crate::sys::CliSys;

#[derive(Debug, Clone)]
pub struct DiskCache {
sys: CliSys,
pub location: PathBuf,
}

impl DiskCache {
/// `location` must be an absolute path.
pub fn new(location: &Path) -> Self {
pub fn new(sys: CliSys, location: PathBuf) -> Self {
assert!(location.is_absolute());
Self {
location: location.to_owned(),
}
Self { sys, location }
}

fn get_cache_filename(&self, url: &Url) -> Option<PathBuf> {
@@ -115,25 +116,29 @@ impl DiskCache {

pub fn get(&self, filename: &Path) -> std::io::Result<Vec<u8>> {
let path = self.location.join(filename);
fs::read(path)
Ok(self.sys.fs_read(path)?.into_owned())
}

pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
let path = self.location.join(filename);
atomic_write_file_with_retries(&path, data, CACHE_PERM)
atomic_write_file_with_retries(&self.sys, &path, data, CACHE_PERM)
}
}

#[cfg(test)]
mod tests {
use super::*;
// ok, testing
#[allow(clippy::disallowed_types)]
use sys_traits::impls::RealSys;
use test_util::TempDir;

use super::*;

#[test]
fn test_set_get_cache_file() {
let temp_dir = TempDir::new();
let sub_dir = temp_dir.path().join("sub_dir");
let cache = DiskCache::new(&sub_dir.to_path_buf());
let cache = DiskCache::new(RealSys, sub_dir.to_path_buf());
let path = PathBuf::from("foo/bar.txt");
cache.set(&path, b"hello").unwrap();
assert_eq!(cache.get(&path).unwrap(), b"hello");
@@ -147,7 +152,7 @@ mod tests {
PathBuf::from("/deno_dir/")
};

let cache = DiskCache::new(&cache_location);
let cache = DiskCache::new(RealSys, cache_location);

let mut test_cases = vec![
(
@@ -203,7 +208,7 @@ mod tests {
} else {
"/foo"
};
let cache = DiskCache::new(&PathBuf::from(p));
let cache = DiskCache::new(RealSys, PathBuf::from(p));

let mut test_cases = vec![
(
@@ -251,7 +256,7 @@ mod tests {
PathBuf::from("/deno_dir/")
};

let cache = DiskCache::new(&cache_location);
let cache = DiskCache::new(RealSys, cache_location);

let mut test_cases = vec!["unknown://localhost/test.ts"];

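DiskCache now takes a sys handle instead of calling std::fs directly, which is what lets the tests above pass RealSys explicitly. Below is a generic sketch of that injection pattern; the Sys trait and RealFs type are stand-ins for the sys_traits abstractions, not the real API.

use std::path::{Path, PathBuf};

// Stand-in trait; the real code uses the sys_traits crate (FsRead etc.).
trait Sys {
  fn read(&self, path: &Path) -> std::io::Result<Vec<u8>>;
  fn write(&self, path: &Path, data: &[u8]) -> std::io::Result<()>;
}

struct RealFs;

impl Sys for RealFs {
  fn read(&self, path: &Path) -> std::io::Result<Vec<u8>> {
    std::fs::read(path)
  }
  fn write(&self, path: &Path, data: &[u8]) -> std::io::Result<()> {
    if let Some(parent) = path.parent() {
      std::fs::create_dir_all(parent)?;
    }
    std::fs::write(path, data)
  }
}

struct DiskCache<S: Sys> {
  sys: S,
  location: PathBuf,
}

impl<S: Sys> DiskCache<S> {
  fn get(&self, filename: &Path) -> std::io::Result<Vec<u8>> {
    self.sys.read(&self.location.join(filename))
  }
  fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
    self.sys.write(&self.location.join(filename), data)
  }
}

fn main() -> std::io::Result<()> {
  let cache = DiskCache {
    sys: RealFs,
    location: std::env::temp_dir().join("disk_cache_demo"),
  };
  cache.set(Path::new("foo/bar.txt"), b"hello")?;
  assert_eq!(cache.get(Path::new("foo/bar.txt"))?, b"hello");
  Ok(())
}

A test can swap in an in-memory Sys implementation without touching the cache logic, which mirrors why the real code threads CliSys through.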
cli/cache/emit.rs (11 changes, vendored)
@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::path::PathBuf;

@@ -6,6 +6,7 @@ use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::unsync::sync::AtomicFlag;
use deno_lib::version::DENO_VERSION_INFO;

use super::DiskCache;

@@ -23,7 +24,7 @@ impl EmitCache {
disk_cache,
emit_failed_flag: Default::default(),
file_serializer: EmitFileSerializer {
cli_version: crate::version::DENO_VERSION_INFO.deno,
cli_version: DENO_VERSION_INFO.deno,
},
}
}
@@ -147,7 +148,7 @@ impl EmitFileSerializer {
// it's ok to use an insecure hash here because
// if someone can change the emit source then they
// can also change the version hash
crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
deno_lib::util::hash::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
.write(bytes)
// emit should not be re-used between cli versions
.write_str(self.cli_version)
@@ -160,11 +161,13 @@ mod test {
use test_util::TempDir;

use super::*;
use crate::sys::CliSys;

#[test]
pub fn emit_cache_general_use() {
let temp_dir = TempDir::new();
let disk_cache = DiskCache::new(temp_dir.path().as_path());
let disk_cache =
DiskCache::new(CliSys::default(), temp_dir.path().to_path_buf());
let cache = EmitCache {
disk_cache: disk_cache.clone(),
file_serializer: EmitFileSerializer {
cli/cache/fast_check.rs (2 changes, vendored)
@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use deno_core::error::AnyError;
use deno_graph::FastCheckCacheItem;
cli/cache/incremental.rs (23 changes, vendored)
@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::collections::HashMap;
use std::path::Path;
@@ -34,12 +34,16 @@ pub static INCREMENTAL_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
pub struct IncrementalCache(IncrementalCacheInner);

impl IncrementalCache {
pub fn new<TState: std::hash::Hash>(
pub fn new(
db: CacheDB,
state: &TState,
state_hash: CacheDBHash,
initial_file_paths: &[PathBuf],
) -> Self {
IncrementalCache(IncrementalCacheInner::new(db, state, initial_file_paths))
IncrementalCache(IncrementalCacheInner::new(
db,
state_hash,
initial_file_paths,
))
}

pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
@@ -67,12 +71,11 @@ struct IncrementalCacheInner {
}

impl IncrementalCacheInner {
pub fn new<TState: std::hash::Hash>(
pub fn new(
db: CacheDB,
state: &TState,
state_hash: CacheDBHash,
initial_file_paths: &[PathBuf],
) -> Self {
let state_hash = CacheDBHash::from_source(state);
let sql_cache = SqlIncrementalCache::new(db, state_hash);
Self::from_sql_incremental_cache(sql_cache, initial_file_paths)
}
@@ -112,13 +115,13 @@ impl IncrementalCacheInner {

pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
match self.previous_hashes.get(file_path) {
Some(hash) => *hash == CacheDBHash::from_source(file_text),
Some(hash) => *hash == CacheDBHash::from_hashable(file_text),
None => false,
}
}

pub fn update_file(&self, file_path: &Path, file_text: &str) {
let hash = CacheDBHash::from_source(file_text);
let hash = CacheDBHash::from_hashable(file_text);
if let Some(previous_hash) = self.previous_hashes.get(file_path) {
if *previous_hash == hash {
return; // do not bother updating the db file because nothing has changed
@@ -262,7 +265,7 @@ mod test {
let sql_cache = SqlIncrementalCache::new(conn, CacheDBHash::new(1));
let file_path = PathBuf::from("/mod.ts");
let file_text = "test";
let file_hash = CacheDBHash::from_source(file_text);
let file_hash = CacheDBHash::from_hashable(file_text);
sql_cache.set_source_hash(&file_path, file_hash).unwrap();
let cache = IncrementalCacheInner::from_sql_incremental_cache(
sql_cache,
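IncrementalCache::new now takes a precomputed CacheDBHash instead of hashing an arbitrary &TState itself, so callers hash their config once up front and reuse it. A small sketch of that idea, with std's hasher standing in for CacheDBHash::from_hashable and the cache reduced to an in-memory map:

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};

// Stand-in for CacheDBHash::from_hashable.
fn hash_of(value: &impl Hash) -> u64 {
  let mut hasher = DefaultHasher::new();
  value.hash(&mut hasher);
  hasher.finish()
}

struct IncrementalCache {
  // the real cache keys its rows by this precomputed state hash
  state_hash: u64,
  previous_hashes: HashMap<PathBuf, u64>,
}

impl IncrementalCache {
  fn new(state_hash: u64) -> Self {
    Self { state_hash, previous_hashes: HashMap::new() }
  }

  fn is_file_same(&self, path: &Path, text: &str) -> bool {
    self.previous_hashes.get(path) == Some(&hash_of(&text))
  }

  fn update_file(&mut self, path: &Path, text: &str) {
    self.previous_hashes.insert(path.to_path_buf(), hash_of(&text));
  }
}

fn main() {
  // hash the formatter/linter config once, up front
  let state_hash = hash_of(&("fmt-config", 2));
  let mut cache = IncrementalCache::new(state_hash);
  println!("state hash: {:x}", cache.state_hash);
  cache.update_file(Path::new("/mod.ts"), "test");
  assert!(cache.is_file_same(Path::new("/mod.ts"), "test"));
  assert!(!cache.is_file_same(Path::new("/mod.ts"), "changed"));
}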
cli/cache/mod.rs (266 changes, vendored)
@@ -1,18 +1,13 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use crate::args::jsr_url;
use crate::args::CacheSetting;
use crate::errors::get_error_class_name;
use crate::file_fetcher::FetchNoFollowOptions;
use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;

use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind;
use deno_cache_dir::file_fetcher::FileOrRedirect;
use deno_core::futures;
use deno_core::futures::FutureExt;
use deno_core::ModuleSpecifier;
@@ -20,21 +15,21 @@ use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse;
use deno_graph::source::Loader;
use deno_runtime::deno_fs;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_runtime::deno_permissions::PermissionsContainer;
use node_resolver::InNpmPackageChecker;
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::SystemTime;

use crate::args::jsr_url;
use crate::file_fetcher::CliFetchNoFollowErrorKind;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::FetchNoFollowOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::sys::CliSys;

mod cache_db;
mod caches;
mod check;
mod code_cache;
mod common;
mod deno_dir;
mod disk_cache;
mod emit;
@@ -48,8 +43,8 @@ pub use cache_db::CacheDBHash;
pub use caches::Caches;
pub use check::TypeCheckCache;
pub use code_cache::CodeCache;
pub use common::FastInsecureHasher;
pub use deno_dir::dirs::home_dir;
/// Permissions used to save a file in the disk caches.
pub use deno_cache_dir::CACHE_PERM;
pub use deno_dir::DenoDir;
pub use deno_dir::DenoDirProvider;
pub use disk_cache::DiskCache;
@@ -61,123 +56,10 @@ pub use node::NodeAnalysisCache;
pub use parsed_source::LazyGraphSourceParser;
pub use parsed_source::ParsedSourceCache;

/// Permissions used to save a file in the disk caches.
pub const CACHE_PERM: u32 = 0o644;

#[derive(Debug, Clone)]
pub struct RealDenoCacheEnv;

impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
fn read_file_bytes(
&self,
path: &Path,
) -> std::io::Result<Cow<'static, [u8]>> {
std::fs::read(path).map(Cow::Owned)
}

fn atomic_write_file(
&self,
path: &Path,
bytes: &[u8],
) -> std::io::Result<()> {
atomic_write_file_with_retries(path, bytes, CACHE_PERM)
}

fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
crate::util::fs::canonicalize_path(path)
}

fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
std::fs::create_dir_all(path)
}

fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
match std::fs::metadata(path) {
Ok(metadata) => Ok(Some(
metadata.modified().unwrap_or_else(|_| SystemTime::now()),
)),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(err) => Err(err),
}
}

fn is_file(&self, path: &Path) -> bool {
path.is_file()
}

fn time_now(&self) -> SystemTime {
SystemTime::now()
}
}

#[derive(Debug, Clone)]
pub struct DenoCacheEnvFsAdapter<'a>(
pub &'a dyn deno_runtime::deno_fs::FileSystem,
);

impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
fn read_file_bytes(
&self,
path: &Path,
) -> std::io::Result<Cow<'static, [u8]>> {
self
.0
.read_file_sync(path, None)
.map_err(|err| err.into_io_error())
}

fn atomic_write_file(
&self,
path: &Path,
bytes: &[u8],
) -> std::io::Result<()> {
atomic_write_file_with_retries_and_fs(
&AtomicWriteFileFsAdapter {
fs: self.0,
write_mode: CACHE_PERM,
},
path,
bytes,
)
}

fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
}

fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
self
.0
.mkdir_sync(path, true, None)
.map_err(|e| e.into_io_error())
}

fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
self
.0
.stat_sync(path)
.map(|stat| {
stat
.mtime
.map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
})
.map_err(|e| e.into_io_error())
}

fn is_file(&self, path: &Path) -> bool {
self.0.is_file_sync(path)
}

fn time_now(&self) -> SystemTime {
SystemTime::now()
}
}

pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
pub type LocalLspHttpCache =
deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<CliSys>;
pub type LocalLspHttpCache = deno_cache_dir::LocalLspHttpCache<CliSys>;
pub use deno_cache_dir::HttpCache;
use deno_error::JsErrorBox;

pub struct FetchCacherOptions {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
@@ -190,31 +72,31 @@ pub struct FetchCacherOptions {
/// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
file_fetcher: Arc<CliFileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer,
sys: CliSys,
is_deno_publish: bool,
cache_info_enabled: bool,
}

impl FetchCacher {
pub fn new(
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
file_fetcher: Arc<CliFileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
module_info_cache: Arc<ModuleInfoCache>,
sys: CliSys,
options: FetchCacherOptions,
) -> Self {
Self {
file_fetcher,
fs,
global_http_cache,
in_npm_pkg_checker,
module_info_cache,
sys,
file_header_overrides: options.file_header_overrides,
permissions: options.permissions,
is_deno_publish: options.is_deno_publish,
@@ -237,11 +119,7 @@ impl FetchCacher {
} else if specifier.scheme() == "file" {
specifier.to_file_path().ok()
} else {
#[allow(deprecated)]
self
.global_http_cache
.get_global_cache_filepath(specifier)
.ok()
self.global_http_cache.local_path_for_url(specifier).ok()
}
}
}
@@ -276,9 +154,8 @@ impl Loader for FetchCacher {
// symlinked to `/my-project-2/node_modules`), so first we checked if the path
// is in a node_modules dir to avoid needlessly canonicalizing, then now compare
// against the canonicalized specifier.
let specifier = crate::node::resolve_specifier_into_node_modules(
specifier,
self.fs.as_ref(),
let specifier = node_resolver::resolve_specifier_into_node_modules(
&self.sys, specifier,
);
if self.in_npm_pkg_checker.in_npm_package(&specifier) {
return Box::pin(futures::future::ready(Ok(Some(
@@ -311,27 +188,27 @@ impl Loader for FetchCacher {
LoaderCacheSetting::Use => None,
LoaderCacheSetting::Reload => {
if matches!(file_fetcher.cache_setting(), CacheSetting::Only) {
return Err(deno_core::anyhow::anyhow!(
return Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::generic(
"Could not resolve version constraint using only cached data. Try running again without --cached-only"
));
))));
}
Some(CacheSetting::ReloadAll)
}
LoaderCacheSetting::Only => Some(CacheSetting::Only),
};
file_fetcher
.fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions {
specifier: &specifier,
permissions: if is_statically_analyzable {
FetchPermissionsOptionRef::StaticContainer(&permissions)
} else {
FetchPermissionsOptionRef::DynamicContainer(&permissions)
},
maybe_auth: None,
maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(),
},
.fetch_no_follow(
&specifier,
FetchPermissionsOptionRef::Restricted(&permissions,
if is_statically_analyzable {
deno_runtime::deno_permissions::CheckSpecifierKind::Static
} else {
deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic
}),
FetchNoFollowOptions {
maybe_auth: None,
maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(),
maybe_checksum: options.maybe_checksum.as_ref(),
})
.await
@@ -348,7 +225,7 @@ impl Loader for FetchCacher {
(None, None) => None,
};
Ok(Some(LoadResponse::Module {
specifier: file.specifier,
specifier: file.url,
maybe_headers,
content: file.source,
}))
@@ -361,18 +238,45 @@ impl Loader for FetchCacher {
}
})
.unwrap_or_else(|err| {
if let Some(io_err) = err.downcast_ref::<std::io::Error>() {
if io_err.kind() == std::io::ErrorKind::NotFound {
return Ok(None);
} else {
return Err(err);
}
}
let error_class_name = get_error_class_name(&err);
match error_class_name {
"NotFound" => Ok(None),
"NotCached" if options.cache_setting == LoaderCacheSetting::Only => Ok(None),
_ => Err(err),
let err = err.into_kind();
match err {
CliFetchNoFollowErrorKind::FetchNoFollow(err) => {
let err = err.into_kind();
match err {
FetchNoFollowErrorKind::NotFound(_) => Ok(None),
FetchNoFollowErrorKind::UrlToFilePath { .. } |
FetchNoFollowErrorKind::ReadingBlobUrl { .. } |
FetchNoFollowErrorKind::ReadingFile { .. } |
FetchNoFollowErrorKind::FetchingRemote { .. } |
FetchNoFollowErrorKind::ClientError { .. } |
FetchNoFollowErrorKind::NoRemote { .. } |
FetchNoFollowErrorKind::DataUrlDecode { .. } |
FetchNoFollowErrorKind::RedirectResolution { .. } |
FetchNoFollowErrorKind::CacheRead { .. } |
FetchNoFollowErrorKind::CacheSave { .. } |
FetchNoFollowErrorKind::UnsupportedScheme { .. } |
FetchNoFollowErrorKind::RedirectHeaderParse { .. } |
FetchNoFollowErrorKind::InvalidHeader { .. } => Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(err)))),
FetchNoFollowErrorKind::NotCached { .. } => {
if options.cache_setting == LoaderCacheSetting::Only {
Ok(None)
} else {
Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(err))))
}
},
FetchNoFollowErrorKind::ChecksumIntegrity(err) => {
// convert to the equivalent deno_graph error so that it
// enhances it if this is passed to deno_graph
Err(
deno_graph::source::LoadError::ChecksumIntegrity(deno_graph::source::ChecksumIntegrityError {
actual: err.actual,
expected: err.expected,
}),
)
}
}
},
CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(permission_check_error)))),
}
})
}
@@ -387,7 +291,7 @@ impl Loader for FetchCacher {
module_info: &deno_graph::ModuleInfo,
) {
log::debug!("Caching module info for {}", specifier);
let source_hash = CacheDBHash::from_source(source);
let source_hash = CacheDBHash::from_hashable(source);
let result = self.module_info_cache.set_module_info(
specifier,
media_type,
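The loader above no longer guesses at error classes via string names; it matches on structured FetchNoFollowErrorKind variants, and only NotFound (plus NotCached under --cached-only) is treated as "no module". A simplified sketch of that mapping, with a local enum standing in for the deno_cache_dir error type; Ok(None) plays the role of "missing module" and Err of a load error.

// Local stand-ins; the real kinds live in deno_cache_dir's file_fetcher module.
#[allow(dead_code)]
enum FetchErrorKind {
  NotFound,
  NotCached,
  FetchingRemote(String),
}

// Ok(None) means "treat as a missing module"; Err carries a load error message.
fn map_fetch_error(
  err: FetchErrorKind,
  cached_only: bool,
) -> Result<Option<&'static str>, String> {
  match err {
    FetchErrorKind::NotFound => Ok(None),
    FetchErrorKind::NotCached if cached_only => Ok(None),
    FetchErrorKind::NotCached => Err("not cached".to_string()),
    FetchErrorKind::FetchingRemote(msg) => Err(msg),
  }
}

fn main() {
  assert_eq!(map_fetch_error(FetchErrorKind::NotCached, true), Ok(None));
  assert!(map_fetch_error(FetchErrorKind::FetchingRemote("timeout".into()), false).is_err());
}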
cli/cache/module_info.rs (6 changes, vendored)
@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::sync::Arc;

@@ -194,7 +194,7 @@ impl<'a> ModuleInfoCacheModuleAnalyzer<'a> {
source: &Arc<str>,
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache
let source_hash = CacheDBHash::from_source(source);
let source_hash = CacheDBHash::from_hashable(source);
if let Some(info) =
self.load_cached_module_info(specifier, media_type, source_hash)
{
@@ -228,7 +228,7 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
media_type: MediaType,
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache
let source_hash = CacheDBHash::from_source(&source);
let source_hash = CacheDBHash::from_hashable(&source);
if let Some(info) =
self.load_cached_module_info(specifier, media_type, source_hash)
{
cli/cache/node.rs (5 changes, vendored)
@@ -1,15 +1,14 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_runtime::deno_webstorage::rusqlite::params;

use crate::node::CliCjsAnalysis;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheFailure;
use super::CacheDBHash;
use crate::node::CliCjsAnalysis;

pub static NODE_ANALYSIS_CACHE_DB: CacheDBConfiguration =
CacheDBConfiguration {
cli/cache/parsed_source.rs (12 changes, vendored)
@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::collections::HashMap;
use std::sync::Arc;
@@ -95,11 +95,21 @@ impl ParsedSourceCache {
self.sources.lock().remove(specifier);
}

/// Fress all parsed sources from memory.
pub fn free_all(&self) {
self.sources.lock().clear();
}

/// Creates a parser that will reuse a ParsedSource from the store
/// if it exists, or else parse.
pub fn as_capturing_parser(&self) -> CapturingEsParser {
CapturingEsParser::new(None, self)
}

#[cfg(test)]
pub fn len(&self) -> usize {
self.sources.lock().len()
}
}

/// It's ok that this is racy since in non-LSP situations
@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

/// <https://chromedevtools.github.io/devtools-protocol/tot/>
use deno_core::serde_json::Value;
@@ -4,6 +4,7 @@ disallowed-methods = [
]
disallowed-types = [
{ path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" },
{ path = "sys_traits::impls::RealSys", reason = "use crate::sys::CliSys instead" },
]
ignore-interior-mutability = [
"lsp_types::Uri",
cli/emit.rs (102 changes)
@@ -1,10 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use crate::cache::EmitCache;
use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::resolver::CjsTracker;
use std::sync::Arc;

use deno_ast::EmittedSourceText;
use deno_ast::ModuleKind;
use deno_ast::SourceMapOption;
use deno_ast::SourceRange;
@@ -13,18 +11,24 @@ use deno_ast::SourceRangedForSpanned;
use deno_ast::TranspileModuleOptions;
use deno_ast::TranspileResult;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_graph::MediaType;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use std::sync::Arc;
use deno_lib::util::hash::FastInsecureHasher;

use crate::cache::EmitCache;
use crate::cache::ParsedSourceCache;
use crate::resolver::CliCjsTracker;

#[derive(Debug)]
pub struct Emitter {
cjs_tracker: Arc<CjsTracker>,
cjs_tracker: Arc<CliCjsTracker>,
emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
transpile_and_emit_options:
@@ -35,7 +39,7 @@ pub struct Emitter {

impl Emitter {
pub fn new(
cjs_tracker: Arc<CjsTracker>,
cjs_tracker: Arc<CliCjsTracker>,
emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
transpile_options: deno_ast::TranspileOptions,
@@ -108,9 +112,9 @@ impl Emitter {
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
module_kind: deno_ast::ModuleKind,
module_kind: ModuleKind,
source: &Arc<str>,
) -> Result<String, AnyError> {
) -> Result<String, EmitParsedSourceHelperError> {
// Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, module_kind, source) {
@@ -122,7 +126,7 @@ impl Emitter {
let transpiled_source = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source = source.clone();
move || -> Result<_, AnyError> {
move || {
EmitParsedSourceHelper::transpile(
&parsed_source_cache,
&specifier,
@@ -132,6 +136,7 @@ impl Emitter {
&transpile_and_emit_options.0,
&transpile_and_emit_options.1,
)
.map(|r| r.text)
}
})
.await
@@ -152,7 +157,7 @@ impl Emitter {
media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>,
) -> Result<String, AnyError> {
) -> Result<String, EmitParsedSourceHelperError> {
// Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, module_kind, source) {
@@ -166,7 +171,8 @@ impl Emitter {
source.clone(),
&self.transpile_and_emit_options.0,
&self.transpile_and_emit_options.1,
)?;
)?
.text;
helper.post_emit_parsed_source(
specifier,
&transpiled_source,
@@ -177,11 +183,36 @@ impl Emitter {
}
}

pub fn emit_parsed_source_for_deno_compile(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>,
) -> Result<(String, String), AnyError> {
let mut emit_options = self.transpile_and_emit_options.1.clone();
emit_options.inline_sources = false;
emit_options.source_map = SourceMapOption::Separate;
// strip off the path to have more deterministic builds as we don't care
// about the source name because we manually provide the source map to v8
emit_options.source_map_base = Some(deno_path_util::url_parent(specifier));
let source = EmitParsedSourceHelper::transpile(
&self.parsed_source_cache,
specifier,
media_type,
module_kind,
source.clone(),
&self.transpile_and_emit_options.0,
&emit_options,
)?;
Ok((source.text, source.source_map.unwrap()))
}

/// Expects a file URL, panics otherwise.
pub async fn load_and_emit_for_hmr(
&self,
specifier: &ModuleSpecifier,
) -> Result<String, AnyError> {
) -> Result<String, CoreError> {
let media_type = MediaType::from_specifier(specifier);
let source_code = tokio::fs::read_to_string(
ModuleSpecifier::to_file_path(specifier).unwrap(),
@@ -196,17 +227,21 @@ impl Emitter {
let source_arc: Arc<str> = source_code.into();
let parsed_source = self
.parsed_source_cache
.remove_or_parse_module(specifier, source_arc, media_type)?;
.remove_or_parse_module(specifier, source_arc, media_type)
.map_err(JsErrorBox::from_err)?;
// HMR doesn't work with embedded source maps for some reason, so set
// the option to not use them (though you should test this out because
// this statement is probably wrong)
let mut options = self.transpile_and_emit_options.1.clone();
options.source_map = SourceMapOption::None;
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
specifier,
media_type,
parsed_source.compute_is_script(),
)?;
let is_cjs = self
.cjs_tracker
.is_cjs_with_known_is_script(
specifier,
media_type,
parsed_source.compute_is_script(),
)
.map_err(JsErrorBox::from_err)?;
let transpiled_source = parsed_source
.transpile(
&self.transpile_and_emit_options.0,
@@ -214,7 +249,8 @@ impl Emitter {
module_kind: Some(ModuleKind::from_is_cjs(is_cjs)),
},
&options,
)?
)
.map_err(JsErrorBox::from_err)?
.into_source();
Ok(transpiled_source.text)
}
@@ -253,6 +289,19 @@ enum PreEmitResult {
NotCached { source_hash: u64 },
}

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum EmitParsedSourceHelperError {
#[class(inherit)]
#[error(transparent)]
ParseDiagnostic(#[from] deno_ast::ParseDiagnostic),
#[class(inherit)]
#[error(transparent)]
Transpile(#[from] deno_ast::TranspileError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}

/// Helper to share code between async and sync emit_parsed_source methods.
struct EmitParsedSourceHelper<'a>(&'a Emitter);

@@ -282,7 +331,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
source: Arc<str>,
transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions,
) -> Result<String, AnyError> {
) -> Result<EmittedSourceText, EmitParsedSourceHelperError> {
// nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned
let parsed_source = parsed_source_cache
@@ -302,8 +351,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
source
}
};
debug_assert!(transpiled_source.source_map.is_none());
Ok(transpiled_source.text)
Ok(transpiled_source)
}

pub fn post_emit_parsed_source(
@@ -323,7 +371,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
// todo(dsherret): this is a temporary measure until we have swc erroring for this
fn ensure_no_import_assertion(
parsed_source: &deno_ast::ParsedSource,
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
fn has_import_assertion(text: &str) -> bool {
// good enough
text.contains(" assert ") && !text.contains(" with ")
@@ -332,7 +380,7 @@ fn ensure_no_import_assertion(
fn create_err(
parsed_source: &deno_ast::ParsedSource,
range: SourceRange,
) -> AnyError {
) -> JsErrorBox {
let text_info = parsed_source.text_info_lazy();
let loc = text_info.line_and_column_display(range.start);
let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string();
@@ -345,7 +393,7 @@ fn ensure_no_import_assertion(
loc.line_number,
loc.column_number,
));
deno_core::anyhow::anyhow!("{}", msg)
JsErrorBox::generic(msg)
}

let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else {
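cli/emit.rs replaces its AnyError returns with a dedicated EmitParsedSourceHelperError, so callers can distinguish parse diagnostics from transpile failures. The sketch below shows the same shape using only std; the real enum derives thiserror::Error and deno_error::JsError and wraps the deno_ast error types, while the emit body here is only a toy check modeled on ensure_no_import_assertion.

use std::fmt;

// Stand-in for EmitParsedSourceHelperError.
#[allow(dead_code)]
#[derive(Debug)]
enum EmitError {
  ParseDiagnostic(String),
  Transpile(String),
  Other(String),
}

impl fmt::Display for EmitError {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      EmitError::ParseDiagnostic(msg) => write!(f, "parse error: {msg}"),
      EmitError::Transpile(msg) => write!(f, "transpile error: {msg}"),
      EmitError::Other(msg) => write!(f, "{msg}"),
    }
  }
}

impl std::error::Error for EmitError {}

fn emit(source: &str) -> Result<String, EmitError> {
  // mirrors ensure_no_import_assertion: reject legacy import assertions
  if source.contains(" assert ") && !source.contains(" with ") {
    return Err(EmitError::Other(
      "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string(),
    ));
  }
  Ok(source.to_string())
}

fn main() {
  assert!(emit("import data from './a.json' assert { type: \"json\" };").is_err());
  assert!(emit("import data from './a.json' with { type: \"json\" };").is_ok());
}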
cli/errors.rs (119 changes)
@@ -1,119 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

//! There are many types of errors in Deno:
//! - AnyError: a generic wrapper that can encapsulate any type of error.
//! - JsError: a container for the error message and stack trace for exceptions
//!   thrown in JavaScript code. We use this to pretty-print stack traces.
//! - Diagnostic: these are errors that originate in TypeScript's compiler.
//!   They're similar to JsError, in that they have line numbers. But
//!   Diagnostics are compile-time type errors, whereas JsErrors are runtime
//!   exceptions.

use deno_ast::ParseDiagnostic;
use deno_core::error::AnyError;
use deno_graph::source::ResolveError;
use deno_graph::ModuleError;
use deno_graph::ModuleGraphError;
use deno_graph::ModuleLoadError;
use deno_graph::ResolutionError;
use import_map::ImportMapError;

fn get_import_map_error_class(_: &ImportMapError) -> &'static str {
"URIError"
}

fn get_diagnostic_class(_: &ParseDiagnostic) -> &'static str {
"SyntaxError"
}

fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
use deno_graph::JsrLoadError;
use deno_graph::NpmLoadError;

match err {
ModuleGraphError::ResolutionError(err)
| ModuleGraphError::TypesResolutionError(err) => {
get_resolution_error_class(err)
}
ModuleGraphError::ModuleError(err) => match err {
ModuleError::InvalidTypeAssertion { .. } => "SyntaxError",
ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic),
ModuleError::WasmParseErr(..) => "SyntaxError",
ModuleError::UnsupportedMediaType { .. }
| ModuleError::UnsupportedImportAttributeType { .. } => "TypeError",
ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => {
"NotFound"
}
ModuleError::LoadingErr(_, _, err) => match err {
ModuleLoadError::Loader(err) => get_error_class_name(err.as_ref()),
ModuleLoadError::HttpsChecksumIntegrity(_)
| ModuleLoadError::TooManyRedirects => "Error",
ModuleLoadError::NodeUnknownBuiltinModule(_) => "NotFound",
ModuleLoadError::Decode(_) => "TypeError",
ModuleLoadError::Npm(err) => match err {
NpmLoadError::NotSupportedEnvironment
| NpmLoadError::PackageReqResolution(_)
| NpmLoadError::RegistryInfo(_) => "Error",
NpmLoadError::PackageReqReferenceParse(_) => "TypeError",
},
ModuleLoadError::Jsr(err) => match err {
JsrLoadError::UnsupportedManifestChecksum
| JsrLoadError::PackageFormat(_) => "TypeError",
JsrLoadError::ContentLoadExternalSpecifier
| JsrLoadError::ContentLoad(_)
| JsrLoadError::ContentChecksumIntegrity(_)
| JsrLoadError::PackageManifestLoad(_, _)
| JsrLoadError::PackageVersionManifestChecksumIntegrity(..)
| JsrLoadError::PackageVersionManifestLoad(_, _)
| JsrLoadError::RedirectInPackage(_) => "Error",
JsrLoadError::PackageNotFound(_)
| JsrLoadError::PackageReqNotFound(_)
| JsrLoadError::PackageVersionNotFound(_)
| JsrLoadError::UnknownExport { .. } => "NotFound",
},
},
},
}
}

fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
match err {
ResolutionError::ResolverError { error, .. } => {
use ResolveError::*;
match error.as_ref() {
Specifier(_) => "TypeError",
Other(e) => get_error_class_name(e),
}
}
_ => "TypeError",
}
}

fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
"TypeError"
}

pub fn get_error_class_name(e: &AnyError) -> &'static str {
deno_runtime::errors::get_error_class_name(e)
.or_else(|| {
e.downcast_ref::<ImportMapError>()
.map(get_import_map_error_class)
})
.or_else(|| {
e.downcast_ref::<ParseDiagnostic>()
.map(get_diagnostic_class)
})
.or_else(|| {
e.downcast_ref::<ModuleGraphError>()
.map(get_module_graph_error_class)
})
.or_else(|| {
e.downcast_ref::<ResolutionError>()
.map(get_resolution_error_class)
})
.or_else(|| {
e.downcast_ref::<std::num::TryFromIntError>()
.map(get_try_from_int_error_class)
})
.unwrap_or("Error")
}
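With cli/errors.rs deleted, the error "class" no longer comes from downcasting an AnyError against a table of functions; each error type reports its own class, which is what the graph_util changes below rely on via JsErrorClass::get_class and JsErrorBox::new. A self-contained sketch of that trait-per-type approach; the trait and error structs here are local stand-ins for deno_error, not its real API surface.

// Stand-in for deno_error::JsErrorClass; the real trait also carries message helpers.
trait JsErrorClass {
  fn get_class(&self) -> &'static str;
}

#[allow(dead_code)]
#[derive(Debug)]
struct ImportMapError(String);

#[allow(dead_code)]
#[derive(Debug)]
struct ParseDiagnostic(String);

impl JsErrorClass for ImportMapError {
  fn get_class(&self) -> &'static str {
    "URIError"
  }
}

impl JsErrorClass for ParseDiagnostic {
  fn get_class(&self) -> &'static str {
    "SyntaxError"
  }
}

fn class_of(err: &dyn JsErrorClass) -> &'static str {
  // no downcast table needed: the error knows its own class
  err.get_class()
}

fn main() {
  assert_eq!(class_of(&ImportMapError("bad key".into())), "URIError");
  assert_eq!(class_of(&ParseDiagnostic("unexpected token".into())), "SyntaxError");
}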
cli/factory.rs (1078 changes): file diff suppressed because it is too large
cli/file_fetcher.rs (1448 changes): file diff suppressed because it is too large
@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::sync::Arc;
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::error::Error;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_config::deno_json;
|
||||
use deno_config::deno_json::JsxImportSourceConfig;
|
||||
use deno_config::deno_json::NodeModulesDirMode;
|
||||
use deno_config::workspace::JsrPackageConfig;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_error::JsErrorClass;
|
||||
use deno_graph::source::Loader;
|
||||
use deno_graph::source::LoaderChecksum;
|
||||
use deno_graph::source::ResolutionKind;
|
||||
use deno_graph::source::ResolveError;
|
||||
use deno_graph::FillFromLockfileOptions;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_graph::JsrLoadError;
|
||||
use deno_graph::ModuleError;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_graph::ModuleGraphError;
|
||||
use deno_graph::ModuleLoadError;
|
||||
use deno_graph::ResolutionError;
|
||||
use deno_graph::SpecifierError;
|
||||
use deno_graph::WorkspaceFastCheckOption;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_resolver::npm::DenoInNpmPackageChecker;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_semver::jsr::JsrDepPackageReq;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::SmallStackString;
|
||||
|
||||
use crate::args::config_to_deno_graph_workspace_member;
|
||||
use crate::args::jsr_url;
|
||||
use crate::args::CliLockfile;
|
||||
use crate::args::CliOptions;
|
||||
pub use crate::args::NpmCachingStrategy;
|
||||
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
|
||||
use crate::cache;
|
||||
use crate::cache::FetchCacher;
|
||||
|
@ -11,53 +51,20 @@ use crate::cache::GlobalHttpCache;
|
|||
use crate::cache::ModuleInfoCache;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::colors;
|
||||
use crate::errors::get_error_class_name;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::file_fetcher::CliFileFetcher;
|
||||
use crate::npm::installer::NpmInstaller;
|
||||
use crate::npm::installer::PackageCaching;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::resolver::CjsTracker;
|
||||
use crate::resolver::CliCjsTracker;
|
||||
use crate::resolver::CliNpmGraphResolver;
|
||||
use crate::resolver::CliResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
use crate::resolver::SloppyImportsCachedFs;
|
||||
use crate::sys::CliSys;
|
||||
use crate::tools::check;
|
||||
use crate::tools::check::CheckError;
|
||||
use crate::tools::check::TypeChecker;
|
||||
use crate::util::file_watcher::WatcherCommunicator;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use deno_config::deno_json::JsxImportSourceConfig;
|
||||
use deno_config::workspace::JsrPackageConfig;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_graph::source::LoaderChecksum;
|
||||
use deno_graph::source::ResolutionKind;
|
||||
use deno_graph::FillFromLockfileOptions;
|
||||
use deno_graph::JsrLoadError;
|
||||
use deno_graph::ModuleLoadError;
|
||||
use deno_graph::WorkspaceFastCheckOption;
|
||||
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::source::Loader;
|
||||
use deno_graph::source::ResolveError;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_graph::ModuleError;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_graph::ModuleGraphError;
|
||||
use deno_graph::ResolutionError;
|
||||
use deno_graph::SpecifierError;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_semver::jsr::JsrDepPackageReq;
|
||||
use deno_semver::package::PackageNv;
|
||||
use import_map::ImportMapError;
|
||||
use node_resolver::InNpmPackageChecker;
|
||||
use std::collections::HashSet;
|
||||
use std::error::Error;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct GraphValidOptions {
|
||||
|
@ -78,17 +85,17 @@ pub struct GraphValidOptions {
|
|||
/// for the CLI.
|
||||
pub fn graph_valid(
|
||||
graph: &ModuleGraph,
|
||||
fs: &Arc<dyn FileSystem>,
|
||||
sys: &CliSys,
|
||||
roots: &[ModuleSpecifier],
|
||||
options: GraphValidOptions,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), JsErrorBox> {
|
||||
if options.exit_integrity_errors {
|
||||
graph_exit_integrity_errors(graph);
|
||||
}
|
||||
|
||||
let mut errors = graph_walk_errors(
|
||||
graph,
|
||||
fs,
|
||||
sys,
|
||||
roots,
|
||||
GraphWalkErrorsOptions {
|
||||
check_js: options.check_js,
|
||||
|
@ -100,15 +107,34 @@ pub fn graph_valid(
|
|||
} else {
|
||||
// finally surface the npm resolution result
|
||||
if let Err(err) = &graph.npm_dep_graph_result {
|
||||
return Err(custom_error(
|
||||
get_error_class_name(err),
|
||||
format_deno_graph_error(err.as_ref().deref()),
|
||||
return Err(JsErrorBox::new(
|
||||
err.get_class(),
|
||||
format_deno_graph_error(err),
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fill_graph_from_lockfile(
|
||||
graph: &mut ModuleGraph,
|
||||
lockfile: &deno_lockfile::Lockfile,
|
||||
) {
|
||||
graph.fill_from_lockfile(FillFromLockfileOptions {
|
||||
redirects: lockfile
|
||||
.content
|
||||
.redirects
|
||||
.iter()
|
||||
.map(|(from, to)| (from.as_str(), to.as_str())),
|
||||
package_specifiers: lockfile
|
||||
.content
|
||||
.packages
|
||||
.specifiers
|
||||
.iter()
|
||||
.map(|(dep, id)| (dep, id.as_str())),
|
||||
});
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct GraphWalkErrorsOptions {
|
||||
pub check_js: bool,
|
||||
|
@ -119,10 +145,10 @@ pub struct GraphWalkErrorsOptions {
|
|||
/// and enhances them with CLI information.
|
||||
pub fn graph_walk_errors<'a>(
|
||||
graph: &'a ModuleGraph,
|
||||
fs: &'a Arc<dyn FileSystem>,
|
||||
sys: &'a CliSys,
|
||||
roots: &'a [ModuleSpecifier],
|
||||
options: GraphWalkErrorsOptions,
|
||||
) -> impl Iterator<Item = AnyError> + 'a {
|
||||
) -> impl Iterator<Item = JsErrorBox> + 'a {
|
||||
graph
|
||||
.walk(
|
||||
roots.iter(),
|
||||
|
@ -142,29 +168,15 @@ pub fn graph_walk_errors<'a>(
|
|||
roots.contains(error.specifier())
|
||||
}
|
||||
};
|
||||
let mut message = match &error {
|
||||
ModuleGraphError::ResolutionError(resolution_error) => {
|
||||
enhanced_resolution_error_message(resolution_error)
|
||||
}
|
||||
ModuleGraphError::TypesResolutionError(resolution_error) => {
|
||||
format!(
|
||||
"Failed resolving types. {}",
|
||||
enhanced_resolution_error_message(resolution_error)
|
||||
)
|
||||
}
|
||||
ModuleGraphError::ModuleError(error) => {
|
||||
enhanced_integrity_error_message(error)
|
||||
.or_else(|| enhanced_sloppy_imports_error_message(fs, error))
|
||||
.unwrap_or_else(|| format_deno_graph_error(error))
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(range) = error.maybe_range() {
|
||||
if !is_root && !range.specifier.as_str().contains("/$deno$eval") {
|
||||
message.push_str("\n at ");
|
||||
message.push_str(&format_range_with_colors(range));
|
||||
}
|
||||
}
|
||||
let message = enhance_graph_error(
|
||||
sys,
|
||||
&error,
|
||||
if is_root {
|
||||
EnhanceGraphErrorMode::HideRange
|
||||
} else {
|
||||
EnhanceGraphErrorMode::ShowRange
|
||||
},
|
||||
);
|
||||
|
||||
if graph.graph_kind() == GraphKind::TypesOnly
|
||||
&& matches!(
|
||||
|
@ -176,10 +188,61 @@ pub fn graph_walk_errors<'a>(
|
|||
return None;
|
||||
}
|
||||
|
||||
Some(custom_error(get_error_class_name(&error.into()), message))
|
||||
if graph.graph_kind().include_types()
|
||||
&& (message.contains(RUN_WITH_SLOPPY_IMPORTS_MSG)
|
||||
|| matches!(
|
||||
error,
|
||||
ModuleGraphError::ModuleError(ModuleError::Missing(..))
|
||||
))
|
||||
{
|
||||
// ignore and let typescript surface this as a diagnostic instead
|
||||
log::debug!("Ignoring: {}", message);
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(JsErrorBox::new(error.get_class(), message))
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum EnhanceGraphErrorMode {
|
||||
ShowRange,
|
||||
HideRange,
|
||||
}
|
||||
|
||||
pub fn enhance_graph_error(
|
||||
sys: &CliSys,
|
||||
error: &ModuleGraphError,
|
||||
mode: EnhanceGraphErrorMode,
|
||||
) -> String {
|
||||
let mut message = match &error {
|
||||
ModuleGraphError::ResolutionError(resolution_error) => {
|
||||
enhanced_resolution_error_message(resolution_error)
|
||||
}
|
||||
ModuleGraphError::TypesResolutionError(resolution_error) => {
|
||||
format!(
|
||||
"Failed resolving types. {}",
|
||||
enhanced_resolution_error_message(resolution_error)
|
||||
)
|
||||
}
|
||||
ModuleGraphError::ModuleError(error) => {
|
||||
enhanced_integrity_error_message(error)
|
||||
.or_else(|| enhanced_sloppy_imports_error_message(sys, error))
|
||||
.unwrap_or_else(|| format_deno_graph_error(error))
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(range) = error.maybe_range() {
|
||||
if mode == EnhanceGraphErrorMode::ShowRange
|
||||
&& !range.specifier.as_str().contains("/$deno$eval")
|
||||
{
|
||||
message.push_str("\n at ");
|
||||
message.push_str(&format_range_with_colors(range));
|
||||
}
|
||||
}
|
||||
message
|
||||
}
|
||||
|
||||
pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
|
||||
for error in graph.module_errors() {
|
||||
exit_for_integrity_error(error);
|
||||
|
@ -199,11 +262,12 @@ pub struct CreateGraphOptions<'a> {
|
|||
pub is_dynamic: bool,
|
||||
/// Specify `None` to use the default CLI loader.
|
||||
pub loader: Option<&'a mut dyn Loader>,
|
||||
pub npm_caching: NpmCachingStrategy,
|
||||
}
|
||||
|
||||
pub struct ModuleGraphCreator {
|
||||
options: Arc<CliOptions>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
npm_installer: Option<Arc<NpmInstaller>>,
|
||||
module_graph_builder: Arc<ModuleGraphBuilder>,
|
||||
type_checker: Arc<TypeChecker>,
|
||||
}
|
||||
|
@ -211,13 +275,13 @@ pub struct ModuleGraphCreator {
|
|||
impl ModuleGraphCreator {
|
||||
pub fn new(
|
||||
options: Arc<CliOptions>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
npm_installer: Option<Arc<NpmInstaller>>,
|
||||
module_graph_builder: Arc<ModuleGraphBuilder>,
|
||||
type_checker: Arc<TypeChecker>,
|
||||
) -> Self {
|
||||
Self {
|
||||
options,
|
||||
npm_resolver,
|
||||
npm_installer,
|
||||
module_graph_builder,
|
||||
type_checker,
|
||||
}
|
||||
|
@ -227,10 +291,11 @@ impl ModuleGraphCreator {
|
|||
&self,
|
||||
graph_kind: GraphKind,
|
||||
roots: Vec<ModuleSpecifier>,
|
||||
npm_caching: NpmCachingStrategy,
|
||||
) -> Result<deno_graph::ModuleGraph, AnyError> {
|
||||
let mut cache = self.module_graph_builder.create_graph_loader();
|
||||
self
|
||||
.create_graph_with_loader(graph_kind, roots, &mut cache)
|
||||
.create_graph_with_loader(graph_kind, roots, &mut cache, npm_caching)
|
||||
.await
|
||||
}
|
||||
|
||||
|
@ -239,6 +304,7 @@ impl ModuleGraphCreator {
|
|||
graph_kind: GraphKind,
|
||||
roots: Vec<ModuleSpecifier>,
|
||||
loader: &mut dyn Loader,
|
||||
npm_caching: NpmCachingStrategy,
|
||||
) -> Result<ModuleGraph, AnyError> {
|
||||
self
|
||||
.create_graph_with_options(CreateGraphOptions {
|
||||
|
@ -246,6 +312,7 @@ impl ModuleGraphCreator {
|
|||
graph_kind,
|
||||
roots,
|
||||
loader: Some(loader),
|
||||
npm_caching,
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
@ -298,6 +365,7 @@ impl ModuleGraphCreator {
|
|||
graph_kind: deno_graph::GraphKind::All,
|
||||
roots,
|
||||
loader: Some(&mut publish_loader),
|
||||
npm_caching: self.options.default_npm_caching_strategy(),
|
||||
})
|
||||
.await?;
|
||||
self.graph_valid(&graph)?;
|
||||
|
@ -336,9 +404,9 @@ impl ModuleGraphCreator {
|
|||
.build_graph_with_npm_resolution(&mut graph, options)
|
||||
.await?;
|
||||
|
||||
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
|
||||
if let Some(npm_installer) = &self.npm_installer {
|
||||
if graph.has_node_specifier && self.options.type_check_mode().is_true() {
|
||||
npm_resolver.inject_synthetic_types_node_package().await?;
|
||||
npm_installer.inject_synthetic_types_node_package().await?;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -357,6 +425,7 @@ impl ModuleGraphCreator {
|
|||
graph_kind,
|
||||
roots,
|
||||
loader: None,
|
||||
npm_caching: self.options.default_npm_caching_strategy(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
|
@ -371,14 +440,14 @@ impl ModuleGraphCreator {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), AnyError> {
|
||||
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), JsErrorBox> {
|
||||
self.module_graph_builder.graph_valid(graph)
|
||||
}
|
||||
|
||||
async fn type_check_graph(
|
||||
&self,
|
||||
graph: ModuleGraph,
|
||||
) -> Result<Arc<ModuleGraph>, AnyError> {
|
||||
) -> Result<Arc<ModuleGraph>, CheckError> {
|
||||
self
|
||||
.type_checker
|
||||
.check(
|
||||
|
@ -401,56 +470,83 @@ pub struct BuildFastCheckGraphOptions<'a> {
|
|||
pub workspace_fast_check: deno_graph::WorkspaceFastCheckOption<'a>,
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error, deno_error::JsError)]
|
||||
pub enum BuildGraphWithNpmResolutionError {
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
SerdeJson(#[from] serde_json::Error),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
ToMaybeJsxImportSourceConfig(
|
||||
#[from] deno_json::ToMaybeJsxImportSourceConfigError,
|
||||
),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
NodeModulesDirParse(#[from] deno_json::NodeModulesDirParseError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
Other(#[from] JsErrorBox),
|
||||
#[class(generic)]
|
||||
#[error("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead")]
|
||||
UnsupportedNpmSpecifierEntrypointResolutionWay,
|
||||
}
|
||||
|
||||
pub struct ModuleGraphBuilder {
|
||||
caches: Arc<cache::Caches>,
|
||||
cjs_tracker: Arc<CjsTracker>,
|
||||
cjs_tracker: Arc<CliCjsTracker>,
|
||||
cli_options: Arc<CliOptions>,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
fs: Arc<dyn FileSystem>,
|
||||
file_fetcher: Arc<CliFileFetcher>,
|
||||
global_http_cache: Arc<GlobalHttpCache>,
|
||||
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
|
||||
in_npm_pkg_checker: DenoInNpmPackageChecker,
|
||||
lockfile: Option<Arc<CliLockfile>>,
|
||||
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
|
||||
module_info_cache: Arc<ModuleInfoCache>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
npm_graph_resolver: Arc<CliNpmGraphResolver>,
|
||||
npm_installer: Option<Arc<NpmInstaller>>,
|
||||
npm_resolver: CliNpmResolver,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
resolver: Arc<CliResolver>,
|
||||
root_permissions_container: PermissionsContainer,
|
||||
sys: CliSys,
|
||||
}
|
||||
|
||||
impl ModuleGraphBuilder {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
caches: Arc<cache::Caches>,
|
||||
cjs_tracker: Arc<CjsTracker>,
|
||||
cjs_tracker: Arc<CliCjsTracker>,
|
||||
cli_options: Arc<CliOptions>,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
fs: Arc<dyn FileSystem>,
|
||||
file_fetcher: Arc<CliFileFetcher>,
|
||||
global_http_cache: Arc<GlobalHttpCache>,
|
||||
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
|
||||
in_npm_pkg_checker: DenoInNpmPackageChecker,
|
||||
lockfile: Option<Arc<CliLockfile>>,
|
||||
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
|
||||
module_info_cache: Arc<ModuleInfoCache>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
npm_graph_resolver: Arc<CliNpmGraphResolver>,
|
||||
npm_installer: Option<Arc<NpmInstaller>>,
|
||||
npm_resolver: CliNpmResolver,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
resolver: Arc<CliResolver>,
|
||||
root_permissions_container: PermissionsContainer,
|
||||
sys: CliSys,
|
||||
) -> Self {
|
||||
Self {
|
||||
caches,
|
||||
cjs_tracker,
|
||||
cli_options,
|
||||
file_fetcher,
|
||||
fs,
|
||||
global_http_cache,
|
||||
in_npm_pkg_checker,
|
||||
lockfile,
|
||||
maybe_file_watcher_reporter,
|
||||
module_info_cache,
|
||||
npm_graph_resolver,
|
||||
npm_installer,
|
||||
npm_resolver,
|
||||
parsed_source_cache,
|
||||
resolver,
|
||||
root_permissions_container,
|
||||
sys,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -458,7 +554,7 @@ impl ModuleGraphBuilder {
|
|||
&self,
|
||||
graph: &mut ModuleGraph,
|
||||
options: CreateGraphOptions<'a>,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), BuildGraphWithNpmResolutionError> {
|
||||
enum MutLoaderRef<'a> {
|
||||
Borrowed(&'a mut dyn Loader),
|
||||
Owned(cache::FetchCacher),
|
||||
|
@ -544,9 +640,7 @@ impl ModuleGraphBuilder {
|
|||
Some(loader) => MutLoaderRef::Borrowed(loader),
|
||||
None => MutLoaderRef::Owned(self.create_graph_loader()),
|
||||
};
|
||||
let cli_resolver = &self.resolver;
|
||||
let graph_resolver = self.create_graph_resolver()?;
|
||||
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
|
||||
let maybe_file_watcher_reporter = self
|
||||
.maybe_file_watcher_reporter
|
||||
.as_ref()
|
||||
|
@ -565,14 +659,15 @@ impl ModuleGraphBuilder {
|
|||
is_dynamic: options.is_dynamic,
|
||||
passthrough_jsr_specifiers: false,
|
||||
executor: Default::default(),
|
||||
file_system: &DenoGraphFsAdapter(self.fs.as_ref()),
|
||||
file_system: &self.sys,
|
||||
jsr_url_provider: &CliJsrUrlProvider,
|
||||
npm_resolver: Some(&graph_npm_resolver),
|
||||
npm_resolver: Some(self.npm_graph_resolver.as_ref()),
|
||||
module_analyzer: &analyzer,
|
||||
reporter: maybe_file_watcher_reporter,
|
||||
resolver: Some(&graph_resolver),
|
||||
locker: locker.as_mut().map(|l| l as _),
|
||||
},
|
||||
options.npm_caching,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
@ -583,17 +678,23 @@ impl ModuleGraphBuilder {
|
|||
roots: Vec<ModuleSpecifier>,
|
||||
loader: &'a mut dyn deno_graph::source::Loader,
|
||||
options: deno_graph::BuildOptions<'a>,
|
||||
) -> Result<(), AnyError> {
|
||||
npm_caching: NpmCachingStrategy,
|
||||
) -> Result<(), BuildGraphWithNpmResolutionError> {
|
||||
// ensure an "npm install" is done if the user has explicitly
|
||||
// opted into using a node_modules directory
|
||||
if self
|
||||
.cli_options
|
||||
.node_modules_dir()?
|
||||
.map(|m| m.uses_node_modules_dir())
|
||||
.map(|m| m == NodeModulesDirMode::Auto)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
|
||||
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||
if let Some(npm_installer) = &self.npm_installer {
|
||||
let already_done = npm_installer
|
||||
.ensure_top_level_package_json_install()
|
||||
.await?;
|
||||
if !already_done && matches!(npm_caching, NpmCachingStrategy::Eager) {
|
||||
npm_installer.cache_packages(PackageCaching::All).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -603,19 +704,7 @@ impl ModuleGraphBuilder {
|
|||
// populate the information from the lockfile
|
||||
if let Some(lockfile) = &self.lockfile {
|
||||
let lockfile = lockfile.lock();
|
||||
graph.fill_from_lockfile(FillFromLockfileOptions {
|
||||
redirects: lockfile
|
||||
.content
|
||||
.redirects
|
||||
.iter()
|
||||
.map(|(from, to)| (from.as_str(), to.as_str())),
|
||||
package_specifiers: lockfile
|
||||
.content
|
||||
.packages
|
||||
.specifiers
|
||||
.iter()
|
||||
.map(|(dep, id)| (dep, id.as_str())),
|
||||
});
|
||||
fill_graph_from_lockfile(graph, &lockfile);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -623,10 +712,9 @@ impl ModuleGraphBuilder {
|
|||
let initial_package_deps_len = graph.packages.package_deps_sum();
|
||||
let initial_package_mappings_len = graph.packages.mappings().len();
|
||||
|
||||
if roots.iter().any(|r| r.scheme() == "npm")
|
||||
&& self.npm_resolver.as_byonm().is_some()
|
||||
if roots.iter().any(|r| r.scheme() == "npm") && self.npm_resolver.is_byonm()
|
||||
{
|
||||
bail!("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead");
|
||||
return Err(BuildGraphWithNpmResolutionError::UnsupportedNpmSpecifierEntrypointResolutionWay);
|
||||
}
|
||||
|
||||
graph.build(roots, loader, options).await;
|
||||
|
@ -657,7 +745,7 @@ impl ModuleGraphBuilder {
|
|||
for (from, to) in graph.packages.mappings() {
|
||||
lockfile.insert_package_specifier(
|
||||
JsrDepPackageReq::jsr(from.clone()),
|
||||
to.version.to_string(),
|
||||
to.version.to_custom_string::<SmallStackString>(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -677,7 +765,7 @@ impl ModuleGraphBuilder {
|
|||
&self,
|
||||
graph: &mut ModuleGraph,
|
||||
options: BuildFastCheckGraphOptions,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), deno_json::ToMaybeJsxImportSourceConfigError> {
|
||||
if !graph.graph_kind().include_types() {
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -692,9 +780,7 @@ impl ModuleGraphBuilder {
|
|||
None
|
||||
};
|
||||
let parser = self.parsed_source_cache.as_capturing_parser();
|
||||
let cli_resolver = &self.resolver;
|
||||
let graph_resolver = self.create_graph_resolver()?;
|
||||
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
|
||||
|
||||
graph.build_fast_check_type_graph(
|
||||
deno_graph::BuildFastCheckTypeGraphOptions {
|
||||
|
@ -703,7 +789,7 @@ impl ModuleGraphBuilder {
|
|||
fast_check_dts: false,
|
||||
jsr_url_provider: &CliJsrUrlProvider,
|
||||
resolver: Some(&graph_resolver),
|
||||
npm_resolver: Some(&graph_npm_resolver),
|
||||
npm_resolver: Some(self.npm_graph_resolver.as_ref()),
|
||||
workspace_fast_check: options.workspace_fast_check,
|
||||
},
|
||||
);
|
||||
|
@ -721,10 +807,10 @@ impl ModuleGraphBuilder {
|
|||
) -> cache::FetchCacher {
|
||||
cache::FetchCacher::new(
|
||||
self.file_fetcher.clone(),
|
||||
self.fs.clone(),
|
||||
self.global_http_cache.clone(),
|
||||
self.in_npm_pkg_checker.clone(),
|
||||
self.module_info_cache.clone(),
|
||||
self.sys.clone(),
|
||||
cache::FetchCacherOptions {
|
||||
file_header_overrides: self.cli_options.resolve_file_header_overrides(),
|
||||
permissions,
|
||||
|
@ -739,7 +825,7 @@ impl ModuleGraphBuilder {
|
|||
/// Check if `roots` and their deps are available. Returns `Ok(())` if
|
||||
/// so. Returns `Err(_)` if there is a known module graph or resolution
|
||||
/// error statically reachable from `roots` and not a dynamic import.
|
||||
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), AnyError> {
|
||||
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), JsErrorBox> {
|
||||
self.graph_roots_valid(
|
||||
graph,
|
||||
&graph.roots.iter().cloned().collect::<Vec<_>>(),
|
||||
|
@ -750,10 +836,10 @@ impl ModuleGraphBuilder {
|
|||
&self,
|
||||
graph: &ModuleGraph,
|
||||
roots: &[ModuleSpecifier],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), JsErrorBox> {
|
||||
graph_valid(
|
||||
graph,
|
||||
&self.fs,
|
||||
&self.sys,
|
||||
roots,
|
||||
GraphValidOptions {
|
||||
kind: if self.cli_options.type_check_mode().is_true() {
|
||||
|
@ -767,7 +853,10 @@ impl ModuleGraphBuilder {
|
|||
)
|
||||
}
|
||||
|
||||
fn create_graph_resolver(&self) -> Result<CliGraphResolver, AnyError> {
|
||||
fn create_graph_resolver(
|
||||
&self,
|
||||
) -> Result<CliGraphResolver, deno_json::ToMaybeJsxImportSourceConfigError>
|
||||
{
|
||||
let jsx_import_source_config = self
|
||||
.cli_options
|
||||
.workspace()
|
||||
|
@ -808,18 +897,19 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
|
|||
message
|
||||
}
|
||||
|
||||
static RUN_WITH_SLOPPY_IMPORTS_MSG: &str =
|
||||
"or run with --unstable-sloppy-imports";
|
||||
|
||||
fn enhanced_sloppy_imports_error_message(
|
||||
fs: &Arc<dyn FileSystem>,
|
||||
sys: &CliSys,
|
||||
error: &ModuleError,
|
||||
) -> Option<String> {
|
||||
match error {
|
||||
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
|
||||
| ModuleError::Missing(specifier, _) => {
|
||||
let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone()))
|
||||
.resolve(specifier, SloppyImportsResolutionKind::Execution)?
|
||||
.as_suggestion_message();
|
||||
let additional_message = maybe_additional_sloppy_imports_message(sys, specifier)?;
|
||||
Some(format!(
|
||||
"{} {} or run with --unstable-sloppy-imports",
|
||||
"{} {}",
|
||||
error,
|
||||
additional_message,
|
||||
))
|
||||
|
@ -828,6 +918,19 @@ fn enhanced_sloppy_imports_error_message(
|
|||
}
|
||||
}
|
||||
|
||||
pub fn maybe_additional_sloppy_imports_message(
|
||||
sys: &CliSys,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<String> {
|
||||
Some(format!(
|
||||
"{} {}",
|
||||
CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(sys.clone()))
|
||||
.resolve(specifier, SloppyImportsResolutionKind::Execution)?
|
||||
.as_suggestion_message(),
|
||||
RUN_WITH_SLOPPY_IMPORTS_MSG
|
||||
))
|
||||
}
|
||||
|
||||
fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> {
|
||||
match err {
|
||||
ModuleError::LoadingErr(
|
||||
|
@ -921,9 +1024,11 @@ fn get_resolution_error_bare_specifier(
|
|||
{
|
||||
Some(specifier.as_str())
|
||||
} else if let ResolutionError::ResolverError { error, .. } = error {
|
||||
if let ResolveError::Other(error) = (*error).as_ref() {
|
||||
if let Some(ImportMapError::UnmappedBareSpecifier(specifier, _)) =
|
||||
error.downcast_ref::<ImportMapError>()
|
||||
if let ResolveError::ImportMap(error) = (*error).as_ref() {
|
||||
if let import_map::ImportMapErrorKind::UnmappedBareSpecifier(
|
||||
specifier,
|
||||
_,
|
||||
) = error.as_kind()
|
||||
{
|
||||
Some(specifier.as_str())
|
||||
} else {
|
||||
|
@ -960,11 +1065,12 @@ fn get_import_prefix_missing_error(error: &ResolutionError) -> Option<&str> {
|
|||
ResolveError::Other(other_error) => {
|
||||
if let Some(SpecifierError::ImportPrefixMissing {
|
||||
specifier, ..
|
||||
}) = other_error.downcast_ref::<SpecifierError>()
|
||||
}) = other_error.as_any().downcast_ref::<SpecifierError>()
|
||||
{
|
||||
maybe_specifier = Some(specifier);
|
||||
}
|
||||
}
|
||||
ResolveError::ImportMap(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1056,71 +1162,6 @@ impl deno_graph::source::Reporter for FileWatcherReporter {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct DenoGraphFsAdapter<'a>(
|
||||
pub &'a dyn deno_runtime::deno_fs::FileSystem,
|
||||
);
|
||||
|
||||
impl<'a> deno_graph::source::FileSystem for DenoGraphFsAdapter<'a> {
|
||||
fn read_dir(
|
||||
&self,
|
||||
dir_url: &deno_graph::ModuleSpecifier,
|
||||
) -> Vec<deno_graph::source::DirEntry> {
|
||||
use deno_core::anyhow;
|
||||
use deno_graph::source::DirEntry;
|
||||
use deno_graph::source::DirEntryKind;
|
||||
|
||||
let dir_path = match dir_url.to_file_path() {
|
||||
Ok(path) => path,
|
||||
// ignore, treat as non-analyzable
|
||||
Err(()) => return vec![],
|
||||
};
|
||||
let entries = match self.0.read_dir_sync(&dir_path) {
|
||||
Ok(dir) => dir,
|
||||
Err(err)
|
||||
if matches!(
|
||||
err.kind(),
|
||||
std::io::ErrorKind::PermissionDenied | std::io::ErrorKind::NotFound
|
||||
) =>
|
||||
{
|
||||
return vec![];
|
||||
}
|
||||
Err(err) => {
|
||||
return vec![DirEntry {
|
||||
kind: DirEntryKind::Error(
|
||||
anyhow::Error::from(err)
|
||||
.context("Failed to read directory.".to_string()),
|
||||
),
|
||||
url: dir_url.clone(),
|
||||
}];
|
||||
}
|
||||
};
|
||||
let mut dir_entries = Vec::with_capacity(entries.len());
|
||||
for entry in entries {
|
||||
let entry_path = dir_path.join(&entry.name);
|
||||
dir_entries.push(if entry.is_directory {
|
||||
DirEntry {
|
||||
kind: DirEntryKind::Dir,
|
||||
url: ModuleSpecifier::from_directory_path(&entry_path).unwrap(),
|
||||
}
|
||||
} else if entry.is_file {
|
||||
DirEntry {
|
||||
kind: DirEntryKind::File,
|
||||
url: ModuleSpecifier::from_file_path(&entry_path).unwrap(),
|
||||
}
|
||||
} else if entry.is_symlink {
|
||||
DirEntry {
|
||||
kind: DirEntryKind::Symlink,
|
||||
url: ModuleSpecifier::from_file_path(&entry_path).unwrap(),
|
||||
}
|
||||
} else {
|
||||
continue;
|
||||
});
|
||||
}
|
||||
|
||||
dir_entries
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String {
|
||||
format!(
|
||||
"{}:{}:{}",
|
||||
|
@ -1184,7 +1225,7 @@ fn format_deno_graph_error(err: &dyn Error) -> String {
|
|||
|
||||
#[derive(Debug)]
|
||||
struct CliGraphResolver<'a> {
|
||||
cjs_tracker: &'a CjsTracker,
|
||||
cjs_tracker: &'a CliCjsTracker,
|
||||
resolver: &'a CliResolver,
|
||||
jsx_import_source_config: Option<JsxImportSourceConfig>,
|
||||
}
|
||||
|
@ -1280,7 +1321,7 @@ mod test {
|
|||
let specifier = ModuleSpecifier::parse("file:///file.ts").unwrap();
|
||||
let err = import_map.resolve(input, &specifier).err().unwrap();
|
||||
let err = ResolutionError::ResolverError {
|
||||
error: Arc::new(ResolveError::Other(err.into())),
|
||||
error: Arc::new(ResolveError::ImportMap(err)),
|
||||
specifier: input.to_string(),
|
||||
range: Range {
|
||||
specifier,
|
||||
|
cli/http_util.rs (966 lines changed): file diff suppressed because it is too large
@ -1,18 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

pub fn main() {
  let mut args = vec!["cargo", "test", "-p", "cli_tests", "--features", "run"];

  if !cfg!(debug_assertions) {
    args.push("--release");
  }

  args.push("--");

  // If any args were passed to this process, pass them through to the child
  let orig_args = std::env::args().skip(1).collect::<Vec<_>>();
  let orig_args: Vec<&str> =
    orig_args.iter().map(|x| x.as_ref()).collect::<Vec<_>>();
  args.extend(orig_args);

  test_util::spawn::exec_replace("cargo", &args).unwrap();
  // this file exists to cause the executable to be built when running cargo test
}
cli/js.rs (15 lines changed)
@ -1,19 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use log::debug;

#[cfg(not(feature = "hmr"))]
static CLI_SNAPSHOT: &[u8] =
  include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));

pub fn deno_isolate_init() -> Option<&'static [u8]> {
  debug!("Deno isolate init with snapshots.");
  #[cfg(not(feature = "hmr"))]
  {
    Some(CLI_SNAPSHOT)
  }
  #[cfg(feature = "hmr")]
  {
    None
  }
  deno_snapshots::CLI_SNAPSHOT
}
@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file

import { core, primordials } from "ext:core/mod.js";

@ -8,7 +8,7 @@ import {
  restorePermissions,
} from "ext:cli/40_test_common.js";
import { Console } from "ext:deno_console/01_console.js";
import { setExitHandler } from "ext:runtime/30_os.js";
import { setExitHandler } from "ext:deno_os/30_os.js";
const {
  op_register_bench,
  op_bench_get_origin,
@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file

/*
cli/js/40_lint.js (new file, 1089 lines): file diff suppressed because it is too large
cli/js/40_lint_selector.js (new file, 1029 lines): file diff suppressed because it is too large
cli/js/40_lint_types.d.ts (vendored, new file, 139 lines)
@ -0,0 +1,139 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
export interface NodeFacade {
|
||||
type: string;
|
||||
range: [number, number];
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface AstContext {
|
||||
buf: Uint8Array;
|
||||
strTable: Map<number, string>;
|
||||
strTableOffset: number;
|
||||
rootOffset: number;
|
||||
nodes: Map<number, NodeFacade>;
|
||||
spansOffset: number;
|
||||
propsOffset: number;
|
||||
strByType: number[];
|
||||
strByProp: number[];
|
||||
typeByStr: Map<string, number>;
|
||||
propByStr: Map<string, number>;
|
||||
matcher: MatchContext;
|
||||
}
|
||||
|
||||
export interface Node {
|
||||
range: Range;
|
||||
}
|
||||
|
||||
export type Range = [number, number];
|
||||
|
||||
// TODO(@marvinhagemeister) Remove once we land "official" types
|
||||
export interface RuleContext {
|
||||
id: string;
|
||||
}
|
||||
|
||||
// TODO(@marvinhagemeister) Remove once we land "official" types
|
||||
export interface LintRule {
|
||||
create(ctx: RuleContext): Record<string, (node: unknown) => void>;
|
||||
destroy?(ctx: RuleContext): void;
|
||||
}
|
||||
|
||||
// TODO(@marvinhagemeister) Remove once we land "official" types
|
||||
export interface LintPlugin {
|
||||
name: string;
|
||||
rules: Record<string, LintRule>;
|
||||
}
|
||||
|
||||
export interface LintState {
|
||||
plugins: LintPlugin[];
|
||||
installedPlugins: Set<string>;
|
||||
}
|
||||
|
||||
export type VisitorFn = (node: unknown) => void;
|
||||
|
||||
export interface CompiledVisitor {
|
||||
matcher: (ctx: MatchContext, offset: number) => boolean;
|
||||
info: { enter: VisitorFn; exit: VisitorFn };
|
||||
}
|
||||
|
||||
export interface AttrExists {
|
||||
type: 3;
|
||||
prop: number[];
|
||||
}
|
||||
|
||||
export interface AttrBin {
|
||||
type: 4;
|
||||
prop: number[];
|
||||
op: number;
|
||||
// deno-lint-ignore no-explicit-any
|
||||
value: any;
|
||||
}
|
||||
|
||||
export type AttrSelector = AttrExists | AttrBin;
|
||||
|
||||
export interface ElemSelector {
|
||||
type: 1;
|
||||
wildcard: boolean;
|
||||
elem: number;
|
||||
}
|
||||
|
||||
export interface PseudoNthChild {
|
||||
type: 5;
|
||||
op: string | null;
|
||||
step: number;
|
||||
stepOffset: number;
|
||||
of: Selector | null;
|
||||
repeat: boolean;
|
||||
}
|
||||
|
||||
export interface PseudoHas {
|
||||
type: 6;
|
||||
selectors: Selector[];
|
||||
}
|
||||
export interface PseudoNot {
|
||||
type: 7;
|
||||
selectors: Selector[];
|
||||
}
|
||||
export interface PseudoFirstChild {
|
||||
type: 8;
|
||||
}
|
||||
export interface PseudoLastChild {
|
||||
type: 9;
|
||||
}
|
||||
|
||||
export interface Relation {
|
||||
type: 2;
|
||||
op: number;
|
||||
}
|
||||
|
||||
export type Selector = Array<
|
||||
| ElemSelector
|
||||
| Relation
|
||||
| AttrExists
|
||||
| AttrBin
|
||||
| PseudoNthChild
|
||||
| PseudoNot
|
||||
| PseudoHas
|
||||
| PseudoFirstChild
|
||||
| PseudoLastChild
|
||||
>;
|
||||
|
||||
export interface SelectorParseCtx {
|
||||
root: Selector;
|
||||
current: Selector;
|
||||
}
|
||||
|
||||
export interface MatchContext {
|
||||
getFirstChild(id: number): number;
|
||||
getLastChild(id: number): number;
|
||||
getSiblings(id: number): number[];
|
||||
getParent(id: number): number;
|
||||
getType(id: number): number;
|
||||
getAttrPathValue(id: number, propIds: number[], idx: number): unknown;
|
||||
}
|
||||
|
||||
export type NextFn = (ctx: MatchContext, id: number) => boolean;
|
||||
export type MatcherFn = (ctx: MatchContext, id: number) => boolean;
|
||||
export type TransformFn = (value: string) => number;
|
||||
|
||||
export {};
|
|
@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

import { core, primordials } from "ext:core/mod.js";
import { escapeName, withPermissions } from "ext:cli/40_test_common.js";

@ -26,7 +26,7 @@ const {
  TypeError,
} = primordials;

import { setExitHandler } from "ext:runtime/30_os.js";
import { setExitHandler } from "ext:deno_os/30_os.js";

// Capture `Deno` global so that users deleting or mangling it, won't
// have impact on our sanitizers.
@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.
import { core, primordials } from "ext:core/mod.js";
import { serializePermissions } from "ext:runtime/10_permissions.js";
const ops = core.ops;
cli/jsr.rs (14 lines changed)
@ -1,14 +1,16 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. MIT license.

use std::sync::Arc;

use crate::args::jsr_url;
use crate::file_fetcher::FileFetcher;
use dashmap::DashMap;
use deno_core::serde_json;
use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageVersionInfo;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use std::sync::Arc;

use crate::args::jsr_url;
use crate::file_fetcher::CliFileFetcher;

/// This is similar to a subset of `JsrCacheResolver` which fetches rather than
/// just reads the cache. Keep in sync!

@ -19,11 +21,11 @@ pub struct JsrFetchResolver {
  /// It can be large and we don't want to store it.
  info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
  info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
  file_fetcher: Arc<FileFetcher>,
  file_fetcher: Arc<CliFileFetcher>,
}

impl JsrFetchResolver {
  pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
  pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
    Self {
      nv_by_req: Default::default(),
      info_by_nv: Default::default(),
cli/lib/Cargo.toml (new file, 46 lines)
@ -0,0 +1,46 @@
# Copyright 2018-2025 the Deno authors. MIT license.

[package]
name = "deno_lib"
version = "0.3.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "Shared code between the Deno CLI and denort"

[lib]
path = "lib.rs"

[dependencies]
capacity_builder.workspace = true
deno_config = { workspace = true, features = ["sync", "workspace"] }
deno_error.workspace = true
deno_fs = { workspace = true, features = ["sync_fs"] }
deno_media_type.workspace = true
deno_node = { workspace = true, features = ["sync_fs"] }
deno_npm.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime.workspace = true
deno_semver.workspace = true
deno_terminal.workspace = true
env_logger = "=0.10.0"
faster-hex.workspace = true
indexmap.workspace = true
libsui.workspace = true
log = { workspace = true, features = ["serde"] }
node_resolver = { workspace = true, features = ["sync"] }
parking_lot.workspace = true
ring.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
sys_traits = { workspace = true, features = ["getrandom"] }
thiserror.workspace = true
tokio.workspace = true
twox-hash.workspace = true
url.workspace = true

[dev-dependencies]
test_util.workspace = true
cli/lib/README.md (new file, 4 lines)
@ -0,0 +1,4 @@
# deno_lib

This crate contains the shared code between the Deno CLI and denort. It is
highly unstable.
cli/lib/args.rs (new file, 216 lines)
@ -0,0 +1,216 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::io::BufReader;
|
||||
use std::io::Cursor;
|
||||
use std::io::Read;
|
||||
use std::io::Seek;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use deno_npm::resolution::PackageIdNotFoundError;
|
||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
|
||||
use deno_runtime::deno_tls::rustls;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_tls::rustls_pemfile;
|
||||
use deno_runtime::deno_tls::webpki_roots;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use thiserror::Error;
|
||||
|
||||
pub fn npm_pkg_req_ref_to_binary_command(
|
||||
req_ref: &NpmPackageReqReference,
|
||||
) -> String {
|
||||
req_ref
|
||||
.sub_path()
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| req_ref.req().name.to_string())
|
||||
}
|
||||
|
||||
pub fn has_trace_permissions_enabled() -> bool {
|
||||
has_flag_env_var("DENO_TRACE_PERMISSIONS")
|
||||
}
|
||||
|
||||
pub fn has_flag_env_var(name: &str) -> bool {
|
||||
match std::env::var_os(name) {
|
||||
Some(value) => value == "1",
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub enum CaData {
|
||||
/// The string is a file path
|
||||
File(String),
|
||||
/// This variant is not exposed as an option in the CLI, it is used internally
|
||||
/// for standalone binaries.
|
||||
Bytes(Vec<u8>),
|
||||
}
|
||||
|
||||
#[derive(Error, Debug, Clone, deno_error::JsError)]
|
||||
#[class(generic)]
|
||||
pub enum RootCertStoreLoadError {
|
||||
#[error(
|
||||
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
|
||||
)]
|
||||
UnknownStore(String),
|
||||
#[error("Unable to add pem file to certificate store: {0}")]
|
||||
FailedAddPemFile(String),
|
||||
#[error("Failed opening CA file: {0}")]
|
||||
CaFileOpenError(String),
|
||||
}
|
||||
|
||||
/// Create and populate a root cert store based on the passed options and
|
||||
/// environment.
|
||||
pub fn get_root_cert_store(
|
||||
maybe_root_path: Option<PathBuf>,
|
||||
maybe_ca_stores: Option<Vec<String>>,
|
||||
maybe_ca_data: Option<CaData>,
|
||||
) -> Result<RootCertStore, RootCertStoreLoadError> {
|
||||
let mut root_cert_store = RootCertStore::empty();
|
||||
let ca_stores: Vec<String> = maybe_ca_stores
|
||||
.or_else(|| {
|
||||
let env_ca_store = std::env::var("DENO_TLS_CA_STORE").ok()?;
|
||||
Some(
|
||||
env_ca_store
|
||||
.split(',')
|
||||
.map(|s| s.trim().to_string())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
.unwrap_or_else(|| vec!["mozilla".to_string()]);
|
||||
|
||||
for store in ca_stores.iter() {
|
||||
match store.as_str() {
|
||||
"mozilla" => {
|
||||
root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec());
|
||||
}
|
||||
"system" => {
|
||||
let roots = load_native_certs().expect("could not load platform certs");
|
||||
for root in roots {
|
||||
if let Err(err) = root_cert_store
|
||||
.add(rustls::pki_types::CertificateDer::from(root.0.clone()))
|
||||
{
|
||||
log::error!(
|
||||
"{}",
|
||||
colors::yellow(&format!(
|
||||
"Unable to add system certificate to certificate store: {:?}",
|
||||
err
|
||||
))
|
||||
);
|
||||
let hex_encoded_root = faster_hex::hex_string(&root.0);
|
||||
log::error!("{}", colors::gray(&hex_encoded_root));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let ca_data =
|
||||
maybe_ca_data.or_else(|| std::env::var("DENO_CERT").ok().map(CaData::File));
|
||||
if let Some(ca_data) = ca_data {
|
||||
let result = match ca_data {
|
||||
CaData::File(ca_file) => {
|
||||
let ca_file = if let Some(root) = &maybe_root_path {
|
||||
root.join(&ca_file)
|
||||
} else {
|
||||
PathBuf::from(ca_file)
|
||||
};
|
||||
let certfile = std::fs::File::open(ca_file).map_err(|err| {
|
||||
RootCertStoreLoadError::CaFileOpenError(err.to_string())
|
||||
})?;
|
||||
let mut reader = BufReader::new(certfile);
|
||||
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
|
||||
}
|
||||
CaData::Bytes(data) => {
|
||||
let mut reader = BufReader::new(Cursor::new(data));
|
||||
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
|
||||
}
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(certs) => {
|
||||
root_cert_store.add_parsable_certificates(certs);
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(root_cert_store)
|
||||
}
|
||||
|
||||
/// State provided to the process via an environment variable.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct NpmProcessState {
|
||||
pub kind: NpmProcessStateKind,
|
||||
pub local_node_modules_path: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum NpmProcessStateKind {
|
||||
Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
|
||||
Byonm,
|
||||
}
|
||||
|
||||
pub static NPM_PROCESS_STATE: LazyLock<Option<NpmProcessState>> =
|
||||
LazyLock::new(|| {
|
||||
use deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
|
||||
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
|
||||
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
|
||||
let fd = fd.parse::<usize>().ok()?;
|
||||
let mut file = {
|
||||
use deno_runtime::deno_io::FromRawIoHandle;
|
||||
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
|
||||
};
|
||||
let mut buf = Vec::new();
|
||||
// seek to beginning. after the file is written the position will be inherited by this subprocess,
|
||||
// and also this file might have been read before
|
||||
file.seek(std::io::SeekFrom::Start(0)).unwrap();
|
||||
file
|
||||
.read_to_end(&mut buf)
|
||||
.inspect_err(|e| {
|
||||
log::error!("failed to read npm process state from fd {fd}: {e}");
|
||||
})
|
||||
.ok()?;
|
||||
let state: NpmProcessState = serde_json::from_slice(&buf)
|
||||
.inspect_err(|e| {
|
||||
log::error!(
|
||||
"failed to deserialize npm process state: {e} {}",
|
||||
String::from_utf8_lossy(&buf)
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
Some(state)
|
||||
});
|
||||
|
||||
pub fn resolve_npm_resolution_snapshot(
|
||||
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, PackageIdNotFoundError>
|
||||
{
|
||||
if let Some(NpmProcessStateKind::Snapshot(snapshot)) =
|
||||
NPM_PROCESS_STATE.as_ref().map(|s| &s.kind)
|
||||
{
|
||||
// TODO(bartlomieju): remove this clone
|
||||
Ok(Some(snapshot.clone().into_valid()?))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize)]
|
||||
pub struct UnstableConfig {
|
||||
// TODO(bartlomieju): remove in Deno 2.5
|
||||
pub legacy_flag_enabled: bool, // --unstable
|
||||
pub bare_node_builtins: bool,
|
||||
pub detect_cjs: bool,
|
||||
pub sloppy_imports: bool,
|
||||
pub npm_lazy_caching: bool,
|
||||
pub features: Vec<String>, // --unstable-kv --unstable-cron
|
||||
}
|
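Note: `get_root_cert_store` above builds a `RootCertStore` from the CLI options plus the `DENO_TLS_CA_STORE` and `DENO_CERT` environment variables. Below is a minimal caller sketch, not part of the diff, assuming the new `deno_lib` crate is available as a dependency; the CA file path is an illustrative value.

// Sketch only: exercises deno_lib::args::get_root_cert_store as declared above.
use deno_lib::args::{get_root_cert_store, CaData, RootCertStoreLoadError};

fn build_store() -> Result<(), RootCertStoreLoadError> {
  // Mozilla bundle plus one extra PEM file (path is an example value).
  let _store = get_root_cert_store(
    None,                                          // no root dir to join relative CA paths against
    Some(vec!["mozilla".to_string()]),             // allowed stores: "system", "mozilla"
    Some(CaData::File("./my_ca.pem".to_string())), // CaData::Bytes(..) also works
  )?;
  Ok(())
}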
cli/lib/build.rs (new file, 42 lines)
@ -0,0 +1,42 @@
// Copyright 2018-2025 the Deno authors. MIT license.

fn main() {
  // todo(dsherret): remove this after Deno 2.2.0 is published and then
  // align the version of this crate with Deno then. We need to wait because
  // there was previously a deno_lib 2.2.0 published (https://crates.io/crates/deno_lib/versions)
  let version_path = std::path::Path::new(".").join("version.txt");
  println!("cargo:rerun-if-changed={}", version_path.display());
  #[allow(clippy::disallowed_methods)]
  let text = std::fs::read_to_string(version_path).unwrap();
  println!("cargo:rustc-env=DENO_VERSION={}", text);

  let commit_hash = git_commit_hash();
  println!("cargo:rustc-env=GIT_COMMIT_HASH={}", commit_hash);
  println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
  println!(
    "cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
    &commit_hash[..7]
  );
}

fn git_commit_hash() -> String {
  if let Ok(output) = std::process::Command::new("git")
    .arg("rev-list")
    .arg("-1")
    .arg("HEAD")
    .output()
  {
    if output.status.success() {
      std::str::from_utf8(&output.stdout[..40])
        .unwrap()
        .to_string()
    } else {
      // When not in git repository
      // (e.g. when the user install by `cargo install deno`)
      "UNKNOWN".to_string()
    }
  } else {
    // When there is no git command for some reason
    "UNKNOWN".to_string()
  }
}
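The build script exposes the crate version and git commit to the compiler through `cargo:rustc-env` directives. A sketch of a compile-time consumer follows; the function name is illustrative and only the env var names come from the script above.

// Sketch only: reads the env vars emitted by build.rs at compile time.
pub fn long_version() -> String {
  format!(
    "{} (commit {})",
    env!("DENO_VERSION").trim(),   // contents of version.txt
    env!("GIT_COMMIT_HASH_SHORT"), // first 7 chars of the hash, or "UNKNOWN"
  )
}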
cli/lib/clippy.toml (new file, 48 lines)
@ -0,0 +1,48 @@
|
|||
disallowed-methods = [
|
||||
{ path = "std::env::current_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::is_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::is_file", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::is_symlink", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::read_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::read_link", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::try_exists", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::exists", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::is_file", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::read_link", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::env::set_current_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::env::temp_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::canonicalize", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::copy", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::create_dir_all", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::create_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::DirBuilder::new", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::hard_link", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::OpenOptions::new", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::read_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::read_link", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::read_to_string", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::read", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::remove_dir_all", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::remove_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::remove_file", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::rename", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::set_permissions", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::write", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::exists", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "url::Url::to_file_path", reason = "Use deno_path_util instead" },
|
||||
{ path = "url::Url::from_file_path", reason = "Use deno_path_util instead" },
|
||||
{ path = "url::Url::from_directory_path", reason = "Use deno_path_util instead" },
|
||||
]
|
cli/lib/lib.rs (new file, 11 lines)
@ -0,0 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license.

pub mod args;
pub mod loader;
pub mod npm;
pub mod shared;
pub mod standalone;
pub mod sys;
pub mod util;
pub mod version;
pub mod worker;
cli/lib/loader.rs (new file, 213 lines)
@ -0,0 +1,213 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_media_type::MediaType;
|
||||
use deno_resolver::cjs::CjsTracker;
|
||||
use deno_resolver::npm::DenoInNpmPackageChecker;
|
||||
use deno_runtime::deno_core::ModuleSourceCode;
|
||||
use node_resolver::analyze::CjsCodeAnalyzer;
|
||||
use node_resolver::analyze::NodeCodeTranslator;
|
||||
use node_resolver::InNpmPackageChecker;
|
||||
use node_resolver::IsBuiltInNodeModuleChecker;
|
||||
use node_resolver::NpmPackageFolderResolver;
|
||||
use thiserror::Error;
|
||||
use url::Url;
|
||||
|
||||
use crate::sys::DenoLibSys;
|
||||
use crate::util::text_encoding::from_utf8_lossy_cow;
|
||||
|
||||
pub struct ModuleCodeStringSource {
|
||||
pub code: ModuleSourceCode,
|
||||
pub found_url: Url,
|
||||
pub media_type: MediaType,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, deno_error::JsError)]
|
||||
#[class(type)]
|
||||
#[error("{media_type} files are not supported in npm packages: {specifier}")]
|
||||
pub struct NotSupportedKindInNpmError {
|
||||
pub media_type: MediaType,
|
||||
pub specifier: Url,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, deno_error::JsError)]
|
||||
pub enum NpmModuleLoadError {
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
NotSupportedKindInNpm(#[from] NotSupportedKindInNpmError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
|
||||
#[class(inherit)]
|
||||
#[error("Unable to load {}{}", file_path.display(), maybe_referrer.as_ref().map(|r| format!(" imported from {}", r)).unwrap_or_default())]
|
||||
UnableToLoad {
|
||||
file_path: PathBuf,
|
||||
maybe_referrer: Option<Url>,
|
||||
#[source]
|
||||
#[inherit]
|
||||
source: std::io::Error,
|
||||
},
|
||||
#[class(inherit)]
|
||||
#[error(
|
||||
"{}",
|
||||
format_dir_import_message(file_path, maybe_referrer, suggestion)
|
||||
)]
|
||||
DirImport {
|
||||
file_path: PathBuf,
|
||||
maybe_referrer: Option<Url>,
|
||||
suggestion: Option<&'static str>,
|
||||
#[source]
|
||||
#[inherit]
|
||||
source: std::io::Error,
|
||||
},
|
||||
}
|
||||
|
||||
fn format_dir_import_message(
|
||||
file_path: &std::path::Path,
|
||||
maybe_referrer: &Option<Url>,
|
||||
suggestion: &Option<&'static str>,
|
||||
) -> String {
|
||||
// directory imports are not allowed when importing from an
|
||||
// ES module, so provide the user with a helpful error message
|
||||
let dir_path = file_path;
|
||||
let mut msg = "Directory import ".to_string();
|
||||
msg.push_str(&dir_path.to_string_lossy());
|
||||
if let Some(referrer) = maybe_referrer {
|
||||
msg.push_str(" is not supported resolving import from ");
|
||||
msg.push_str(referrer.as_str());
|
||||
if let Some(entrypoint_name) = suggestion {
|
||||
msg.push_str("\nDid you mean to import ");
|
||||
msg.push_str(entrypoint_name);
|
||||
msg.push_str(" within the directory?");
|
||||
}
|
||||
}
|
||||
msg
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct NpmModuleLoader<
|
||||
TCjsCodeAnalyzer: CjsCodeAnalyzer,
|
||||
TInNpmPackageChecker: InNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver: NpmPackageFolderResolver,
|
||||
TSys: DenoLibSys,
|
||||
> {
|
||||
cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
|
||||
sys: TSys,
|
||||
node_code_translator: Arc<
|
||||
NodeCodeTranslator<
|
||||
TCjsCodeAnalyzer,
|
||||
TInNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver,
|
||||
TSys,
|
||||
>,
|
||||
>,
|
||||
}
|
||||
|
||||
impl<
|
||||
TCjsCodeAnalyzer: CjsCodeAnalyzer,
|
||||
TInNpmPackageChecker: InNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver: NpmPackageFolderResolver,
|
||||
TSys: DenoLibSys,
|
||||
>
|
||||
NpmModuleLoader<
|
||||
TCjsCodeAnalyzer,
|
||||
TInNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver,
|
||||
TSys,
|
||||
>
|
||||
{
|
||||
pub fn new(
|
||||
cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
|
||||
node_code_translator: Arc<
|
||||
NodeCodeTranslator<
|
||||
TCjsCodeAnalyzer,
|
||||
TInNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver,
|
||||
TSys,
|
||||
>,
|
||||
>,
|
||||
sys: TSys,
|
||||
) -> Self {
|
||||
Self {
|
||||
cjs_tracker,
|
||||
node_code_translator,
|
||||
sys,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn load(
|
||||
&self,
|
||||
specifier: &Url,
|
||||
maybe_referrer: Option<&Url>,
|
||||
) -> Result<ModuleCodeStringSource, NpmModuleLoadError> {
|
||||
let file_path = deno_path_util::url_to_file_path(specifier)?;
|
||||
let code = self.sys.fs_read(&file_path).map_err(|source| {
|
||||
if self.sys.fs_is_dir_no_err(&file_path) {
|
||||
let suggestion = ["index.mjs", "index.js", "index.cjs"]
|
||||
.into_iter()
|
||||
.find(|e| self.sys.fs_is_file_no_err(file_path.join(e)));
|
||||
NpmModuleLoadError::DirImport {
|
||||
file_path,
|
||||
maybe_referrer: maybe_referrer.cloned(),
|
||||
suggestion,
|
||||
source,
|
||||
}
|
||||
} else {
|
||||
NpmModuleLoadError::UnableToLoad {
|
||||
file_path,
|
||||
maybe_referrer: maybe_referrer.cloned(),
|
||||
source,
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
if media_type.is_emittable() {
|
||||
return Err(NpmModuleLoadError::NotSupportedKindInNpm(
|
||||
NotSupportedKindInNpmError {
|
||||
media_type,
|
||||
specifier: specifier.clone(),
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
|
||||
// translate cjs to esm if it's cjs and inject node globals
|
||||
let code = from_utf8_lossy_cow(code);
|
||||
ModuleSourceCode::String(
|
||||
self
|
||||
.node_code_translator
|
||||
.translate_cjs_to_esm(specifier, Some(code))
|
||||
.await?
|
||||
.into_owned()
|
||||
.into(),
|
||||
)
|
||||
} else {
|
||||
// esm and json code is untouched
|
||||
ModuleSourceCode::Bytes(match code {
|
||||
Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
|
||||
Cow::Borrowed(bytes) => bytes.into(),
|
||||
})
|
||||
};
|
||||
|
||||
Ok(ModuleCodeStringSource {
|
||||
code,
|
||||
found_url: specifier.clone(),
|
||||
media_type: MediaType::from_specifier(specifier),
|
||||
})
|
||||
}
|
||||
}
|
cli/lib/npm/mod.rs (new file, 80 lines)
@ -0,0 +1,80 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
mod permission_checker;
|
||||
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_resolver::npm::ByonmNpmResolver;
|
||||
use deno_resolver::npm::ManagedNpmResolverRc;
|
||||
use deno_resolver::npm::NpmResolver;
|
||||
use deno_runtime::deno_process::NpmProcessStateProvider;
|
||||
use deno_runtime::deno_process::NpmProcessStateProviderRc;
|
||||
pub use permission_checker::NpmRegistryReadPermissionChecker;
|
||||
pub use permission_checker::NpmRegistryReadPermissionCheckerMode;
|
||||
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::sys::DenoLibSys;
|
||||
|
||||
pub fn create_npm_process_state_provider<TSys: DenoLibSys>(
|
||||
npm_resolver: &NpmResolver<TSys>,
|
||||
) -> NpmProcessStateProviderRc {
|
||||
match npm_resolver {
|
||||
NpmResolver::Byonm(byonm_npm_resolver) => {
|
||||
Arc::new(ByonmNpmProcessStateProvider(byonm_npm_resolver.clone()))
|
||||
}
|
||||
NpmResolver::Managed(managed_npm_resolver) => {
|
||||
Arc::new(ManagedNpmProcessStateProvider(managed_npm_resolver.clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn npm_process_state(
|
||||
snapshot: ValidSerializedNpmResolutionSnapshot,
|
||||
node_modules_path: Option<&Path>,
|
||||
) -> String {
|
||||
serde_json::to_string(&NpmProcessState {
|
||||
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
|
||||
local_node_modules_path: node_modules_path
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ManagedNpmProcessStateProvider<TSys: DenoLibSys>(
|
||||
pub ManagedNpmResolverRc<TSys>,
|
||||
);
|
||||
|
||||
impl<TSys: DenoLibSys> NpmProcessStateProvider
|
||||
for ManagedNpmProcessStateProvider<TSys>
|
||||
{
|
||||
fn get_npm_process_state(&self) -> String {
|
||||
npm_process_state(
|
||||
self.0.resolution().serialized_valid_snapshot(),
|
||||
self.0.root_node_modules_path(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ByonmNpmProcessStateProvider<TSys: DenoLibSys>(
|
||||
pub Arc<ByonmNpmResolver<TSys>>,
|
||||
);
|
||||
|
||||
impl<TSys: DenoLibSys> NpmProcessStateProvider
|
||||
for ByonmNpmProcessStateProvider<TSys>
|
||||
{
|
||||
fn get_npm_process_state(&self) -> String {
|
||||
serde_json::to_string(&NpmProcessState {
|
||||
kind: NpmProcessStateKind::Byonm,
|
||||
local_node_modules_path: self
|
||||
.0
|
||||
.root_node_modules_path()
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
}
|
120 cli/lib/npm/permission_checker.rs Normal file
@@ -0,0 +1,120 @@
// Copyright 2018-2025 the Deno authors. MIT license.

use std::borrow::Cow;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;

use deno_error::JsErrorBox;
use deno_runtime::deno_node::NodePermissions;
use parking_lot::Mutex;

use crate::sys::DenoLibSys;

#[derive(Debug)]
pub enum NpmRegistryReadPermissionCheckerMode {
  Byonm,
  Global(PathBuf),
  Local(PathBuf),
}

#[derive(Debug)]
pub struct NpmRegistryReadPermissionChecker<TSys: DenoLibSys> {
  sys: TSys,
  cache: Mutex<HashMap<PathBuf, PathBuf>>,
  mode: NpmRegistryReadPermissionCheckerMode,
}

#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(inherit)]
#[error("failed canonicalizing '{path}'")]
struct EnsureRegistryReadPermissionError {
  path: PathBuf,
  #[source]
  #[inherit]
  source: std::io::Error,
}

impl<TSys: DenoLibSys> NpmRegistryReadPermissionChecker<TSys> {
  pub fn new(sys: TSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self {
    Self {
      sys,
      cache: Default::default(),
      mode,
    }
  }

  #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
  pub fn ensure_read_permission<'a>(
    &self,
    permissions: &mut dyn NodePermissions,
    path: &'a Path,
  ) -> Result<Cow<'a, Path>, JsErrorBox> {
    if permissions.query_read_all() {
      return Ok(Cow::Borrowed(path)); // skip permissions checks below
    }

    match &self.mode {
      NpmRegistryReadPermissionCheckerMode::Byonm => {
        if path.components().any(|c| c.as_os_str() == "node_modules") {
          Ok(Cow::Borrowed(path))
        } else {
          permissions
            .check_read_path(path)
            .map_err(JsErrorBox::from_err)
        }
      }
      NpmRegistryReadPermissionCheckerMode::Global(registry_path)
      | NpmRegistryReadPermissionCheckerMode::Local(registry_path) => {
        // allow reading if it's in the node_modules
        let is_path_in_node_modules = path.starts_with(registry_path)
          && path
            .components()
            .all(|c| !matches!(c, std::path::Component::ParentDir));

        if is_path_in_node_modules {
          let mut cache = self.cache.lock();
          let mut canonicalize =
            |path: &Path| -> Result<Option<PathBuf>, JsErrorBox> {
              match cache.get(path) {
                Some(canon) => Ok(Some(canon.clone())),
                None => match self.sys.fs_canonicalize(path) {
                  Ok(canon) => {
                    cache.insert(path.to_path_buf(), canon.clone());
                    Ok(Some(canon))
                  }
                  Err(e) => {
                    if e.kind() == ErrorKind::NotFound {
                      return Ok(None);
                    }
                    Err(JsErrorBox::from_err(
                      EnsureRegistryReadPermissionError {
                        path: path.to_path_buf(),
                        source: e,
                      },
                    ))
                  }
                },
              }
            };
          if let Some(registry_path_canon) = canonicalize(registry_path)? {
            if let Some(path_canon) = canonicalize(path)? {
              if path_canon.starts_with(registry_path_canon) {
                return Ok(Cow::Owned(path_canon));
              }
            } else if path.starts_with(registry_path_canon)
              || path.starts_with(registry_path)
            {
              return Ok(Cow::Borrowed(path));
            }
          }
        }

        permissions
          .check_read_path(path)
          .map_err(JsErrorBox::from_err)
      }
    }
  }
}
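Editor's note (not part of the diff above): the Global/Local branch of ensure_read_permission memoizes canonicalized paths in a Mutex<HashMap<PathBuf, PathBuf>> so repeated reads under the registry root avoid extra syscalls. A minimal std-only sketch of that memoization idea follows; the CanonCache name and use of std::sync::Mutex and std::fs are illustrative assumptions — the diff itself goes through parking_lot and the DenoLibSys abstraction.

// Editor's sketch: memoized canonicalization with NotFound treated as "absent".
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use std::sync::Mutex;

struct CanonCache {
  cache: Mutex<HashMap<PathBuf, PathBuf>>,
}

impl CanonCache {
  fn new() -> Self {
    Self {
      cache: Mutex::new(HashMap::new()),
    }
  }

  /// Canonicalizes `path`, remembering successful results so later checks
  /// against the same registry root are served from memory. Returns
  /// Ok(None) when the path does not exist, mirroring the NotFound
  /// handling in the diff.
  fn canonicalize(&self, path: &Path) -> std::io::Result<Option<PathBuf>> {
    let mut cache = self.cache.lock().unwrap();
    if let Some(canon) = cache.get(path) {
      return Ok(Some(canon.clone()));
    }
    match std::fs::canonicalize(path) {
      Ok(canon) => {
        cache.insert(path.to_path_buf(), canon.clone());
        Ok(Some(canon))
      }
      Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
      Err(e) => Err(e),
    }
  }
}

fn main() -> std::io::Result<()> {
  let cache = CanonCache::new();
  // The second call is answered from the cache.
  let first = cache.canonicalize(Path::new("."))?;
  let second = cache.canonicalize(Path::new("."))?;
  assert_eq!(first, second);
  Ok(())
}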
@@ -1,8 +1,11 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 
 /// This module is shared between build script and the binaries. Use it sparsely.
-use deno_core::anyhow::bail;
-use deno_core::error::AnyError;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+#[error("Unrecognized release channel: {0}")]
+pub struct UnrecognizedReleaseChannelError(pub String);
 
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub enum ReleaseChannel {
@@ -50,13 +53,17 @@ impl ReleaseChannel {
   // NOTE(bartlomieju): do not ever change these values, tools like `patchver`
   // rely on them.
   #[allow(unused)]
-  pub fn deserialize(str_: &str) -> Result<Self, AnyError> {
+  pub fn deserialize(
+    str_: &str,
+  ) -> Result<Self, UnrecognizedReleaseChannelError> {
     Ok(match str_ {
       "stable" => Self::Stable,
       "canary" => Self::Canary,
       "rc" => Self::Rc,
       "lts" => Self::Lts,
-      unknown => bail!("Unrecognized release channel: {}", unknown),
+      unknown => {
+        return Err(UnrecognizedReleaseChannelError(unknown.to_string()))
+      }
     })
   }
 }
389 cli/lib/standalone/binary.rs Normal file
@@ -0,0 +1,389 @@
// Copyright 2018-2025 the Deno authors. MIT license.

use std::borrow::Cow;
use std::collections::BTreeMap;

use deno_config::workspace::PackageJsonDepResolution;
use deno_media_type::MediaType;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_semver::Version;
use indexmap::IndexMap;
use node_resolver::analyze::CjsAnalysisExports;
use serde::Deserialize;
use serde::Serialize;
use url::Url;

use super::virtual_fs::FileSystemCaseSensitivity;
use crate::args::UnstableConfig;

pub const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";

pub trait DenoRtDeserializable<'a>: Sized {
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)>;
}

impl<'a> DenoRtDeserializable<'a> for Cow<'a, [u8]> {
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    let (input, data) = read_bytes_with_u32_len(input)?;
    Ok((input, Cow::Borrowed(data)))
  }
}

pub trait DenoRtSerializable<'a> {
  fn serialize(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
  );
}

#[derive(Deserialize, Serialize)]
pub enum NodeModules {
  Managed {
    /// Relative path for the node_modules directory in the vfs.
    node_modules_dir: Option<String>,
  },
  Byonm {
    root_node_modules_dir: Option<String>,
  },
}

#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolverImportMap {
  pub specifier: String,
  pub json: String,
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SerializedResolverWorkspaceJsrPackage {
  pub relative_base: String,
  pub name: String,
  pub version: Option<Version>,
  pub exports: IndexMap<String, String>,
}

#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolver {
  pub import_map: Option<SerializedWorkspaceResolverImportMap>,
  pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
  pub package_jsons: BTreeMap<String, serde_json::Value>,
  pub pkg_json_resolution: PackageJsonDepResolution,
}

// Note: Don't use hashmaps/hashsets. Ensure the serialization
// is deterministic.
#[derive(Deserialize, Serialize)]
pub struct Metadata {
  pub argv: Vec<String>,
  pub seed: Option<u64>,
  pub code_cache_key: Option<u64>,
  pub permissions: PermissionsOptions,
  pub location: Option<Url>,
  pub v8_flags: Vec<String>,
  pub log_level: Option<log::Level>,
  pub ca_stores: Option<Vec<String>>,
  pub ca_data: Option<Vec<u8>>,
  pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
  pub env_vars_from_env_file: IndexMap<String, String>,
  pub workspace_resolver: SerializedWorkspaceResolver,
  pub entrypoint_key: String,
  pub node_modules: Option<NodeModules>,
  pub unstable_config: UnstableConfig,
  pub otel_config: OtelConfig,
  pub vfs_case_sensitivity: FileSystemCaseSensitivity,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpecifierId(u32);

impl SpecifierId {
  pub fn new(id: u32) -> Self {
    Self(id)
  }
}

impl<'a> capacity_builder::BytesAppendable<'a> for SpecifierId {
  fn append_to_builder<TBytes: capacity_builder::BytesType>(
    self,
    builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
  ) {
    builder.append_le(self.0);
  }
}

impl<'a> DenoRtSerializable<'a> for SpecifierId {
  fn serialize(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
  ) {
    builder.append_le(self.0);
  }
}

impl<'a> DenoRtDeserializable<'a> for SpecifierId {
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    let (input, id) = read_u32(input)?;
    Ok((input, Self(id)))
  }
}

#[derive(Deserialize, Serialize)]
pub enum CjsExportAnalysisEntry {
  Esm,
  Cjs(CjsAnalysisExports),
}

const HAS_TRANSPILED_FLAG: u8 = 1 << 0;
const HAS_SOURCE_MAP_FLAG: u8 = 1 << 1;
const HAS_CJS_EXPORT_ANALYSIS_FLAG: u8 = 1 << 2;

pub struct RemoteModuleEntry<'a> {
  pub media_type: MediaType,
  pub data: Cow<'a, [u8]>,
  pub maybe_transpiled: Option<Cow<'a, [u8]>>,
  pub maybe_source_map: Option<Cow<'a, [u8]>>,
  pub maybe_cjs_export_analysis: Option<Cow<'a, [u8]>>,
}

impl<'a> DenoRtSerializable<'a> for RemoteModuleEntry<'a> {
  fn serialize(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
  ) {
    fn append_maybe_data<'a>(
      builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
      maybe_data: Option<&'a [u8]>,
    ) {
      if let Some(data) = maybe_data {
        builder.append_le(data.len() as u32);
        builder.append(data);
      }
    }

    let mut has_data_flags = 0;
    if self.maybe_transpiled.is_some() {
      has_data_flags |= HAS_TRANSPILED_FLAG;
    }
    if self.maybe_source_map.is_some() {
      has_data_flags |= HAS_SOURCE_MAP_FLAG;
    }
    if self.maybe_cjs_export_analysis.is_some() {
      has_data_flags |= HAS_CJS_EXPORT_ANALYSIS_FLAG;
    }
    builder.append(serialize_media_type(self.media_type));
    builder.append_le(self.data.len() as u32);
    builder.append(self.data.as_ref());
    builder.append(has_data_flags);
    append_maybe_data(builder, self.maybe_transpiled.as_deref());
    append_maybe_data(builder, self.maybe_source_map.as_deref());
    append_maybe_data(builder, self.maybe_cjs_export_analysis.as_deref());
  }
}

impl<'a> DenoRtDeserializable<'a> for RemoteModuleEntry<'a> {
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    #[allow(clippy::type_complexity)]
    fn deserialize_data_if_has_flag(
      input: &[u8],
      has_data_flags: u8,
      flag: u8,
    ) -> std::io::Result<(&[u8], Option<Cow<[u8]>>)> {
      if has_data_flags & flag != 0 {
        let (input, bytes) = read_bytes_with_u32_len(input)?;
        Ok((input, Some(Cow::Borrowed(bytes))))
      } else {
        Ok((input, None))
      }
    }

    let (input, media_type) = MediaType::deserialize(input)?;
    let (input, data) = read_bytes_with_u32_len(input)?;
    let (input, has_data_flags) = read_u8(input)?;
    let (input, maybe_transpiled) =
      deserialize_data_if_has_flag(input, has_data_flags, HAS_TRANSPILED_FLAG)?;
    let (input, maybe_source_map) =
      deserialize_data_if_has_flag(input, has_data_flags, HAS_SOURCE_MAP_FLAG)?;
    let (input, maybe_cjs_export_analysis) = deserialize_data_if_has_flag(
      input,
      has_data_flags,
      HAS_CJS_EXPORT_ANALYSIS_FLAG,
    )?;
    Ok((
      input,
      Self {
        media_type,
        data: Cow::Borrowed(data),
        maybe_transpiled,
        maybe_source_map,
        maybe_cjs_export_analysis,
      },
    ))
  }
}

fn serialize_media_type(media_type: MediaType) -> u8 {
  match media_type {
    MediaType::JavaScript => 0,
    MediaType::Jsx => 1,
    MediaType::Mjs => 2,
    MediaType::Cjs => 3,
    MediaType::TypeScript => 4,
    MediaType::Mts => 5,
    MediaType::Cts => 6,
    MediaType::Dts => 7,
    MediaType::Dmts => 8,
    MediaType::Dcts => 9,
    MediaType::Tsx => 10,
    MediaType::Json => 11,
    MediaType::Wasm => 12,
    MediaType::Css => 13,
    MediaType::SourceMap => 14,
    MediaType::Unknown => 15,
  }
}

impl<'a> DenoRtDeserializable<'a> for MediaType {
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    let (input, value) = read_u8(input)?;
    let value = match value {
      0 => MediaType::JavaScript,
      1 => MediaType::Jsx,
      2 => MediaType::Mjs,
      3 => MediaType::Cjs,
      4 => MediaType::TypeScript,
      5 => MediaType::Mts,
      6 => MediaType::Cts,
      7 => MediaType::Dts,
      8 => MediaType::Dmts,
      9 => MediaType::Dcts,
      10 => MediaType::Tsx,
      11 => MediaType::Json,
      12 => MediaType::Wasm,
      13 => MediaType::Css,
      14 => MediaType::SourceMap,
      15 => MediaType::Unknown,
      value => {
        return Err(std::io::Error::new(
          std::io::ErrorKind::InvalidData,
          format!("Unknown media type value: {value}"),
        ))
      }
    };
    Ok((input, value))
  }
}

/// Data stored keyed by specifier.
pub struct SpecifierDataStore<TData> {
  data: IndexMap<SpecifierId, TData>,
}

impl<TData> Default for SpecifierDataStore<TData> {
  fn default() -> Self {
    Self {
      data: IndexMap::new(),
    }
  }
}

impl<TData> SpecifierDataStore<TData> {
  pub fn with_capacity(capacity: usize) -> Self {
    Self {
      data: IndexMap::with_capacity(capacity),
    }
  }

  pub fn iter(&self) -> impl Iterator<Item = (SpecifierId, &TData)> {
    self.data.iter().map(|(k, v)| (*k, v))
  }

  #[allow(clippy::len_without_is_empty)]
  pub fn len(&self) -> usize {
    self.data.len()
  }

  pub fn contains(&self, specifier: SpecifierId) -> bool {
    self.data.contains_key(&specifier)
  }

  pub fn add(&mut self, specifier: SpecifierId, value: TData) {
    self.data.insert(specifier, value);
  }

  pub fn get(&self, specifier: SpecifierId) -> Option<&TData> {
    self.data.get(&specifier)
  }
}

impl<'a, TData> SpecifierDataStore<TData>
where
  TData: DenoRtSerializable<'a> + 'a,
{
  pub fn serialize(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
  ) {
    builder.append_le(self.len() as u32);
    for (specifier, value) in self.iter() {
      builder.append(specifier);
      value.serialize(builder);
    }
  }
}

impl<'a, TData> DenoRtDeserializable<'a> for SpecifierDataStore<TData>
where
  TData: DenoRtDeserializable<'a>,
{
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    let (input, len) = read_u32_as_usize(input)?;
    let mut data = IndexMap::with_capacity(len);
    let mut input = input;
    for _ in 0..len {
      let (new_input, specifier) = SpecifierId::deserialize(input)?;
      let (new_input, value) = TData::deserialize(new_input)?;
      data.insert(specifier, value);
      input = new_input;
    }
    Ok((input, Self { data }))
  }
}

fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
  let (input, len) = read_u32_as_usize(input)?;
  let (input, data) = read_bytes(input, len)?;
  Ok((input, data))
}

fn read_u32_as_usize(input: &[u8]) -> std::io::Result<(&[u8], usize)> {
  read_u32(input).map(|(input, len)| (input, len as usize))
}

fn read_u32(input: &[u8]) -> std::io::Result<(&[u8], u32)> {
  let (input, len_bytes) = read_bytes(input, 4)?;
  let len = u32::from_le_bytes(len_bytes.try_into().unwrap());
  Ok((input, len))
}

fn read_u8(input: &[u8]) -> std::io::Result<(&[u8], u8)> {
  check_has_len(input, 1)?;
  Ok((&input[1..], input[0]))
}

fn read_bytes(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> {
  check_has_len(input, len)?;
  let (len_bytes, input) = input.split_at(len);
  Ok((input, len_bytes))
}

#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> std::io::Result<()> {
  if input.len() < len {
    Err(std::io::Error::new(
      std::io::ErrorKind::InvalidData,
      "Unexpected end of data",
    ))
  } else {
    Ok(())
  }
}
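Editor's note (not part of the diff above): the read_u32 / read_bytes_with_u32_len helpers in binary.rs consume a simple framing — a little-endian u32 length followed by that many raw bytes. A minimal std-only round-trip sketch of that framing follows; the writer function and its name are illustrative assumptions, the reader mirrors the parsing logic shown in the diff.

// Editor's sketch: length-prefixed (LE u32) byte framing, round-tripped.
fn write_bytes_with_u32_len(out: &mut Vec<u8>, data: &[u8]) {
  // little-endian u32 length, then the raw bytes
  out.extend_from_slice(&(data.len() as u32).to_le_bytes());
  out.extend_from_slice(data);
}

fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
  if input.len() < 4 {
    return Err(std::io::Error::new(
      std::io::ErrorKind::InvalidData,
      "Unexpected end of data",
    ));
  }
  let (len_bytes, rest) = input.split_at(4);
  let len = u32::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
  if rest.len() < len {
    return Err(std::io::Error::new(
      std::io::ErrorKind::InvalidData,
      "Unexpected end of data",
    ));
  }
  // return (remaining input, parsed bytes), like the helpers in the diff
  let (data, rest) = rest.split_at(len);
  Ok((rest, data))
}

fn main() -> std::io::Result<()> {
  let mut buf = Vec::new();
  write_bytes_with_u32_len(&mut buf, b"hello");
  write_bytes_with_u32_len(&mut buf, b"world");
  let (rest, first) = read_bytes_with_u32_len(&buf)?;
  let (rest, second) = read_bytes_with_u32_len(rest)?;
  assert_eq!(first, &b"hello"[..]);
  assert_eq!(second, &b"world"[..]);
  assert!(rest.is_empty());
  Ok(())
}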
4 cli/lib/standalone/mod.rs Normal file
@@ -0,0 +1,4 @@
// Copyright 2018-2025 the Deno authors. MIT license.

pub mod binary;
pub mod virtual_fs;
999 cli/lib/standalone/virtual_fs.rs Normal file
@@ -0,0 +1,999 @@
// Copyright 2018-2025 the Deno authors. MIT license.

use std::cmp::Ordering;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;

use deno_path_util::normalize_path;
use deno_path_util::strip_unc_prefix;
use deno_runtime::colors;
use deno_runtime::deno_core::anyhow::bail;
use deno_runtime::deno_core::anyhow::Context;
use deno_runtime::deno_core::error::AnyError;
use indexmap::IndexSet;
use serde::de;
use serde::de::SeqAccess;
use serde::de::Visitor;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::Serializer;

#[derive(Debug, PartialEq, Eq)]
pub enum WindowsSystemRootablePath {
  /// The root of the system above any drive letters.
  WindowSystemRoot,
  Path(PathBuf),
}

impl WindowsSystemRootablePath {
  pub fn root_for_current_os() -> Self {
    if cfg!(windows) {
      WindowsSystemRootablePath::WindowSystemRoot
    } else {
      WindowsSystemRootablePath::Path(PathBuf::from("/"))
    }
  }

  pub fn join(&self, name_component: &str) -> PathBuf {
    // this method doesn't handle multiple components
    debug_assert!(
      !name_component.contains('\\'),
      "Invalid component: {}",
      name_component
    );
    debug_assert!(
      !name_component.contains('/'),
      "Invalid component: {}",
      name_component
    );

    match self {
      WindowsSystemRootablePath::WindowSystemRoot => {
        // windows drive letter
        PathBuf::from(&format!("{}\\", name_component))
      }
      WindowsSystemRootablePath::Path(path) => path.join(name_component),
    }
  }
}

#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum FileSystemCaseSensitivity {
  #[serde(rename = "s")]
  Sensitive,
  #[serde(rename = "i")]
  Insensitive,
}
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct VirtualDirectoryEntries(Vec<VfsEntry>);

impl VirtualDirectoryEntries {
  pub fn new(mut entries: Vec<VfsEntry>) -> Self {
    // needs to be sorted by name
    entries.sort_by(|a, b| a.name().cmp(b.name()));
    Self(entries)
  }

  pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, VfsEntry> {
    self.0.iter_mut()
  }

  pub fn iter(&self) -> std::slice::Iter<'_, VfsEntry> {
    self.0.iter()
  }

  pub fn take_inner(&mut self) -> Vec<VfsEntry> {
    std::mem::take(&mut self.0)
  }

  pub fn is_empty(&self) -> bool {
    self.0.is_empty()
  }

  pub fn len(&self) -> usize {
    self.0.len()
  }

  pub fn get_by_name(
    &self,
    name: &str,
    case_sensitivity: FileSystemCaseSensitivity,
  ) -> Option<&VfsEntry> {
    self
      .binary_search(name, case_sensitivity)
      .ok()
      .map(|index| &self.0[index])
  }

  pub fn get_mut_by_name(
    &mut self,
    name: &str,
    case_sensitivity: FileSystemCaseSensitivity,
  ) -> Option<&mut VfsEntry> {
    self
      .binary_search(name, case_sensitivity)
      .ok()
      .map(|index| &mut self.0[index])
  }

  pub fn get_mut_by_index(&mut self, index: usize) -> Option<&mut VfsEntry> {
    self.0.get_mut(index)
  }

  pub fn get_by_index(&self, index: usize) -> Option<&VfsEntry> {
    self.0.get(index)
  }

  pub fn binary_search(
    &self,
    name: &str,
    case_sensitivity: FileSystemCaseSensitivity,
  ) -> Result<usize, usize> {
    match case_sensitivity {
      FileSystemCaseSensitivity::Sensitive => {
        self.0.binary_search_by(|e| e.name().cmp(name))
      }
      FileSystemCaseSensitivity::Insensitive => self.0.binary_search_by(|e| {
        e.name()
          .chars()
          .zip(name.chars())
          .map(|(a, b)| a.to_ascii_lowercase().cmp(&b.to_ascii_lowercase()))
          .find(|&ord| ord != Ordering::Equal)
          .unwrap_or_else(|| e.name().len().cmp(&name.len()))
      }),
    }
  }

  pub fn insert(
    &mut self,
    entry: VfsEntry,
    case_sensitivity: FileSystemCaseSensitivity,
  ) -> usize {
    match self.binary_search(entry.name(), case_sensitivity) {
      Ok(index) => {
        self.0[index] = entry;
        index
      }
      Err(insert_index) => {
        self.0.insert(insert_index, entry);
        insert_index
      }
    }
  }

  pub fn insert_or_modify(
    &mut self,
    name: &str,
    case_sensitivity: FileSystemCaseSensitivity,
    on_insert: impl FnOnce() -> VfsEntry,
    on_modify: impl FnOnce(&mut VfsEntry),
  ) -> usize {
    match self.binary_search(name, case_sensitivity) {
      Ok(index) => {
        on_modify(&mut self.0[index]);
        index
      }
      Err(insert_index) => {
        self.0.insert(insert_index, on_insert());
        insert_index
      }
    }
  }

  pub fn remove(&mut self, index: usize) -> VfsEntry {
    self.0.remove(index)
  }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualDirectory {
  #[serde(rename = "n")]
  pub name: String,
  // should be sorted by name
  #[serde(rename = "e")]
  pub entries: VirtualDirectoryEntries,
}

#[derive(Debug, Clone, Copy)]
pub struct OffsetWithLength {
  pub offset: u64,
  pub len: u64,
}

// serialize as an array in order to save space
impl Serialize for OffsetWithLength {
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    let array = [self.offset, self.len];
    array.serialize(serializer)
  }
}

impl<'de> Deserialize<'de> for OffsetWithLength {
  fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
  where
    D: Deserializer<'de>,
  {
    struct OffsetWithLengthVisitor;

    impl<'de> Visitor<'de> for OffsetWithLengthVisitor {
      type Value = OffsetWithLength;

      fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("an array with two elements: [offset, len]")
      }

      fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
      where
        A: SeqAccess<'de>,
      {
        let offset = seq
          .next_element()?
          .ok_or_else(|| de::Error::invalid_length(0, &self))?;
        let len = seq
          .next_element()?
          .ok_or_else(|| de::Error::invalid_length(1, &self))?;
        Ok(OffsetWithLength { offset, len })
      }
    }

    deserializer.deserialize_seq(OffsetWithLengthVisitor)
  }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VirtualFile {
  #[serde(rename = "n")]
  pub name: String,
  #[serde(rename = "o")]
  pub offset: OffsetWithLength,
  #[serde(rename = "m", skip_serializing_if = "Option::is_none")]
  pub transpiled_offset: Option<OffsetWithLength>,
  #[serde(rename = "c", skip_serializing_if = "Option::is_none")]
  pub cjs_export_analysis_offset: Option<OffsetWithLength>,
  #[serde(rename = "s", skip_serializing_if = "Option::is_none")]
  pub source_map_offset: Option<OffsetWithLength>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualSymlinkParts(Vec<String>);

impl VirtualSymlinkParts {
  pub fn from_path(path: &Path) -> Self {
    Self(
      path
        .components()
        .filter(|c| !matches!(c, std::path::Component::RootDir))
        .map(|c| c.as_os_str().to_string_lossy().to_string())
        .collect(),
    )
  }

  pub fn take_parts(&mut self) -> Vec<String> {
    std::mem::take(&mut self.0)
  }

  pub fn parts(&self) -> &[String] {
    &self.0
  }

  pub fn set_parts(&mut self, parts: Vec<String>) {
    self.0 = parts;
  }

  pub fn display(&self) -> String {
    self.0.join("/")
  }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualSymlink {
  #[serde(rename = "n")]
  pub name: String,
  #[serde(rename = "p")]
  pub dest_parts: VirtualSymlinkParts,
}

impl VirtualSymlink {
  pub fn resolve_dest_from_root(&self, root: &Path) -> PathBuf {
    let mut dest = root.to_path_buf();
    for part in &self.dest_parts.0 {
      dest.push(part);
    }
    dest
  }
}

#[derive(Debug, Copy, Clone)]
pub enum VfsEntryRef<'a> {
  Dir(&'a VirtualDirectory),
  File(&'a VirtualFile),
  Symlink(&'a VirtualSymlink),
}

impl VfsEntryRef<'_> {
  pub fn name(&self) -> &str {
    match self {
      Self::Dir(dir) => &dir.name,
      Self::File(file) => &file.name,
      Self::Symlink(symlink) => &symlink.name,
    }
  }
}

// todo(dsherret): we should store this more efficiently in the binary
#[derive(Debug, Serialize, Deserialize)]
pub enum VfsEntry {
  Dir(VirtualDirectory),
  File(VirtualFile),
  Symlink(VirtualSymlink),
}

impl VfsEntry {
  pub fn name(&self) -> &str {
    match self {
      Self::Dir(dir) => &dir.name,
      Self::File(file) => &file.name,
      Self::Symlink(symlink) => &symlink.name,
    }
  }

  pub fn as_ref(&self) -> VfsEntryRef {
    match self {
      VfsEntry::Dir(dir) => VfsEntryRef::Dir(dir),
      VfsEntry::File(file) => VfsEntryRef::File(file),
      VfsEntry::Symlink(symlink) => VfsEntryRef::Symlink(symlink),
    }
  }
}

pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
  ".deno_compile_node_modules";

#[derive(Debug)]
pub struct BuiltVfs {
  pub root_path: WindowsSystemRootablePath,
  pub case_sensitivity: FileSystemCaseSensitivity,
  pub entries: VirtualDirectoryEntries,
  pub files: Vec<Vec<u8>>,
}

#[derive(Debug, Default)]
struct FilesData {
  files: Vec<Vec<u8>>,
  current_offset: u64,
  file_offsets: HashMap<(String, usize), OffsetWithLength>,
}

impl FilesData {
  pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> {
    if offset.len == 0 {
      return Some(&[]);
    }

    // the debug assertions in this method should never happen
    // because it would indicate providing an offset not in the vfs
    let mut count: u64 = 0;
    for file in &self.files {
      // clippy wanted a match
      match count.cmp(&offset.offset) {
        Ordering::Equal => {
          debug_assert_eq!(offset.len, file.len() as u64);
          if offset.len == file.len() as u64 {
            return Some(file);
          } else {
            return None;
          }
        }
        Ordering::Less => {
          count += file.len() as u64;
        }
        Ordering::Greater => {
          debug_assert!(false);
          return None;
        }
      }
    }
    debug_assert!(false);
    None
  }

  pub fn add_data(&mut self, data: Vec<u8>) -> OffsetWithLength {
    if data.is_empty() {
      return OffsetWithLength { offset: 0, len: 0 };
    }
    let checksum = crate::util::checksum::gen(&[&data]);
    match self.file_offsets.entry((checksum, data.len())) {
      Entry::Occupied(occupied_entry) => {
        let offset_and_len = *occupied_entry.get();
        debug_assert_eq!(data.len() as u64, offset_and_len.len);
        offset_and_len
      }
      Entry::Vacant(vacant_entry) => {
        let offset_and_len = OffsetWithLength {
          offset: self.current_offset,
          len: data.len() as u64,
        };
        vacant_entry.insert(offset_and_len);
        self.current_offset += offset_and_len.len;
        self.files.push(data);
        offset_and_len
      }
    }
  }
}

pub struct AddFileDataOptions {
  pub data: Vec<u8>,
  pub maybe_transpiled: Option<Vec<u8>>,
  pub maybe_source_map: Option<Vec<u8>>,
  pub maybe_cjs_export_analysis: Option<Vec<u8>>,
}

#[derive(Debug)]
pub struct VfsBuilder {
  executable_root: VirtualDirectory,
  files: FilesData,
  /// The minimum root directory that should be included in the VFS.
  min_root_dir: Option<WindowsSystemRootablePath>,
  case_sensitivity: FileSystemCaseSensitivity,
}

impl Default for VfsBuilder {
  fn default() -> Self {
    Self::new()
  }
}

impl VfsBuilder {
  pub fn new() -> Self {
    Self {
      executable_root: VirtualDirectory {
        name: "/".to_string(),
        entries: Default::default(),
      },
      files: Default::default(),
      min_root_dir: Default::default(),
      // This is not exactly correct because file systems on these OSes
      // may be case-sensitive or not based on the directory, but this
      // is a good enough approximation and limitation. In the future,
      // we may want to store this information per directory instead
      // depending on the feedback we get.
      case_sensitivity: if cfg!(windows) || cfg!(target_os = "macos") {
        FileSystemCaseSensitivity::Insensitive
      } else {
        FileSystemCaseSensitivity::Sensitive
      },
    }
  }

  pub fn case_sensitivity(&self) -> FileSystemCaseSensitivity {
    self.case_sensitivity
  }

  pub fn files_len(&self) -> usize {
    self.files.files.len()
  }

  pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> {
    self.files.file_bytes(offset)
  }

  /// Add a directory that might be the minimum root directory
  /// of the VFS.
  ///
  /// For example, say the user has a deno.json and specifies an
  /// import map in a parent directory. The import map won't be
  /// included in the VFS, but its base will meaning we need to
  /// tell the VFS builder to include the base of the import map
  /// by calling this method.
  pub fn add_possible_min_root_dir(&mut self, path: &Path) {
    self.add_dir_raw(path);

    match &self.min_root_dir {
      Some(WindowsSystemRootablePath::WindowSystemRoot) => {
        // already the root dir
      }
      Some(WindowsSystemRootablePath::Path(current_path)) => {
        let mut common_components = Vec::new();
        for (a, b) in current_path.components().zip(path.components()) {
          if a != b {
            break;
          }
          common_components.push(a);
        }
        if common_components.is_empty() {
          self.min_root_dir =
            Some(WindowsSystemRootablePath::root_for_current_os());
        } else {
          self.min_root_dir = Some(WindowsSystemRootablePath::Path(
            common_components.iter().collect(),
          ));
        }
      }
      None => {
        self.min_root_dir =
          Some(WindowsSystemRootablePath::Path(path.to_path_buf()));
      }
    }
  }

  pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> {
    let target_path = self.resolve_target_path(path)?;
    self.add_dir_recursive_not_symlink(&target_path)
  }

  fn add_dir_recursive_not_symlink(
    &mut self,
    path: &Path,
  ) -> Result<(), AnyError> {
    self.add_dir_raw(path);
    // ok, building fs implementation
    #[allow(clippy::disallowed_methods)]
    let read_dir = std::fs::read_dir(path)
      .with_context(|| format!("Reading {}", path.display()))?;

    let mut dir_entries =
      read_dir.into_iter().collect::<Result<Vec<_>, _>>()?;
    dir_entries.sort_by_cached_key(|entry| entry.file_name()); // determinism

    for entry in dir_entries {
      let file_type = entry.file_type()?;
      let path = entry.path();

      if file_type.is_dir() {
        self.add_dir_recursive_not_symlink(&path)?;
      } else if file_type.is_file() {
        self.add_file_at_path_not_symlink(&path)?;
      } else if file_type.is_symlink() {
        match self.add_symlink(&path) {
          Ok(target) => match target {
            SymlinkTarget::File(target) => {
              self.add_file_at_path_not_symlink(&target)?
            }
            SymlinkTarget::Dir(target) => {
              self.add_dir_recursive_not_symlink(&target)?;
            }
          },
          Err(err) => {
            log::warn!(
              "{} Failed resolving symlink. Ignoring.\n Path: {}\n Message: {:#}",
              colors::yellow("Warning"),
              path.display(),
              err
            );
          }
        }
      }
    }

    Ok(())
  }

  fn add_dir_raw(&mut self, path: &Path) -> &mut VirtualDirectory {
    log::debug!("Ensuring directory '{}'", path.display());
    debug_assert!(path.is_absolute());
    let mut current_dir = &mut self.executable_root;

    for component in path.components() {
      if matches!(component, std::path::Component::RootDir) {
        continue;
      }
      let name = component.as_os_str().to_string_lossy();
      let index = current_dir.entries.insert_or_modify(
        &name,
        self.case_sensitivity,
        || {
          VfsEntry::Dir(VirtualDirectory {
            name: name.to_string(),
            entries: Default::default(),
          })
        },
        |_| {
          // ignore
        },
      );
      match current_dir.entries.get_mut_by_index(index) {
        Some(VfsEntry::Dir(dir)) => {
          current_dir = dir;
        }
        _ => unreachable!(),
      };
    }

    current_dir
  }

  pub fn get_system_root_dir_mut(&mut self) -> &mut VirtualDirectory {
    &mut self.executable_root
  }

  pub fn get_dir_mut(&mut self, path: &Path) -> Option<&mut VirtualDirectory> {
    debug_assert!(path.is_absolute());
    let mut current_dir = &mut self.executable_root;

    for component in path.components() {
      if matches!(component, std::path::Component::RootDir) {
        continue;
      }
      let name = component.as_os_str().to_string_lossy();
      let entry = current_dir
        .entries
        .get_mut_by_name(&name, self.case_sensitivity)?;
      match entry {
        VfsEntry::Dir(dir) => {
          current_dir = dir;
        }
        _ => unreachable!("{}", path.display()),
      };
    }

    Some(current_dir)
  }

  pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> {
    // ok, building fs implementation
    #[allow(clippy::disallowed_methods)]
    let file_bytes = std::fs::read(path)
      .with_context(|| format!("Reading {}", path.display()))?;
    self.add_file_with_data(
      path,
      AddFileDataOptions {
        data: file_bytes,
        maybe_cjs_export_analysis: None,
        maybe_transpiled: None,
        maybe_source_map: None,
      },
    )
  }

  fn add_file_at_path_not_symlink(
    &mut self,
    path: &Path,
  ) -> Result<(), AnyError> {
    // ok, building fs implementation
    #[allow(clippy::disallowed_methods)]
    let file_bytes = std::fs::read(path)
      .with_context(|| format!("Reading {}", path.display()))?;
    self.add_file_with_data_raw(path, file_bytes)
  }

  pub fn add_file_with_data(
    &mut self,
    path: &Path,
    options: AddFileDataOptions,
  ) -> Result<(), AnyError> {
    // ok, fs implementation
    #[allow(clippy::disallowed_methods)]
    let metadata = std::fs::symlink_metadata(path).with_context(|| {
      format!("Resolving target path for '{}'", path.display())
    })?;
    if metadata.is_symlink() {
      let target = self.add_symlink(path)?.into_path_buf();
      self.add_file_with_data_raw_options(&target, options)
    } else {
      self.add_file_with_data_raw_options(path, options)
    }
  }

  pub fn add_file_with_data_raw(
    &mut self,
    path: &Path,
    data: Vec<u8>,
  ) -> Result<(), AnyError> {
    self.add_file_with_data_raw_options(
      path,
      AddFileDataOptions {
        data,
        maybe_transpiled: None,
        maybe_cjs_export_analysis: None,
        maybe_source_map: None,
      },
    )
  }

  fn add_file_with_data_raw_options(
    &mut self,
    path: &Path,
    options: AddFileDataOptions,
  ) -> Result<(), AnyError> {
    log::debug!("Adding file '{}'", path.display());
    let case_sensitivity = self.case_sensitivity;

    let offset_and_len = self.files.add_data(options.data);
    let transpiled_offset = options
      .maybe_transpiled
      .map(|data| self.files.add_data(data));
    let source_map_offset = options
      .maybe_source_map
      .map(|data| self.files.add_data(data));
    let cjs_export_analysis_offset = options
      .maybe_cjs_export_analysis
      .map(|data| self.files.add_data(data));
    let dir = self.add_dir_raw(path.parent().unwrap());
    let name = path.file_name().unwrap().to_string_lossy();

    dir.entries.insert_or_modify(
      &name,
      case_sensitivity,
      || {
        VfsEntry::File(VirtualFile {
          name: name.to_string(),
          offset: offset_and_len,
          transpiled_offset,
          cjs_export_analysis_offset,
          source_map_offset,
        })
      },
      |entry| match entry {
        VfsEntry::File(virtual_file) => {
          virtual_file.offset = offset_and_len;
          // doesn't overwrite to None
          if transpiled_offset.is_some() {
            virtual_file.transpiled_offset = transpiled_offset;
          }
          if source_map_offset.is_some() {
            virtual_file.source_map_offset = source_map_offset;
          }
          if cjs_export_analysis_offset.is_some() {
            virtual_file.cjs_export_analysis_offset =
              cjs_export_analysis_offset;
          }
        }
        VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(),
      },
    );

    Ok(())
  }

  fn resolve_target_path(&mut self, path: &Path) -> Result<PathBuf, AnyError> {
    // ok, fs implementation
    #[allow(clippy::disallowed_methods)]
    let metadata = std::fs::symlink_metadata(path).with_context(|| {
      format!("Resolving target path for '{}'", path.display())
    })?;
    if metadata.is_symlink() {
      Ok(self.add_symlink(path)?.into_path_buf())
    } else {
      Ok(path.to_path_buf())
    }
  }

  pub fn add_symlink(
    &mut self,
    path: &Path,
  ) -> Result<SymlinkTarget, AnyError> {
    self.add_symlink_inner(path, &mut IndexSet::new())
  }

  fn add_symlink_inner(
    &mut self,
    path: &Path,
    visited: &mut IndexSet<PathBuf>,
  ) -> Result<SymlinkTarget, AnyError> {
    log::debug!("Adding symlink '{}'", path.display());
    let target = strip_unc_prefix(
      // ok, fs implementation
      #[allow(clippy::disallowed_methods)]
      std::fs::read_link(path)
        .with_context(|| format!("Reading symlink '{}'", path.display()))?,
    );
    let case_sensitivity = self.case_sensitivity;
    let target = normalize_path(path.parent().unwrap().join(&target));
    let dir = self.add_dir_raw(path.parent().unwrap());
    let name = path.file_name().unwrap().to_string_lossy();
    dir.entries.insert_or_modify(
      &name,
      case_sensitivity,
      || {
        VfsEntry::Symlink(VirtualSymlink {
          name: name.to_string(),
          dest_parts: VirtualSymlinkParts::from_path(&target),
        })
      },
      |_| {
        // ignore previously inserted
      },
    );
    // ok, fs implementation
    #[allow(clippy::disallowed_methods)]
    let target_metadata =
      std::fs::symlink_metadata(&target).with_context(|| {
        format!("Reading symlink target '{}'", target.display())
      })?;
    if target_metadata.is_symlink() {
      if !visited.insert(target.clone()) {
        // todo: probably don't error in this scenario
        bail!(
          "Circular symlink detected: {} -> {}",
          visited
            .iter()
            .map(|p| p.display().to_string())
            .collect::<Vec<_>>()
            .join(" -> "),
          target.display()
        );
      }
      self.add_symlink_inner(&target, visited)
    } else if target_metadata.is_dir() {
      Ok(SymlinkTarget::Dir(target))
    } else {
      Ok(SymlinkTarget::File(target))
    }
  }

  /// Adds the CJS export analysis to the provided file.
  ///
  /// Warning: This will panic if the file wasn't properly
  /// setup before calling this.
  pub fn add_cjs_export_analysis(&mut self, path: &Path, data: Vec<u8>) {
    self.add_data_for_file_or_panic(path, data, |file, offset_with_length| {
      file.cjs_export_analysis_offset = Some(offset_with_length);
    })
  }

  fn add_data_for_file_or_panic(
    &mut self,
    path: &Path,
    data: Vec<u8>,
    update_file: impl FnOnce(&mut VirtualFile, OffsetWithLength),
  ) {
    let offset_with_length = self.files.add_data(data);
    let case_sensitivity = self.case_sensitivity;
    let dir = self.get_dir_mut(path.parent().unwrap()).unwrap();
    let name = path.file_name().unwrap().to_string_lossy();
    let file = dir
      .entries
      .get_mut_by_name(&name, case_sensitivity)
      .unwrap();
    match file {
      VfsEntry::File(virtual_file) => {
        update_file(virtual_file, offset_with_length);
      }
      VfsEntry::Dir(_) | VfsEntry::Symlink(_) => {
        unreachable!()
      }
    }
  }

  /// Iterates through all the files in the virtual file system.
  pub fn iter_files(
    &self,
  ) -> impl Iterator<Item = (PathBuf, &VirtualFile)> + '_ {
    FileIterator {
      pending_dirs: VecDeque::from([(
        WindowsSystemRootablePath::root_for_current_os(),
        &self.executable_root,
      )]),
      current_dir_index: 0,
    }
  }

  pub fn build(self) -> BuiltVfs {
    fn strip_prefix_from_symlinks(
      dir: &mut VirtualDirectory,
      parts: &[String],
    ) {
      for entry in dir.entries.iter_mut() {
        match entry {
          VfsEntry::Dir(dir) => {
            strip_prefix_from_symlinks(dir, parts);
          }
          VfsEntry::File(_) => {}
          VfsEntry::Symlink(symlink) => {
            let parts = symlink
              .dest_parts
              .take_parts()
              .into_iter()
              .skip(parts.len())
              .collect();
            symlink.dest_parts.set_parts(parts);
          }
        }
      }
    }

    let mut current_dir = self.executable_root;
    let mut current_path = WindowsSystemRootablePath::root_for_current_os();
    loop {
      if current_dir.entries.len() != 1 {
        break;
      }
      if self.min_root_dir.as_ref() == Some(&current_path) {
        break;
      }
      match current_dir.entries.iter().next().unwrap() {
        VfsEntry::Dir(dir) => {
          if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME {
            // special directory we want to maintain
            break;
          }
          match current_dir.entries.remove(0) {
            VfsEntry::Dir(dir) => {
              current_path =
                WindowsSystemRootablePath::Path(current_path.join(&dir.name));
              current_dir = dir;
            }
            _ => unreachable!(),
          };
        }
        VfsEntry::File(_) | VfsEntry::Symlink(_) => break,
      }
    }
    if let WindowsSystemRootablePath::Path(path) = &current_path {
      strip_prefix_from_symlinks(
        &mut current_dir,
        VirtualSymlinkParts::from_path(path).parts(),
      );
    }
    BuiltVfs {
      root_path: current_path,
      case_sensitivity: self.case_sensitivity,
      entries: current_dir.entries,
      files: self.files.files,
    }
  }
}

struct FileIterator<'a> {
  pending_dirs: VecDeque<(WindowsSystemRootablePath, &'a VirtualDirectory)>,
  current_dir_index: usize,
}

impl<'a> Iterator for FileIterator<'a> {
  type Item = (PathBuf, &'a VirtualFile);

  fn next(&mut self) -> Option<Self::Item> {
    while !self.pending_dirs.is_empty() {
      let (dir_path, current_dir) = self.pending_dirs.front()?;
      if let Some(entry) =
        current_dir.entries.get_by_index(self.current_dir_index)
      {
        self.current_dir_index += 1;
        match entry {
          VfsEntry::Dir(virtual_directory) => {
            self.pending_dirs.push_back((
              WindowsSystemRootablePath::Path(
                dir_path.join(&virtual_directory.name),
              ),
              virtual_directory,
            ));
          }
          VfsEntry::File(virtual_file) => {
            return Some((dir_path.join(&virtual_file.name), virtual_file));
          }
          VfsEntry::Symlink(_) => {
            // ignore
          }
        }
      } else {
        self.pending_dirs.pop_front();
        self.current_dir_index = 0;
      }
    }
    None
  }
}

#[derive(Debug)]
pub enum SymlinkTarget {
  File(PathBuf),
  Dir(PathBuf),
}

impl SymlinkTarget {
  pub fn into_path_buf(self) -> PathBuf {
    match self {
      Self::File(path) => path,
      Self::Dir(path) => path,
    }
  }
}
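Editor's note (not part of the diff above): for FileSystemCaseSensitivity::Insensitive, VirtualDirectoryEntries::binary_search compares names character by character after ASCII-lowercasing, falling back to length when one name is a prefix of the other. A small standalone sketch of that comparator follows; the free-function name and the sample file names are illustrative assumptions — in the diff the logic lives inline in the binary_search_by closure.

// Editor's sketch: ASCII case-insensitive ordering usable for sort + binary search.
use std::cmp::Ordering;

fn cmp_ascii_case_insensitive(a: &str, b: &str) -> Ordering {
  a.chars()
    .zip(b.chars())
    .map(|(x, y)| x.to_ascii_lowercase().cmp(&y.to_ascii_lowercase()))
    .find(|&ord| ord != Ordering::Equal)
    // if one name is a prefix of the other, the shorter one sorts first
    .unwrap_or_else(|| a.len().cmp(&b.len()))
}

fn main() {
  let mut names = vec!["Zlib", "node_modules", "INDEX.js", "index.mjs"];
  // Keeping entries sorted with this comparator is what makes the
  // case-insensitive binary search in the VFS work.
  names.sort_by(|a, b| cmp_ascii_case_insensitive(a, b));
  assert_eq!(names, ["INDEX.js", "index.mjs", "node_modules", "Zlib"]);

  assert_eq!(cmp_ascii_case_insensitive("Readme", "readme"), Ordering::Equal);
  assert_eq!(cmp_ascii_case_insensitive("abc", "abcd"), Ordering::Less);
}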
37 cli/lib/sys.rs Normal file
@@ -0,0 +1,37 @@
// Copyright 2018-2025 the Deno authors. MIT license.

use deno_node::ExtNodeSys;
use sys_traits::FsCanonicalize;
use sys_traits::FsCreateDirAll;
use sys_traits::FsMetadata;
use sys_traits::FsOpen;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use sys_traits::FsRemoveFile;
use sys_traits::FsRename;
use sys_traits::SystemRandom;
use sys_traits::ThreadSleep;

pub trait DenoLibSys:
  FsCanonicalize
  + FsCreateDirAll
  + FsReadDir
  + FsMetadata
  + FsOpen
  + FsRemoveFile
  + FsRename
  + FsRead
  + ThreadSleep
  + SystemRandom
  + ExtNodeSys
  + Clone
  + Send
  + Sync
  + std::fmt::Debug
  + 'static
{
}

// ok, implementation
#[allow(clippy::disallowed_types)]
impl DenoLibSys for sys_traits::impls::RealSys {}
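Editor's note (not part of the diff above): DenoLibSys is an "alias" trait — it declares no methods of its own and only bundles supertrait bounds, and concrete types opt in with an empty impl. A minimal sketch of that pattern follows; the trait and type names here (FsRead, LibSys, RealSys) are illustrative assumptions, not the sys_traits API.

// Editor's sketch: a bounds-bundling trait plus an explicit empty impl.
use std::fmt::Debug;

trait FsRead {
  fn fs_read(&self, path: &str) -> std::io::Result<Vec<u8>>;
}

// No methods of its own; it only collects the capability bounds in one name.
trait LibSys: FsRead + Clone + Send + Sync + Debug + 'static {}

#[derive(Clone, Debug)]
struct RealSys;

impl FsRead for RealSys {
  fn fs_read(&self, path: &str) -> std::io::Result<Vec<u8>> {
    std::fs::read(path)
  }
}

// Like `impl DenoLibSys for sys_traits::impls::RealSys {}` in the diff:
// the concrete type is opted in with an empty impl.
impl LibSys for RealSys {}

// Generic code can now require the whole capability set with one bound.
fn takes_sys<TSys: LibSys>(sys: &TSys) -> std::io::Result<Vec<u8>> {
  sys.fs_read("Cargo.toml")
}

fn main() {
  // The result is ignored; the point is that RealSys satisfies the bound.
  let _ = takes_sys(&RealSys);
}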
@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 
 use ring::digest::Context;
 use ring::digest::SHA256;
@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 
 use std::hash::Hasher;
 
Some files were not shown because too many files have changed in this diff.