
Merge branch 'main' into sqlite_node_cppgc

Signed-off-by: Divy Srivastava <dj.srivastava23@gmail.com>
Commit 95763d3f43 by Divy Srivastava, 2024-12-13 21:07:43 -08:00 (committed via GitHub)
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
219 changed files with 4281 additions and 1792 deletions


@@ -35,7 +35,7 @@ jobs:
   - name: Install deno
     uses: denoland/setup-deno@v2
     with:
-      deno-version: v1.x
+      deno-version: v2.x
   - name: Publish
     env:


@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 29;
+const cacheVersion = 30;
 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
@@ -59,6 +59,15 @@ const Runners = {
 const prCacheKeyPrefix =
   `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;
+const prCacheKey = `${prCacheKeyPrefix}\${{ github.sha }}`;
+const prCachePath = [
+  // this must match for save and restore (https://github.com/actions/cache/issues/1444)
+  "./target",
+  "!./target/*/gn_out",
+  "!./target/*/gn_root",
+  "!./target/*/*.zip",
+  "!./target/*/*.tar.gz",
+].join("\n");
 // Note that you may need to add more version to the `apt-get remove` line below if you change this
 const llvmVersion = 19;
@@ -196,7 +205,7 @@ const installNodeStep = {
 const installDenoStep = {
   name: "Install Deno",
   uses: "denoland/setup-deno@v2",
-  with: { "deno-version": "v1.x" },
+  with: { "deno-version": "v2.x" },
 };
 const authenticateWithGoogleCloud = {
@@ -612,7 +621,7 @@ const ci = {
   `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ hashFiles('Cargo.lock') }}`,
   // We will try to restore from the closest cargo-home we can find
   "restore-keys":
-    `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}`,
+    `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-`,
   },
 },
 {
@@ -622,13 +631,7 @@ const ci = {
   if:
     "github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/')",
   with: {
-    path: [
-      "./target",
-      "!./target/*/gn_out",
-      "!./target/*/gn_root",
-      "!./target/*/*.zip",
-      "!./target/*/*.tar.gz",
-    ].join("\n"),
+    path: prCachePath,
     key: "never_saved",
     "restore-keys": prCacheKeyPrefix,
   },
@@ -1080,14 +1083,8 @@ const ci = {
   if:
     "(matrix.job == 'test' || matrix.job == 'lint') && github.ref == 'refs/heads/main'",
   with: {
-    path: [
-      "./target",
-      "!./target/*/gn_out",
-      "!./target/*/*.zip",
-      "!./target/*/*.sha256sum",
-      "!./target/*/*.tar.gz",
-    ].join("\n"),
-    key: prCacheKeyPrefix + "${{ github.sha }}",
+    path: prCachePath,
+    key: prCacheKey,
   },
 },
 ]),


@@ -180,7 +180,7 @@ jobs:
   - name: Install Deno
     uses: denoland/setup-deno@v2
     with:
-      deno-version: v1.x
+      deno-version: v2.x
   - name: Install Python
     uses: actions/setup-python@v5
     with:
@@ -361,8 +361,8 @@ jobs:
       path: |-
         ~/.cargo/registry/index
         ~/.cargo/registry/cache
-      key: '29-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-      restore-keys: '29-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+      key: '30-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+      restore-keys: '30-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
     if: '!(matrix.skip)'
   - name: Restore cache build output (PR)
     uses: actions/cache/restore@v4
@@ -375,7 +375,7 @@ jobs:
         !./target/*/*.zip
         !./target/*/*.tar.gz
       key: never_saved
-      restore-keys: '29-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+      restore-keys: '30-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
   - name: Apply and update mtime cache
     if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
     uses: ./.github/mtime_cache
@@ -682,10 +682,10 @@ jobs:
       path: |-
         ./target
         !./target/*/gn_out
+        !./target/*/gn_root
         !./target/*/*.zip
-        !./target/*/*.sha256sum
         !./target/*/*.tar.gz
-      key: '29-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+      key: '30-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
 publish-canary:
   name: publish canary
   runs-on: ubuntu-24.04

.github/workflows/npm_publish.yml (new file, 45 lines)

@@ -0,0 +1,45 @@
+name: npm_publish
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'Version'
+        type: string
+  release:
+    types: [published]
+permissions:
+  id-token: write
+jobs:
+  build:
+    name: npm publish
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    steps:
+      - name: Configure git
+        run: |
+          git config --global core.symlinks true
+          git config --global fetch.parallel 32
+      - name: Clone repository
+        uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: Install Deno
+        uses: denoland/setup-deno@v2
+        with:
+          deno-version: v2.x
+      - name: Install Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22.x'
+          registry-url: 'https://registry.npmjs.org'
+      - name: Publish
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+        run: ./tools/release/npm/build.ts ${{ github.event.inputs.version }} --publish


@@ -42,7 +42,7 @@ jobs:
   - name: Install deno
     uses: denoland/setup-deno@v2
     with:
-      deno-version: v1.x
+      deno-version: v2.x
   - name: Install rust-codesign
     run: |-


@@ -36,7 +36,7 @@ jobs:
   - name: Install deno
     uses: denoland/setup-deno@v2
     with:
-      deno-version: v1.x
+      deno-version: v2.x
   - name: Create Gist URL
     env:


@@ -41,7 +41,7 @@ jobs:
   - name: Install deno
     uses: denoland/setup-deno@v2
     with:
-      deno-version: v1.x
+      deno-version: v2.x
   - name: Run version bump
     run: |

Cargo.lock (generated, 76 lines changed)

@@ -658,9 +658,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
 [[package]]
 name = "bytes"
-version = "1.6.0"
+version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
+checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
 [[package]]
 name = "cache_control"
@@ -1215,7 +1215,7 @@ dependencies = [
 [[package]]
 name = "deno"
-version = "2.1.3"
+version = "2.1.4"
 dependencies = [
  "anstream",
  "async-trait",
@@ -1388,7 +1388,7 @@ dependencies = [
 [[package]]
 name = "deno_bench_util"
-version = "0.175.0"
+version = "0.176.0"
 dependencies = [
  "bencher",
  "deno_core",
@@ -1397,7 +1397,7 @@ dependencies = [
 [[package]]
 name = "deno_broadcast_channel"
-version = "0.175.0"
+version = "0.176.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1408,7 +1408,7 @@ dependencies = [
 [[package]]
 name = "deno_cache"
-version = "0.113.0"
+version = "0.114.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1441,7 +1441,7 @@ dependencies = [
 [[package]]
 name = "deno_canvas"
-version = "0.50.0"
+version = "0.51.0"
 dependencies = [
  "deno_core",
  "deno_webgpu",
@@ -1476,7 +1476,7 @@ dependencies = [
 [[package]]
 name = "deno_console"
-version = "0.181.0"
+version = "0.182.0"
 dependencies = [
  "deno_core",
 ]
@@ -1524,7 +1524,7 @@ checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695"
 [[package]]
 name = "deno_cron"
-version = "0.61.0"
+version = "0.62.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1537,7 +1537,7 @@ dependencies = [
 [[package]]
 name = "deno_crypto"
-version = "0.195.0"
+version = "0.196.0"
 dependencies = [
  "aes",
  "aes-gcm",
@@ -1626,7 +1626,7 @@ dependencies = [
 [[package]]
 name = "deno_fetch"
-version = "0.205.0"
+version = "0.206.0"
 dependencies = [
  "base64 0.21.7",
  "bytes",
@@ -1661,7 +1661,7 @@ dependencies = [
 [[package]]
 name = "deno_ffi"
-version = "0.168.0"
+version = "0.169.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
@@ -1681,7 +1681,7 @@ dependencies = [
 [[package]]
 name = "deno_fs"
-version = "0.91.0"
+version = "0.92.0"
 dependencies = [
  "async-trait",
  "base32",
@@ -1734,7 +1734,7 @@ dependencies = [
 [[package]]
 name = "deno_http"
-version = "0.179.0"
+version = "0.180.0"
 dependencies = [
  "async-compression",
  "async-trait",
@@ -1773,7 +1773,7 @@ dependencies = [
 [[package]]
 name = "deno_io"
-version = "0.91.0"
+version = "0.92.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1794,7 +1794,7 @@ dependencies = [
 [[package]]
 name = "deno_kv"
-version = "0.89.0"
+version = "0.90.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1867,7 +1867,7 @@ dependencies = [
 [[package]]
 name = "deno_napi"
-version = "0.112.0"
+version = "0.113.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
@@ -1895,7 +1895,7 @@ dependencies = [
 [[package]]
 name = "deno_net"
-version = "0.173.0"
+version = "0.174.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
@@ -1912,7 +1912,7 @@ dependencies = [
 [[package]]
 name = "deno_node"
-version = "0.118.0"
+version = "0.119.0"
 dependencies = [
  "aead-gcm-stream",
  "aes",
@@ -2025,7 +2025,7 @@ dependencies = [
 [[package]]
 name = "deno_npm_cache"
-version = "0.1.0"
+version = "0.2.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -2096,8 +2096,9 @@ dependencies = [
 [[package]]
 name = "deno_permissions"
-version = "0.41.0"
+version = "0.42.0"
 dependencies = [
+ "capacity_builder",
  "deno_core",
  "deno_path_util",
  "deno_terminal 0.2.0",
@@ -2114,7 +2115,7 @@ dependencies = [
 [[package]]
 name = "deno_resolver"
-version = "0.13.0"
+version = "0.14.0"
 dependencies = [
  "anyhow",
  "base32",
@@ -2133,7 +2134,7 @@ dependencies = [
 [[package]]
 name = "deno_runtime"
-version = "0.190.0"
+version = "0.191.0"
 dependencies = [
  "color-print",
  "deno_ast",
@@ -2202,9 +2203,9 @@ dependencies = [
 [[package]]
 name = "deno_semver"
-version = "0.6.0"
+version = "0.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4756be7351289726087408984db18b9eb5e0186907673f39f858d119d0162071"
+checksum = "7d1259270d66a5e6d29bb75c9289656541874f79ae9ff6c9f1c790846d5c07ba"
 dependencies = [
  "deno_error",
  "monch",
@@ -2234,7 +2235,7 @@ dependencies = [
 [[package]]
 name = "deno_telemetry"
-version = "0.3.0"
+version = "0.4.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -2275,7 +2276,7 @@ dependencies = [
 [[package]]
 name = "deno_tls"
-version = "0.168.0"
+version = "0.169.0"
 dependencies = [
  "deno_core",
  "deno_native_certs",
@@ -2325,7 +2326,7 @@ dependencies = [
 [[package]]
 name = "deno_url"
-version = "0.181.0"
+version = "0.182.0"
 dependencies = [
  "deno_bench_util",
  "deno_console",
@@ -2337,7 +2338,7 @@ dependencies = [
 [[package]]
 name = "deno_web"
-version = "0.212.0"
+version = "0.213.0"
 dependencies = [
  "async-trait",
  "base64-simd 0.8.0",
@@ -2359,7 +2360,7 @@ dependencies = [
 [[package]]
 name = "deno_webgpu"
-version = "0.148.0"
+version = "0.149.0"
 dependencies = [
  "deno_core",
  "raw-window-handle",
@@ -2372,7 +2373,7 @@ dependencies = [
 [[package]]
 name = "deno_webidl"
-version = "0.181.0"
+version = "0.182.0"
 dependencies = [
  "deno_bench_util",
  "deno_core",
@@ -2380,7 +2381,7 @@ dependencies = [
 [[package]]
 name = "deno_websocket"
-version = "0.186.0"
+version = "0.187.0"
 dependencies = [
  "bytes",
  "deno_core",
@@ -2402,7 +2403,7 @@ dependencies = [
 [[package]]
 name = "deno_webstorage"
-version = "0.176.0"
+version = "0.177.0"
 dependencies = [
  "deno_core",
  "deno_web",
@@ -4022,9 +4023,9 @@ dependencies = [
 [[package]]
 name = "hyper-util"
-version = "0.1.7"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9"
+checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4"
 dependencies = [
  "bytes",
  "futures-channel",
@@ -4035,7 +4036,6 @@ dependencies = [
  "pin-project-lite",
  "socket2",
  "tokio",
- "tower",
  "tower-service",
  "tracing",
 ]
@@ -4917,7 +4917,7 @@ dependencies = [
 [[package]]
 name = "napi_sym"
-version = "0.111.0"
+version = "0.112.0"
 dependencies = [
  "quote",
  "serde",
@@ -4972,7 +4972,7 @@ dependencies = [
 [[package]]
 name = "node_resolver"
-version = "0.20.0"
+version = "0.21.0"
 dependencies = [
  "anyhow",
  "async-trait",


@@ -50,17 +50,17 @@ repository = "https://github.com/denoland/deno"
 deno_ast = { version = "=0.44.0", features = ["transpiling"] }
 deno_core = { version = "0.325.0" }
-deno_bench_util = { version = "0.175.0", path = "./bench_util" }
+deno_bench_util = { version = "0.176.0", path = "./bench_util" }
 deno_config = { version = "=0.39.3", features = ["workspace", "sync"] }
 deno_lockfile = "=0.23.2"
 deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
 deno_npm = "=0.26.0"
 deno_path_util = "=0.2.1"
-deno_permissions = { version = "0.41.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.190.0", path = "./runtime" }
-deno_semver = "=0.6.0"
+deno_permissions = { version = "0.42.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.191.0", path = "./runtime" }
+deno_semver = "=0.6.1"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.111.0", path = "./ext/napi/sym" }
+napi_sym = { version = "0.112.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
 denokv_proto = "0.8.4"
@@ -69,34 +69,34 @@ denokv_remote = "0.8.4"
 denokv_sqlite = { default-features = false, version = "0.8.4" }
 # exts
-deno_broadcast_channel = { version = "0.175.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.113.0", path = "./ext/cache" }
-deno_canvas = { version = "0.50.0", path = "./ext/canvas" }
-deno_console = { version = "0.181.0", path = "./ext/console" }
-deno_cron = { version = "0.61.0", path = "./ext/cron" }
-deno_crypto = { version = "0.195.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.205.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.168.0", path = "./ext/ffi" }
-deno_fs = { version = "0.91.0", path = "./ext/fs" }
-deno_http = { version = "0.179.0", path = "./ext/http" }
-deno_io = { version = "0.91.0", path = "./ext/io" }
-deno_kv = { version = "0.89.0", path = "./ext/kv" }
-deno_napi = { version = "0.112.0", path = "./ext/napi" }
-deno_net = { version = "0.173.0", path = "./ext/net" }
-deno_node = { version = "0.118.0", path = "./ext/node" }
-deno_telemetry = { version = "0.3.0", path = "./ext/telemetry" }
-deno_tls = { version = "0.168.0", path = "./ext/tls" }
-deno_url = { version = "0.181.0", path = "./ext/url" }
-deno_web = { version = "0.212.0", path = "./ext/web" }
-deno_webgpu = { version = "0.148.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.181.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.186.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.176.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.176.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.114.0", path = "./ext/cache" }
+deno_canvas = { version = "0.51.0", path = "./ext/canvas" }
+deno_console = { version = "0.182.0", path = "./ext/console" }
+deno_cron = { version = "0.62.0", path = "./ext/cron" }
+deno_crypto = { version = "0.196.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.206.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.169.0", path = "./ext/ffi" }
+deno_fs = { version = "0.92.0", path = "./ext/fs" }
+deno_http = { version = "0.180.0", path = "./ext/http" }
+deno_io = { version = "0.92.0", path = "./ext/io" }
+deno_kv = { version = "0.90.0", path = "./ext/kv" }
+deno_napi = { version = "0.113.0", path = "./ext/napi" }
+deno_net = { version = "0.174.0", path = "./ext/net" }
+deno_node = { version = "0.119.0", path = "./ext/node" }
+deno_telemetry = { version = "0.4.0", path = "./ext/telemetry" }
+deno_tls = { version = "0.169.0", path = "./ext/tls" }
+deno_url = { version = "0.182.0", path = "./ext/url" }
+deno_web = { version = "0.213.0", path = "./ext/web" }
+deno_webgpu = { version = "0.149.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.182.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.187.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.177.0", path = "./ext/webstorage" }
 # resolvers
-deno_npm_cache = { version = "0.1.0", path = "./resolvers/npm_cache" }
-deno_resolver = { version = "0.13.0", path = "./resolvers/deno" }
-node_resolver = { version = "0.20.0", path = "./resolvers/node" }
+deno_npm_cache = { version = "0.2.0", path = "./resolvers/npm_cache" }
+deno_resolver = { version = "0.14.0", path = "./resolvers/deno" }
+node_resolver = { version = "0.21.0", path = "./resolvers/node" }
 aes = "=0.8.3"
 anyhow = "1.0.57"
@@ -108,6 +108,7 @@ boxed_error = "0.2.2"
 brotli = "6.0.0"
 bytes = "1.4.0"
 cache_control = "=0.2.0"
+capacity_builder = "0.1.0"
 cbc = { version = "=0.1.2", features = ["alloc"] }
 # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
 # Instead use util::time::utc_now()
@@ -141,7 +142,7 @@ http_v02 = { package = "http", version = "0.2.9" }
 httparse = "1.8.0"
 hyper = { version = "1.4.1", features = ["full"] }
 hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
-hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
+hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
 hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
 indexmap = { version = "2", features = ["serde"] }
 ipnet = "2.3"


@@ -6,6 +6,26 @@ https://github.com/denoland/deno/releases
 We also have one-line install commands at:
 https://github.com/denoland/deno_install
+### 2.1.4 / 2024.12.11
+- feat(unstable): support caching npm dependencies only as they're needed
+  (#27300)
+- fix(compile): correct read length for transpiled typescript files (#27301)
+- fix(ext/node): accept file descriptor in fs.readFile(Sync) (#27252)
+- fix(ext/node): handle Float16Array in node:v8 module (#27285)
+- fix(lint): do not error providing --allow-import (#27321)
+- fix(node): update list of builtin node modules, add missing export to
+  _http_common (#27294)
+- fix(outdated): error when there are no config files (#27306)
+- fix(outdated): respect --quiet flag for hints (#27317)
+- fix(outdated): show a suggestion for updating (#27304)
+- fix(task): do not always kill child on ctrl+c on windows (#27269)
+- fix(unstable): don't unwrap optional state in otel (#27292)
+- fix: do not error when subpath has an @ symbol (#27290)
+- fix: do not panic when fetching invalid file url on Windows (#27259)
+- fix: replace the @deno-types with @ts-types (#27310)
+- perf(compile): improve FileBackedVfsFile (#27299)
 ### 2.1.3 / 2024.12.05
 - feat(unstable): add metrics to otel (#27143)


@@ -2,7 +2,7 @@
 [package]
 name = "deno_bench_util"
-version = "0.175.0"
+version = "0.176.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true


@@ -2,7 +2,7 @@
 [package]
 name = "deno"
-version = "2.1.3"
+version = "2.1.4"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true


@@ -64,6 +64,15 @@ impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
   }
 }
+pub fn import_map_deps(
+  import_map: &serde_json::Value,
+) -> HashSet<JsrDepPackageReq> {
+  let values = imports_values(import_map.get("imports"))
+    .into_iter()
+    .chain(scope_values(import_map.get("scopes")));
+  values_to_set(values)
+}
 pub fn deno_json_deps(
   config: &deno_config::deno_json::ConfigFile,
 ) -> HashSet<JsrDepPackageReq> {
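For orientation (this note and sketch are mine, not part of the commit): the new import_map_deps helper walks the top-level "imports" and "scopes" members of an external import map so their entries count as lockfile dependencies, the same way deno.json imports already did. A minimal, self-contained Rust sketch of that shape, collecting plain specifier strings instead of the crate's JsrDepPackageReq values and without the private imports_values/scope_values helpers:

use std::collections::HashSet;

// Simplified stand-in for import_map_deps(): gather every specifier string
// referenced by the "imports" and "scopes" sections of an import map value.
fn import_map_specifiers(import_map: &serde_json::Value) -> HashSet<String> {
  let mut out = HashSet::new();
  if let Some(imports) = import_map.get("imports").and_then(|v| v.as_object()) {
    out.extend(imports.values().filter_map(|v| v.as_str()).map(String::from));
  }
  if let Some(scopes) = import_map.get("scopes").and_then(|v| v.as_object()) {
    for scope in scopes.values().filter_map(|v| v.as_object()) {
      out.extend(scope.values().filter_map(|v| v.as_str()).map(String::from));
    }
  }
  out
}

fn main() {
  // Hypothetical external import map referenced via "importMap" in deno.json.
  let import_map = serde_json::json!({
    "imports": { "@std/path": "jsr:@std/path@^1.0.8" },
    "scopes": { "./vendor/": { "chalk": "npm:chalk@5" } }
  });
  println!("{:?}", import_map_specifiers(&import_map));
}

In the real code these strings are parsed into JsrDepPackageReq values before being handed to the lockfile's workspace config (see cli/args/lockfile.rs below).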


@@ -37,6 +37,7 @@ use deno_path_util::url_to_file_path;
 use deno_runtime::deno_permissions::PermissionsOptions;
 use deno_runtime::deno_permissions::SysDescriptor;
 use deno_telemetry::OtelConfig;
+use deno_telemetry::OtelConsoleConfig;
 use log::debug;
 use log::Level;
 use serde::Deserialize;
@@ -245,7 +246,7 @@ pub struct InstallFlagsGlobal {
 }
 #[derive(Clone, Debug, Eq, PartialEq)]
-pub enum InstallKind {
+pub enum InstallFlags {
   Local(InstallFlagsLocal),
   Global(InstallFlagsGlobal),
 }
@@ -257,11 +258,6 @@ pub enum InstallFlagsLocal {
   Entrypoints(Vec<String>),
 }
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct InstallFlags {
-  pub kind: InstallKind,
-}
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct JSONReferenceFlags {
   pub json: deno_core::serde_json::Value,
@@ -600,6 +596,7 @@ pub struct UnstableConfig {
   pub bare_node_builtins: bool,
   pub detect_cjs: bool,
   pub sloppy_imports: bool,
+  pub npm_lazy_caching: bool,
   pub features: Vec<String>, // --unstabe-kv --unstable-cron
 }
@@ -990,21 +987,41 @@ impl Flags {
     args
   }
-  pub fn otel_config(&self) -> Option<OtelConfig> {
-    if self
+  pub fn otel_config(&self) -> OtelConfig {
+    let has_unstable_flag = self
       .unstable_config
       .features
-      .contains(&String::from("otel"))
-    {
-      Some(OtelConfig {
-        runtime_name: Cow::Borrowed("deno"),
-        runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
-        deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC")
-          .is_ok(),
-        ..Default::default()
-      })
-    } else {
-      None
+      .contains(&String::from("otel"));
+    let otel_var = |name| match std::env::var(name) {
+      Ok(s) if s.to_lowercase() == "true" => Some(true),
+      Ok(s) if s.to_lowercase() == "false" => Some(false),
+      _ => None,
+    };
+    let disabled =
+      !has_unstable_flag || otel_var("OTEL_SDK_DISABLED").unwrap_or(false);
+    let default = !disabled && otel_var("OTEL_DENO").unwrap_or(false);
+    OtelConfig {
+      tracing_enabled: !disabled
+        && otel_var("OTEL_DENO_TRACING").unwrap_or(default),
+      console: match std::env::var("OTEL_DENO_CONSOLE").as_deref() {
+        Ok(_) if disabled => OtelConsoleConfig::Ignore,
+        Ok("ignore") => OtelConsoleConfig::Ignore,
+        Ok("capture") => OtelConsoleConfig::Capture,
+        Ok("replace") => OtelConsoleConfig::Replace,
+        _ => {
+          if default {
+            OtelConsoleConfig::Capture
+          } else {
+            OtelConsoleConfig::Ignore
+          }
+        }
+      },
+      deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC")
+        .as_deref()
+        == Ok("1"),
     }
   }
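As a reading aid (my own standalone sketch, not code from this commit): the rewritten otel_config() above gates everything on the `otel` unstable feature and then on the OTEL_SDK_DISABLED, OTEL_DENO, OTEL_DENO_TRACING and OTEL_DENO_CONSOLE environment variables. An approximation of that resolution order, with the console mode reduced to a plain string:

// Sketch only; mirrors the logic in the hunk above but is not the real Flags impl.
fn env_bool(name: &str) -> Option<bool> {
  match std::env::var(name) {
    Ok(s) if s.eq_ignore_ascii_case("true") => Some(true),
    Ok(s) if s.eq_ignore_ascii_case("false") => Some(false),
    _ => None,
  }
}

// Returns (tracing_enabled, console_mode) for a given --unstable-otel state.
fn resolve_otel(has_unstable_flag: bool) -> (bool, &'static str) {
  let disabled =
    !has_unstable_flag || env_bool("OTEL_SDK_DISABLED").unwrap_or(false);
  let default = !disabled && env_bool("OTEL_DENO").unwrap_or(false);
  let tracing = !disabled && env_bool("OTEL_DENO_TRACING").unwrap_or(default);
  let console = match std::env::var("OTEL_DENO_CONSOLE").as_deref() {
    _ if disabled => "ignore",
    Ok("ignore") => "ignore",
    Ok("capture") => "capture",
    Ok("replace") => "replace",
    _ => if default { "capture" } else { "ignore" },
  };
  (tracing, console)
}

fn main() {
  // Without the unstable `otel` feature everything stays off, regardless of env.
  assert_eq!(resolve_otel(false), (false, "ignore"));
  // With the feature enabled, the OTEL_DENO* variables decide the rest.
  println!("{:?}", resolve_otel(true));
}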
@@ -2904,6 +2921,7 @@ To ignore linting on an entire file, you can add an ignore comment at the top of
     .arg(watch_arg(false))
     .arg(watch_exclude_arg())
     .arg(no_clear_screen_arg())
+    .arg(allow_import_arg())
   })
 }
@@ -4406,6 +4424,16 @@ impl CommandExt for Command {
       })
       .help_heading(UNSTABLE_HEADING)
       .display_order(next_display_order())
+    ).arg(
+      Arg::new("unstable-npm-lazy-caching")
+        .long("unstable-npm-lazy-caching")
+        .help("Enable unstable lazy caching of npm dependencies, downloading them only as needed (disabled: all npm packages in package.json are installed on startup; enabled: only npm packages that are actually referenced in an import are installed")
+        .env("DENO_UNSTABLE_NPM_LAZY_CACHING")
+        .value_parser(FalseyValueParser::new())
+        .action(ArgAction::SetTrue)
+        .hide(true)
+        .help_heading(UNSTABLE_HEADING)
+        .display_order(next_display_order()),
     );
     for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS.iter() {
@@ -4919,15 +4947,14 @@ fn install_parse(
     let module_url = cmd_values.next().unwrap();
     let args = cmd_values.collect();
-    flags.subcommand = DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Global(InstallFlagsGlobal {
+    flags.subcommand =
+      DenoSubcommand::Install(InstallFlags::Global(InstallFlagsGlobal {
         name,
         module_url,
         args,
        root,
        force,
-      }),
-    });
+      }));
     return Ok(());
   }
@@ -4936,22 +4963,19 @@ fn install_parse(
   allow_scripts_arg_parse(flags, matches)?;
   if matches.get_flag("entrypoint") {
     let entrypoints = matches.remove_many::<String>("cmd").unwrap_or_default();
-    flags.subcommand = DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Local(InstallFlagsLocal::Entrypoints(
-        entrypoints.collect(),
-      )),
-    });
+    flags.subcommand = DenoSubcommand::Install(InstallFlags::Local(
+      InstallFlagsLocal::Entrypoints(entrypoints.collect()),
+    ));
   } else if let Some(add_files) = matches
     .remove_many("cmd")
     .map(|packages| add_parse_inner(matches, Some(packages)))
   {
-    flags.subcommand = DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Local(InstallFlagsLocal::Add(add_files)),
-    })
+    flags.subcommand = DenoSubcommand::Install(InstallFlags::Local(
+      InstallFlagsLocal::Add(add_files),
+    ))
   } else {
-    flags.subcommand = DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Local(InstallFlagsLocal::TopLevel),
-    });
+    flags.subcommand =
+      DenoSubcommand::Install(InstallFlags::Local(InstallFlagsLocal::TopLevel));
   }
   Ok(())
 }
@@ -5083,6 +5107,7 @@ fn lint_parse(
   unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionOnly);
   ext_arg_parse(flags, matches);
   config_args_parse(flags, matches);
+  allow_import_parse(flags, matches);
   let files = match matches.remove_many::<String>("files") {
     Some(f) => f.collect(),
@@ -5996,6 +6021,8 @@ fn unstable_args_parse(
   flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs");
   flags.unstable_config.sloppy_imports =
     matches.get_flag("unstable-sloppy-imports");
+  flags.unstable_config.npm_lazy_caching =
+    matches.get_flag("unstable-npm-lazy-caching");
   if matches!(cfg, UnstableArgsConfig::ResolutionAndRuntime) {
     for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
@@ -7141,6 +7168,7 @@ mod tests {
     let r = flags_from_vec(svec![
       "deno",
       "lint",
+      "--allow-import",
      "--watch",
      "script_1.ts",
      "script_2.ts"
@@ -7162,6 +7190,10 @@ mod tests {
        compact: false,
        watch: Some(Default::default()),
       }),
+      permissions: PermissionFlags {
+        allow_import: Some(vec![]),
+        ..Default::default()
+      },
       ..Flags::default()
     }
   );
@@ -8599,15 +8631,15 @@ mod tests {
   assert_eq!(
     r.unwrap(),
     Flags {
-      subcommand: DenoSubcommand::Install(InstallFlags {
-        kind: InstallKind::Global(InstallFlagsGlobal {
+      subcommand: DenoSubcommand::Install(InstallFlags::Global(
+        InstallFlagsGlobal {
          name: None,
          module_url: "jsr:@std/http/file-server".to_string(),
          args: vec![],
          root: None,
          force: false,
-        }),
-      }),
+        }
+      ),),
       ..Flags::default()
     }
   );
@@ -8621,15 +8653,15 @@ mod tests {
   assert_eq!(
     r.unwrap(),
     Flags {
-      subcommand: DenoSubcommand::Install(InstallFlags {
-        kind: InstallKind::Global(InstallFlagsGlobal {
+      subcommand: DenoSubcommand::Install(InstallFlags::Global(
+        InstallFlagsGlobal {
          name: None,
          module_url: "jsr:@std/http/file-server".to_string(),
          args: vec![],
          root: None,
          force: false,
-        }),
-      }),
+        }
+      ),),
       ..Flags::default()
     }
   );
@@ -8642,15 +8674,15 @@ mod tests {
   assert_eq!(
     r.unwrap(),
     Flags {
-      subcommand: DenoSubcommand::Install(InstallFlags {
-        kind: InstallKind::Global(InstallFlagsGlobal {
+      subcommand: DenoSubcommand::Install(InstallFlags::Global(
+        InstallFlagsGlobal {
          name: Some("file_server".to_string()),
          module_url: "jsr:@std/http/file-server".to_string(),
          args: svec!["foo", "bar"],
          root: Some("/foo".to_string()),
          force: true,
-        }),
-      }),
+        }
+      ),),
       import_map_path: Some("import_map.json".to_string()),
       no_remote: true,
       config_flag: ConfigFlag::Path("tsconfig.json".to_owned()),
@@ -11204,9 +11236,9 @@ mod tests {
       ..Flags::default()
     },
     "install" => Flags {
-      subcommand: DenoSubcommand::Install(InstallFlags {
-        kind: InstallKind::Local(InstallFlagsLocal::Add(flags)),
-      }),
+      subcommand: DenoSubcommand::Install(InstallFlags::Local(
+        InstallFlagsLocal::Add(flags),
+      )),
       ..Flags::default()
     },
     _ => unreachable!(),


@@ -9,18 +9,19 @@ use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;
 use deno_core::parking_lot::MutexGuard;
+use deno_core::serde_json;
 use deno_lockfile::WorkspaceMemberConfig;
 use deno_package_json::PackageJsonDepValue;
 use deno_runtime::deno_node::PackageJson;
 use deno_semver::jsr::JsrDepPackageReq;
+use crate::args::deno_json::import_map_deps;
 use crate::cache;
 use crate::util::fs::atomic_write_file_with_retries;
 use crate::Flags;
 use crate::args::DenoSubcommand;
 use crate::args::InstallFlags;
-use crate::args::InstallKind;
 use deno_lockfile::Lockfile;
@@ -102,6 +103,7 @@ impl CliLockfile {
   pub fn discover(
     flags: &Flags,
     workspace: &Workspace,
+    maybe_external_import_map: Option<&serde_json::Value>,
   ) -> Result<Option<CliLockfile>, AnyError> {
     fn pkg_json_deps(
       maybe_pkg_json: Option<&PackageJson>,
@@ -136,10 +138,8 @@ impl CliLockfile {
     if flags.no_lock
       || matches!(
         flags.subcommand,
-        DenoSubcommand::Install(InstallFlags {
-          kind: InstallKind::Global(..),
-          ..
-        }) | DenoSubcommand::Uninstall(_)
+        DenoSubcommand::Install(InstallFlags::Global(..))
+          | DenoSubcommand::Uninstall(_)
       )
     {
       return Ok(None);
@@ -174,7 +174,11 @@ impl CliLockfile {
     let config = deno_lockfile::WorkspaceConfig {
       root: WorkspaceMemberConfig {
         package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
-        dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
+        dependencies: if let Some(map) = maybe_external_import_map {
+          import_map_deps(map)
+        } else {
+          deno_json_deps(root_folder.deno_json.as_deref())
+        },
       },
       members: workspace
         .config_folders()


@@ -31,6 +31,7 @@ use deno_npm_cache::NpmCacheSetting;
 use deno_path_util::normalize_path;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_telemetry::OtelConfig;
+use deno_telemetry::OtelRuntimeConfig;
 use import_map::resolve_import_map_value_from_specifier;
 pub use deno_config::deno_json::BenchConfig;
@@ -807,6 +808,7 @@ pub struct CliOptions {
   maybe_node_modules_folder: Option<PathBuf>,
   npmrc: Arc<ResolvedNpmRc>,
   maybe_lockfile: Option<Arc<CliLockfile>>,
+  maybe_external_import_map: Option<(PathBuf, serde_json::Value)>,
   overrides: CliOptionOverrides,
   pub start_dir: Arc<WorkspaceDirectory>,
   pub deno_dir_provider: Arc<DenoDirProvider>,
@@ -820,6 +822,7 @@ impl CliOptions {
   npmrc: Arc<ResolvedNpmRc>,
   start_dir: Arc<WorkspaceDirectory>,
   force_global_cache: bool,
+  maybe_external_import_map: Option<(PathBuf, serde_json::Value)>,
 ) -> Result<Self, AnyError> {
   if let Some(insecure_allowlist) =
     flags.unsafely_ignore_certificate_errors.as_ref()
@@ -857,6 +860,7 @@ impl CliOptions {
     maybe_node_modules_folder,
     overrides: Default::default(),
     main_module_cell: std::sync::OnceLock::new(),
+    maybe_external_import_map,
    start_dir,
    deno_dir_provider,
   })
@@ -932,7 +936,33 @@ impl CliOptions {
   let (npmrc, _) = discover_npmrc_from_workspace(&start_dir.workspace)?;
-  let maybe_lock_file = CliLockfile::discover(&flags, &start_dir.workspace)?;
+  fn load_external_import_map(
+    deno_json: &ConfigFile,
+  ) -> Result<Option<(PathBuf, serde_json::Value)>, AnyError> {
+    if !deno_json.is_an_import_map() {
+      if let Some(path) = deno_json.to_import_map_path()? {
+        let contents = std::fs::read_to_string(&path).with_context(|| {
+          format!("Unable to read import map at '{}'", path.display())
+        })?;
+        let map = serde_json::from_str(&contents)?;
+        return Ok(Some((path, map)));
+      }
+    }
+    Ok(None)
+  }
+  let external_import_map =
+    if let Some(deno_json) = start_dir.workspace.root_deno_json() {
+      load_external_import_map(deno_json)?
+    } else {
+      None
+    };
+  let maybe_lock_file = CliLockfile::discover(
+    &flags,
+    &start_dir.workspace,
+    external_import_map.as_ref().map(|(_, v)| v),
+  )?;
   log::debug!("Finished config loading.");
@@ -943,6 +973,7 @@ impl CliOptions {
   npmrc,
   Arc::new(start_dir),
   false,
+  external_import_map,
  )
 }
@@ -970,9 +1001,7 @@ impl CliOptions {
   match self.sub_command() {
     DenoSubcommand::Cache(_) => GraphKind::All,
     DenoSubcommand::Check(_) => GraphKind::TypesOnly,
-    DenoSubcommand::Install(InstallFlags {
-      kind: InstallKind::Local(_),
-    }) => GraphKind::All,
+    DenoSubcommand::Install(InstallFlags::Local(_)) => GraphKind::All,
     _ => self.type_check_mode().as_graph_kind(),
   }
 }
@@ -1065,7 +1094,7 @@ impl CliOptions {
   file_fetcher: &FileFetcher,
   pkg_json_dep_resolution: PackageJsonDepResolution,
 ) -> Result<WorkspaceResolver, AnyError> {
-  let overrode_no_import_map = self
+  let overrode_no_import_map: bool = self
     .overrides
     .import_map_specifier
     .as_ref()
@@ -1093,7 +1122,19 @@ impl CliOptions {
        value,
       })
     }
-    None => None,
+    None => {
+      if let Some((path, import_map)) =
+        self.maybe_external_import_map.as_ref()
+      {
+        let path_url = deno_path_util::url_from_file_path(path)?;
+        Some(deno_config::workspace::SpecifiedImportMap {
+          base_url: path_url,
+          value: import_map.clone(),
+        })
+      } else {
+        None
+      }
+    }
   }
  };
  Ok(self.workspace().create_resolver(
@@ -1132,7 +1173,7 @@ impl CliOptions {
   }
 }
-pub fn otel_config(&self) -> Option<OtelConfig> {
+pub fn otel_config(&self) -> OtelConfig {
   self.flags.otel_config()
 }
@@ -1549,11 +1590,11 @@ impl CliOptions {
   DenoSubcommand::Check(check_flags) => {
     Some(files_to_urls(&check_flags.files))
   }
-  DenoSubcommand::Install(InstallFlags {
-    kind: InstallKind::Global(flags),
-  }) => Url::parse(&flags.module_url)
-    .ok()
-    .map(|url| vec![Cow::Owned(url)]),
+  DenoSubcommand::Install(InstallFlags::Global(flags)) => {
+    Url::parse(&flags.module_url)
+      .ok()
+      .map(|url| vec![Cow::Owned(url)])
+  }
   DenoSubcommand::Doc(DocFlags {
     source_files: DocSourceFileFlag::Paths(paths),
     ..
@@ -1689,6 +1730,7 @@ impl CliOptions {
   "detect-cjs",
   "fmt-component",
   "fmt-sql",
+  "lazy-npm-caching",
  ])
  .collect();
@@ -1767,6 +1809,19 @@ impl CliOptions {
    ),
   }
 }
+pub fn unstable_npm_lazy_caching(&self) -> bool {
+  self.flags.unstable_config.npm_lazy_caching
+    || self.workspace().has_unstable("npm-lazy-caching")
+}
+pub fn default_npm_caching_strategy(&self) -> NpmCachingStrategy {
+  if self.flags.unstable_config.npm_lazy_caching {
+    NpmCachingStrategy::Lazy
+  } else {
+    NpmCachingStrategy::Eager
+  }
+}
 }
 /// Resolves the path to use for a local node_modules folder.
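One more orientation note (mine, not the commit's): the new --unstable-npm-lazy-caching flag feeds default_npm_caching_strategy() above, which flips the default between eager installs (everything in package.json up front, the previous behavior) and lazy caching (only packages the module graph actually reaches, see cli/graph_util.rs further down). A trivial sketch of that mapping, with hypothetical local names:

// Illustrative only; the real enum is defined in the next hunk of cli/args/mod.rs.
#[derive(Debug, Clone, Copy, PartialEq)]
enum NpmCachingStrategy {
  Eager,  // install all package.json deps up front
  Lazy,   // cache npm packages only as the graph references them
  Manual, // also present in the diff; not selected via this flag
}

fn default_npm_caching_strategy(npm_lazy_caching: bool) -> NpmCachingStrategy {
  if npm_lazy_caching {
    NpmCachingStrategy::Lazy
  } else {
    NpmCachingStrategy::Eager
  }
}

fn main() {
  assert_eq!(default_npm_caching_strategy(false), NpmCachingStrategy::Eager);
  assert_eq!(default_npm_caching_strategy(true), NpmCachingStrategy::Lazy);
  let _ = NpmCachingStrategy::Manual;
}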
@@ -1981,6 +2036,20 @@ fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) {
   }
 }
+#[derive(Debug, Clone, Copy)]
+pub enum NpmCachingStrategy {
+  Eager,
+  Lazy,
+  Manual,
+}
+pub(crate) fn otel_runtime_config() -> OtelRuntimeConfig {
+  OtelRuntimeConfig {
+    runtime_name: Cow::Borrowed("deno"),
+    runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
+  }
+}
 #[cfg(test)]
 mod test {
   use pretty_assertions::assert_eq;


@@ -984,6 +984,7 @@ impl CliFactory {
   cli_options.sub_command().clone(),
   self.create_cli_main_worker_options()?,
   self.cli_options()?.otel_config(),
+  self.cli_options()?.default_npm_caching_strategy(),
 ))
 }


@@ -4,6 +4,7 @@ use crate::args::config_to_deno_graph_workspace_member;
 use crate::args::jsr_url;
 use crate::args::CliLockfile;
 use crate::args::CliOptions;
+pub use crate::args::NpmCachingStrategy;
 use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
 use crate::cache;
 use crate::cache::FetchCacher;
@@ -218,6 +219,7 @@ pub struct CreateGraphOptions<'a> {
   pub is_dynamic: bool,
   /// Specify `None` to use the default CLI loader.
   pub loader: Option<&'a mut dyn Loader>,
+  pub npm_caching: NpmCachingStrategy,
 }
 pub struct ModuleGraphCreator {
@@ -246,10 +248,11 @@ impl ModuleGraphCreator {
   &self,
   graph_kind: GraphKind,
   roots: Vec<ModuleSpecifier>,
+  npm_caching: NpmCachingStrategy,
 ) -> Result<deno_graph::ModuleGraph, AnyError> {
   let mut cache = self.module_graph_builder.create_graph_loader();
   self
-    .create_graph_with_loader(graph_kind, roots, &mut cache)
+    .create_graph_with_loader(graph_kind, roots, &mut cache, npm_caching)
     .await
 }
@@ -258,6 +261,7 @@ impl ModuleGraphCreator {
   graph_kind: GraphKind,
   roots: Vec<ModuleSpecifier>,
   loader: &mut dyn Loader,
+  npm_caching: NpmCachingStrategy,
 ) -> Result<ModuleGraph, AnyError> {
   self
     .create_graph_with_options(CreateGraphOptions {
@@ -265,6 +269,7 @@ impl ModuleGraphCreator {
       graph_kind,
       roots,
      loader: Some(loader),
+      npm_caching,
    })
    .await
 }
@@ -317,6 +322,7 @@ impl ModuleGraphCreator {
   graph_kind: deno_graph::GraphKind::All,
   roots,
   loader: Some(&mut publish_loader),
+  npm_caching: self.options.default_npm_caching_strategy(),
 })
 .await?;
 self.graph_valid(&graph)?;
@@ -376,6 +382,7 @@ impl ModuleGraphCreator {
   graph_kind,
   roots,
   loader: None,
+  npm_caching: self.options.default_npm_caching_strategy(),
 })
 .await?;
@@ -565,7 +572,8 @@ impl ModuleGraphBuilder {
   };
   let cli_resolver = &self.resolver;
   let graph_resolver = self.create_graph_resolver()?;
-  let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
+  let graph_npm_resolver =
+    cli_resolver.create_graph_npm_resolver(options.npm_caching);
   let maybe_file_watcher_reporter = self
     .maybe_file_watcher_reporter
     .as_ref()
@@ -592,6 +600,7 @@ impl ModuleGraphBuilder {
     resolver: Some(&graph_resolver),
     locker: locker.as_mut().map(|l| l as _),
   },
+  options.npm_caching,
 )
 .await
 }
@@ -602,6 +611,7 @@ impl ModuleGraphBuilder {
   roots: Vec<ModuleSpecifier>,
   loader: &'a mut dyn deno_graph::source::Loader,
   options: deno_graph::BuildOptions<'a>,
+  npm_caching: NpmCachingStrategy,
 ) -> Result<(), AnyError> {
   // ensure an "npm install" is done if the user has explicitly
   // opted into using a node_modules directory
@@ -612,7 +622,13 @@ impl ModuleGraphBuilder {
     .unwrap_or(false)
   {
     if let Some(npm_resolver) = self.npm_resolver.as_managed() {
-      npm_resolver.ensure_top_level_package_json_install().await?;
+      let already_done =
+        npm_resolver.ensure_top_level_package_json_install().await?;
+      if !already_done && matches!(npm_caching, NpmCachingStrategy::Eager) {
+        npm_resolver
+          .cache_packages(crate::npm::PackageCaching::All)
+          .await?;
+      }
     }
   }
@@ -701,7 +717,9 @@ impl ModuleGraphBuilder {
   let parser = self.parsed_source_cache.as_capturing_parser();
   let cli_resolver = &self.resolver;
   let graph_resolver = self.create_graph_resolver()?;
-  let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
+  let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(
+    self.cli_options.default_npm_caching_strategy(),
+  );
   graph.build_fast_check_type_graph(
     deno_graph::BuildFastCheckTypeGraphOptions {


@@ -1387,7 +1387,7 @@ impl CodeActionCollection {
   character: import_start.column_index as u32,
 };
 let new_text = format!(
-  "{}// @deno-types=\"{}\"\n",
+  "{}// @ts-types=\"{}\"\n",
   if position.character == 0 { "" } else { "\n" },
   &types_specifier_text
 );
@@ -1400,7 +1400,7 @@ impl CodeActionCollection {
 };
 Some(lsp::CodeAction {
   title: format!(
-    "Add @deno-types directive for \"{}\"",
+    "Add @ts-types directive for \"{}\"",
     &types_specifier_text
   ),
   kind: Some(lsp::CodeActionKind::QUICKFIX),


@@ -743,13 +743,16 @@ fn get_node_completions(
   }
   let items = SUPPORTED_BUILTIN_NODE_MODULES
     .iter()
-    .map(|name| {
+    .filter_map(|name| {
+      if name.starts_with('_') {
+        return None;
+      }
       let specifier = format!("node:{}", name);
       let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
        range: *range,
        new_text: specifier.clone(),
       }));
-      lsp::CompletionItem {
+      Some(lsp::CompletionItem {
        label: specifier,
        kind: Some(lsp::CompletionItemKind::FILE),
        detail: Some("(node)".to_string()),
@@ -758,7 +761,7 @@ fn get_node_completions(
        IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
       ),
       ..Default::default()
-      }
+      })
     })
     .collect();
   Some(CompletionList {


@@ -1355,7 +1355,7 @@ fn diagnose_resolution(
   }
   // don't bother warning about sloppy import redirects from .js to .d.ts
   // because explaining how to fix this via a diagnostic involves using
-  // @deno-types and that's a bit complicated to explain
+  // @ts-types and that's a bit complicated to explain
   let is_sloppy_import_dts_redirect = doc_specifier.scheme() == "file"
     && doc.media_type().is_declaration()
     && !MediaType::from_specifier(specifier).is_declaration();
@@ -1523,7 +1523,7 @@ fn diagnose_dependency(
   .iter()
   .map(|i| documents::to_lsp_range(&i.specifier_range))
   .collect();
-  // TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has
+  // TODO(nayeemrmn): This is a crude way of detecting `@ts-types` which has
   // a different specifier and therefore needs a separate call to
   // `diagnose_resolution()`. It would be much cleaner if that were modelled as
   // a separate dependency: https://github.com/denoland/deno_graph/issues/247.
@@ -1540,7 +1540,7 @@ fn diagnose_dependency(
   snapshot,
   dependency_key,
   if dependency.maybe_code.is_none()
-    // If not @deno-types, diagnose the types if the code errored because
+    // If not @ts-types, diagnose the types if the code errored because
     // it's likely resolving into the node_modules folder, which might be
     // erroring correctly due to resolution only being for bundlers. Let this
     // fail at runtime if necessary, but don't bother erroring in the editor
@@ -1951,7 +1951,7 @@ let c: number = "a";
   &[(
     "a.ts",
     r#"
-      // @deno-types="bad.d.ts"
+      // @ts-types="bad.d.ts"
      import "bad.js";
      import "bad.js";
    "#,
@@ -2005,11 +2005,11 @@ let c: number = "a";
   "range": {
     "start": {
      "line": 1,
-      "character": 23
+      "character": 21
     },
     "end": {
      "line": 1,
-      "character": 33
+      "character": 31
     }
   },
   "severity": 1,


@@ -65,6 +65,12 @@ pub enum LanguageId {
   Html,
   Css,
   Yaml,
+  Sql,
+  Svelte,
+  Vue,
+  Astro,
+  Vento,
+  Nunjucks,
   Unknown,
 }
@@ -81,6 +87,12 @@ impl LanguageId {
   LanguageId::Html => Some("html"),
   LanguageId::Css => Some("css"),
   LanguageId::Yaml => Some("yaml"),
+  LanguageId::Sql => Some("sql"),
+  LanguageId::Svelte => Some("svelte"),
+  LanguageId::Vue => Some("vue"),
+  LanguageId::Astro => Some("astro"),
+  LanguageId::Vento => Some("vto"),
+  LanguageId::Nunjucks => Some("njk"),
   LanguageId::Unknown => None,
  }
 }
@@ -96,6 +108,12 @@ impl LanguageId {
   LanguageId::Html => Some("text/html"),
   LanguageId::Css => Some("text/css"),
   LanguageId::Yaml => Some("application/yaml"),
+  LanguageId::Sql => None,
+  LanguageId::Svelte => None,
+  LanguageId::Vue => None,
+  LanguageId::Astro => None,
+  LanguageId::Vento => None,
+  LanguageId::Nunjucks => None,
   LanguageId::Unknown => None,
  }
 }
@@ -123,6 +141,12 @@ impl FromStr for LanguageId {
   "html" => Ok(Self::Html),
   "css" => Ok(Self::Css),
   "yaml" => Ok(Self::Yaml),
+  "sql" => Ok(Self::Sql),
+  "svelte" => Ok(Self::Svelte),
+  "vue" => Ok(Self::Vue),
+  "astro" => Ok(Self::Astro),
+  "vento" => Ok(Self::Vento),
+  "nunjucks" => Ok(Self::Nunjucks),
   _ => Ok(Self::Unknown),
  }
 }


@@ -270,7 +270,12 @@ impl LanguageServer {
   open_docs: &open_docs,
 };
 let graph = module_graph_creator
-  .create_graph_with_loader(GraphKind::All, roots.clone(), &mut loader)
+  .create_graph_with_loader(
+    GraphKind::All,
+    roots.clone(),
+    &mut loader,
+    graph_util::NpmCachingStrategy::Eager,
+  )
   .await?;
 graph_util::graph_valid(
   &graph,
@@ -3671,6 +3676,7 @@ impl Inner {
   .unwrap_or_else(create_default_npmrc),
   workspace,
   force_global_cache,
+  None,
 )?;
 let open_docs = self.documents.documents(DocumentsFilter::OpenDiagnosable);

View file

@ -133,7 +133,8 @@ impl LspScopeResolver {
cache.for_specifier(config_data.map(|d| d.scope.as_ref())), cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.and_then(|d| d.lockfile.clone()), config_data.and_then(|d| d.lockfile.clone()),
))); )));
let npm_graph_resolver = cli_resolver.create_graph_npm_resolver(); let npm_graph_resolver = cli_resolver
.create_graph_npm_resolver(crate::graph_util::NpmCachingStrategy::Eager);
let maybe_jsx_import_source_config = let maybe_jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config()); config_data.and_then(|d| d.maybe_jsx_import_source_config());
let graph_imports = config_data let graph_imports = config_data
@ -343,7 +344,9 @@ impl LspResolver {
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> WorkerCliNpmGraphResolver { ) -> WorkerCliNpmGraphResolver {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
resolver.resolver.create_graph_npm_resolver() resolver
.resolver
.create_graph_npm_resolver(crate::graph_util::NpmCachingStrategy::Eager)
} }
pub fn as_is_cjs_resolver( pub fn as_is_cjs_resolver(

View file

@ -3419,7 +3419,7 @@ fn parse_code_actions(
&specifier_rewrite.new_deno_types_specifier &specifier_rewrite.new_deno_types_specifier
{ {
text_edit.new_text = format!( text_edit.new_text = format!(
"// @deno-types=\"{}\"\n{}", "// @ts-types=\"{}\"\n{}",
deno_types_specifier, &text_edit.new_text deno_types_specifier, &text_edit.new_text
); );
} }
@ -3594,10 +3594,8 @@ impl CompletionEntryDetails {
if let Some(deno_types_specifier) = if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier &specifier_rewrite.new_deno_types_specifier
{ {
*new_text = format!( *new_text =
"// @deno-types=\"{}\"\n{}", format!("// @ts-types=\"{}\"\n{}", deno_types_specifier, new_text);
deno_types_specifier, new_text
);
} }
} }
} }

View file

@ -437,20 +437,18 @@ fn resolve_flags_and_init(
if err.kind() == clap::error::ErrorKind::DisplayVersion => if err.kind() == clap::error::ErrorKind::DisplayVersion =>
{ {
// Ignore results to avoid BrokenPipe errors. // Ignore results to avoid BrokenPipe errors.
util::logger::init(None); util::logger::init(None, None);
let _ = err.print(); let _ = err.print();
deno_runtime::exit(0); deno_runtime::exit(0);
} }
Err(err) => { Err(err) => {
util::logger::init(None); util::logger::init(None, None);
exit_for_error(AnyError::from(err)) exit_for_error(AnyError::from(err))
} }
}; };
if let Some(otel_config) = flags.otel_config() { deno_telemetry::init(crate::args::otel_runtime_config())?;
deno_telemetry::init(otel_config)?; util::logger::init(flags.log_level, Some(flags.otel_config()));
}
util::logger::init(flags.log_level);
// TODO(bartlomieju): remove in Deno v2.5 and hard error then. // TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled { if flags.unstable_config.legacy_flag_enabled {

View file

@ -87,17 +87,18 @@ fn main() {
let future = async move { let future = async move {
match standalone { match standalone {
Ok(Some(data)) => { Ok(Some(data)) => {
if let Some(otel_config) = data.metadata.otel_config.clone() { deno_telemetry::init(crate::args::otel_runtime_config())?;
deno_telemetry::init(otel_config)?; util::logger::init(
} data.metadata.log_level,
util::logger::init(data.metadata.log_level); Some(data.metadata.otel_config.clone()),
);
load_env_vars(&data.metadata.env_vars_from_env_file); load_env_vars(&data.metadata.env_vars_from_env_file);
let exit_code = standalone::run(data).await?; let exit_code = standalone::run(data).await?;
deno_runtime::exit(exit_code); deno_runtime::exit(exit_code);
} }
Ok(None) => Ok(()), Ok(None) => Ok(()),
Err(err) => { Err(err) => {
util::logger::init(None); util::logger::init(None, None);
Err(err) Err(err)
} }
} }

View file

@ -156,6 +156,7 @@ impl ModuleLoadPreparer {
graph_kind: graph.graph_kind(), graph_kind: graph.graph_kind(),
roots: roots.to_vec(), roots: roots.to_vec(),
loader: Some(&mut cache), loader: Some(&mut cache),
npm_caching: self.options.default_npm_caching_strategy(),
}, },
) )
.await?; .await?;

View file

@ -296,6 +296,12 @@ pub fn create_managed_in_npm_pkg_checker(
Arc::new(ManagedInNpmPackageChecker { root_dir }) Arc::new(ManagedInNpmPackageChecker { root_dir })
} }
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PackageCaching<'a> {
Only(Cow<'a, [PackageReq]>),
All,
}
/// An npm resolver where the resolution is managed by Deno rather than /// An npm resolver where the resolution is managed by Deno rather than
/// the user bringing their own node_modules (BYONM) on the file system. /// the user bringing their own node_modules (BYONM) on the file system.
pub struct ManagedCliNpmResolver { pub struct ManagedCliNpmResolver {
@ -420,19 +426,44 @@ impl ManagedCliNpmResolver {
/// Adds package requirements to the resolver and ensures everything is setup. /// Adds package requirements to the resolver and ensures everything is setup.
/// This includes setting up the `node_modules` directory, if applicable. /// This includes setting up the `node_modules` directory, if applicable.
pub async fn add_package_reqs( pub async fn add_and_cache_package_reqs(
&self, &self,
packages: &[PackageReq], packages: &[PackageReq],
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
self self
.add_package_reqs_raw(packages) .add_package_reqs_raw(
packages,
Some(PackageCaching::Only(packages.into())),
)
.await .await
.dependencies_result .dependencies_result
} }
pub async fn add_package_reqs_raw( pub async fn add_package_reqs_no_cache(
&self, &self,
packages: &[PackageReq], packages: &[PackageReq],
) -> Result<(), AnyError> {
self
.add_package_reqs_raw(packages, None)
.await
.dependencies_result
}
pub async fn add_package_reqs(
&self,
packages: &[PackageReq],
caching: PackageCaching<'_>,
) -> Result<(), AnyError> {
self
.add_package_reqs_raw(packages, Some(caching))
.await
.dependencies_result
}
pub async fn add_package_reqs_raw<'a>(
&self,
packages: &[PackageReq],
caching: Option<PackageCaching<'a>>,
) -> AddPkgReqsResult { ) -> AddPkgReqsResult {
if packages.is_empty() { if packages.is_empty() {
return AddPkgReqsResult { return AddPkgReqsResult {
@ -449,7 +480,9 @@ impl ManagedCliNpmResolver {
} }
} }
if result.dependencies_result.is_ok() { if result.dependencies_result.is_ok() {
result.dependencies_result = self.cache_packages().await; if let Some(caching) = caching {
result.dependencies_result = self.cache_packages(caching).await;
}
} }
result result
@ -491,16 +524,20 @@ impl ManagedCliNpmResolver {
pub async fn inject_synthetic_types_node_package( pub async fn inject_synthetic_types_node_package(
&self, &self,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let reqs = &[PackageReq::from_str("@types/node").unwrap()];
// add and ensure this isn't added to the lockfile // add and ensure this isn't added to the lockfile
self self
.add_package_reqs(&[PackageReq::from_str("@types/node").unwrap()]) .add_package_reqs(reqs, PackageCaching::Only(reqs.into()))
.await?; .await?;
Ok(()) Ok(())
} }
pub async fn cache_packages(&self) -> Result<(), AnyError> { pub async fn cache_packages(
self.fs_resolver.cache_packages().await &self,
caching: PackageCaching<'_>,
) -> Result<(), AnyError> {
self.fs_resolver.cache_packages(caching).await
} }
pub fn resolve_pkg_folder_from_deno_module( pub fn resolve_pkg_folder_from_deno_module(
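The PackageCaching enum introduced above lets callers decide whether adding package requirements also caches everything (All), only the named requirements (Only), or nothing at all (the new add_package_reqs_no_cache path). A minimal sketch of the three entry points, assuming the signatures shown in this diff (illustrative only; `resolver` is a ManagedCliNpmResolver and `reqs` a `&[PackageReq]`):

// Resolve and cache exactly the requested packages:
resolver.add_package_reqs(reqs, PackageCaching::Only(reqs.into())).await?;
// Resolve the requirements and eagerly cache the whole resolution:
resolver.add_package_reqs(reqs, PackageCaching::All).await?;
// Resolve only; defer caching to a later cache_packages(..) call:
resolver.add_package_reqs_no_cache(reqs).await?;
resolver.cache_packages(PackageCaching::All).await?;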
@ -545,18 +582,18 @@ impl ManagedCliNpmResolver {
/// Ensures that the top level `package.json` dependencies are installed. /// Ensures that the top level `package.json` dependencies are installed.
/// This may set up the `node_modules` directory. /// This may set up the `node_modules` directory.
/// ///
/// Returns `true` if any changes (such as caching packages) were made. /// Returns `true` if the top level packages are already installed. A
/// If this returns `false`, `node_modules` has _not_ been set up. /// return value of `false` means that new packages were added to the NPM resolution.
pub async fn ensure_top_level_package_json_install( pub async fn ensure_top_level_package_json_install(
&self, &self,
) -> Result<bool, AnyError> { ) -> Result<bool, AnyError> {
if !self.top_level_install_flag.raise() { if !self.top_level_install_flag.raise() {
return Ok(false); // already did this return Ok(true); // already did this
} }
let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs(); let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs();
if pkg_json_remote_pkgs.is_empty() { if pkg_json_remote_pkgs.is_empty() {
return Ok(false); return Ok(true);
} }
// check if something needs resolving before bothering to load all // check if something needs resolving before bothering to load all
@ -570,14 +607,16 @@ impl ManagedCliNpmResolver {
log::debug!( log::debug!(
"All package.json deps resolvable. Skipping top level install." "All package.json deps resolvable. Skipping top level install."
); );
return Ok(false); // everything is already resolvable return Ok(true); // everything is already resolvable
} }
let pkg_reqs = pkg_json_remote_pkgs let pkg_reqs = pkg_json_remote_pkgs
.iter() .iter()
.map(|pkg| pkg.req.clone()) .map(|pkg| pkg.req.clone())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self.add_package_reqs(&pkg_reqs).await.map(|_| true) self.add_package_reqs_no_cache(&pkg_reqs).await?;
Ok(false)
} }
pub async fn cache_package_info( pub async fn cache_package_info(

View file

@ -255,6 +255,10 @@ impl NpmResolution {
.read() .read()
.as_valid_serialized_for_system(system_info) .as_valid_serialized_for_system(system_info)
} }
pub fn subset(&self, package_reqs: &[PackageReq]) -> NpmResolutionSnapshot {
self.snapshot.read().subset(package_reqs)
}
} }
async fn add_package_reqs_to_snapshot( async fn add_package_reqs_to_snapshot(
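The new subset helper backs the Only caching variant: a hedged sketch of how the fs resolvers later in this diff use it (illustrative):

// Illustrative only: restrict the snapshot to the requested packages before
// partitioning, mirroring the global/local cache_packages implementations below.
let snapshot = resolution.subset(&reqs);
let partitions = snapshot.all_system_packages_partitioned(&system_info);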

View file

@ -11,6 +11,7 @@ use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
use super::super::PackageCaching;
use async_trait::async_trait; use async_trait::async_trait;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
@ -57,7 +58,10 @@ pub trait NpmPackageFsResolver: Send + Sync {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageCacheFolderId>, AnyError>; ) -> Result<Option<NpmPackageCacheFolderId>, AnyError>;
async fn cache_packages(&self) -> Result<(), AnyError>; async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError>;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn ensure_read_permission<'a>( fn ensure_read_permission<'a>(

View file

@ -8,6 +8,7 @@ use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use crate::colors; use crate::colors;
use crate::npm::managed::PackageCaching;
use crate::npm::CliNpmCache; use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache; use crate::npm::CliNpmTarballCache;
use async_trait::async_trait; use async_trait::async_trait;
@ -150,10 +151,19 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
) )
} }
async fn cache_packages(&self) -> Result<(), AnyError> { async fn cache_packages<'a>(
let package_partitions = self &self,
.resolution caching: PackageCaching<'a>,
.all_system_packages_partitioned(&self.system_info); ) -> Result<(), AnyError> {
let package_partitions = match caching {
PackageCaching::All => self
.resolution
.all_system_packages_partitioned(&self.system_info),
PackageCaching::Only(reqs) => self
.resolution
.subset(&reqs)
.all_system_packages_partitioned(&self.system_info),
};
cache_packages(&package_partitions.packages, &self.tarball_cache).await?; cache_packages(&package_partitions.packages, &self.tarball_cache).await?;
// create the copy package folders // create the copy package folders

View file

@ -17,6 +17,7 @@ use std::sync::Arc;
use crate::args::LifecycleScriptsConfig; use crate::args::LifecycleScriptsConfig;
use crate::colors; use crate::colors;
use crate::npm::managed::PackageCaching;
use crate::npm::CliNpmCache; use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache; use crate::npm::CliNpmTarballCache;
use async_trait::async_trait; use async_trait::async_trait;
@ -253,9 +254,16 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
)) ))
} }
async fn cache_packages(&self) -> Result<(), AnyError> { async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError> {
let snapshot = match caching {
PackageCaching::All => self.resolution.snapshot(),
PackageCaching::Only(reqs) => self.resolution.subset(&reqs),
};
sync_resolution_with_fs( sync_resolution_with_fs(
&self.resolution.snapshot(), &snapshot,
&self.cache, &self.cache,
&self.npm_install_deps_provider, &self.npm_install_deps_provider,
&self.progress_bar, &self.progress_bar,

View file

@ -41,6 +41,7 @@ pub use self::managed::CliManagedInNpmPkgCheckerCreateOptions;
pub use self::managed::CliManagedNpmResolverCreateOptions; pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver; pub use self::managed::ManagedCliNpmResolver;
pub use self::managed::PackageCaching;
pub type CliNpmTarballCache = deno_npm_cache::TarballCache<CliNpmCacheEnv>; pub type CliNpmTarballCache = deno_npm_cache::TarballCache<CliNpmCacheEnv>;
pub type CliNpmCache = deno_npm_cache::NpmCache<CliNpmCacheEnv>; pub type CliNpmCache = deno_npm_cache::NpmCache<CliNpmCacheEnv>;

View file

@ -32,6 +32,7 @@ use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use thiserror::Error; use thiserror::Error;
use crate::args::NpmCachingStrategy;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
@ -240,11 +241,15 @@ impl CliResolver {
// todo(dsherret): move this off CliResolver as CliResolver is acting // todo(dsherret): move this off CliResolver as CliResolver is acting
// like a factory by doing this (it's beyond its responsibility) // like a factory by doing this (it's beyond its responsibility)
pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver { pub fn create_graph_npm_resolver(
&self,
npm_caching: NpmCachingStrategy,
) -> WorkerCliNpmGraphResolver {
WorkerCliNpmGraphResolver { WorkerCliNpmGraphResolver {
npm_resolver: self.npm_resolver.as_ref(), npm_resolver: self.npm_resolver.as_ref(),
found_package_json_dep_flag: &self.found_package_json_dep_flag, found_package_json_dep_flag: &self.found_package_json_dep_flag,
bare_node_builtins_enabled: self.bare_node_builtins_enabled, bare_node_builtins_enabled: self.bare_node_builtins_enabled,
npm_caching,
} }
} }
@ -304,6 +309,7 @@ pub struct WorkerCliNpmGraphResolver<'a> {
npm_resolver: Option<&'a Arc<dyn CliNpmResolver>>, npm_resolver: Option<&'a Arc<dyn CliNpmResolver>>,
found_package_json_dep_flag: &'a AtomicFlag, found_package_json_dep_flag: &'a AtomicFlag,
bare_node_builtins_enabled: bool, bare_node_builtins_enabled: bool,
npm_caching: NpmCachingStrategy,
} }
#[async_trait(?Send)] #[async_trait(?Send)]
@ -373,7 +379,20 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
Ok(()) Ok(())
}; };
let result = npm_resolver.add_package_reqs_raw(package_reqs).await; let result = npm_resolver
.add_package_reqs_raw(
package_reqs,
match self.npm_caching {
NpmCachingStrategy::Eager => {
Some(crate::npm::PackageCaching::All)
}
NpmCachingStrategy::Lazy => {
Some(crate::npm::PackageCaching::Only(package_reqs.into()))
}
NpmCachingStrategy::Manual => None,
},
)
.await;
NpmResolvePkgReqsResult { NpmResolvePkgReqsResult {
results: result results: result
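In short, the graph npm resolver now translates the CLI-level NpmCachingStrategy into the resolver-level PackageCaching seen earlier. A condensed restatement of that mapping for clarity (illustrative; the authoritative code is in the hunk above):

// Eager  -> cache the entire resolution after adding the reqs
// Lazy   -> cache only the packages that were just requested
// Manual -> resolve but do not cache; the caller caches explicitly later
let caching = match npm_caching {
  NpmCachingStrategy::Eager => Some(PackageCaching::All),
  NpmCachingStrategy::Lazy => Some(PackageCaching::Only(package_reqs.into())),
  NpmCachingStrategy::Manual => None,
};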

View file

@ -291,7 +291,7 @@
"type": "array", "type": "array",
"description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.", "description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.",
"items": { "items": {
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/tags.v1.json" "$ref": "lint-tags.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true
@ -300,7 +300,7 @@
"type": "array", "type": "array",
"description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.", "description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.",
"items": { "items": {
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json" "$ref": "lint-rules.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true
@ -309,7 +309,7 @@
"type": "array", "type": "array",
"description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.", "description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.",
"items": { "items": {
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json" "$ref": "lint-rules.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true

View file

@ -0,0 +1,112 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"enum": [
"adjacent-overload-signatures",
"ban-ts-comment",
"ban-types",
"ban-unknown-rule-code",
"ban-untagged-ignore",
"ban-untagged-todo",
"ban-unused-ignore",
"camelcase",
"constructor-super",
"default-param-last",
"eqeqeq",
"explicit-function-return-type",
"explicit-module-boundary-types",
"for-direction",
"fresh-handler-export",
"fresh-server-event-handlers",
"getter-return",
"guard-for-in",
"no-array-constructor",
"no-async-promise-executor",
"no-await-in-loop",
"no-await-in-sync-fn",
"no-boolean-literal-for-arguments",
"no-case-declarations",
"no-class-assign",
"no-compare-neg-zero",
"no-cond-assign",
"no-console",
"no-const-assign",
"no-constant-condition",
"no-control-regex",
"no-debugger",
"no-delete-var",
"no-deprecated-deno-api",
"no-dupe-args",
"no-dupe-class-members",
"no-dupe-else-if",
"no-dupe-keys",
"no-duplicate-case",
"no-empty",
"no-empty-character-class",
"no-empty-enum",
"no-empty-interface",
"no-empty-pattern",
"no-eval",
"no-ex-assign",
"no-explicit-any",
"no-external-import",
"no-extra-boolean-cast",
"no-extra-non-null-assertion",
"no-fallthrough",
"no-func-assign",
"no-global-assign",
"no-implicit-declare-namespace-export",
"no-import-assertions",
"no-import-assign",
"no-inferrable-types",
"no-inner-declarations",
"no-invalid-regexp",
"no-invalid-triple-slash-reference",
"no-irregular-whitespace",
"no-misused-new",
"no-namespace",
"no-new-symbol",
"no-node-globals",
"no-non-null-asserted-optional-chain",
"no-non-null-assertion",
"no-obj-calls",
"no-octal",
"no-process-globals",
"no-prototype-builtins",
"no-redeclare",
"no-regex-spaces",
"no-self-assign",
"no-self-compare",
"no-setter-return",
"no-shadow-restricted-names",
"no-sloppy-imports",
"no-slow-types",
"no-sparse-arrays",
"no-sync-fn-in-async-fn",
"no-this-alias",
"no-this-before-super",
"no-throw-literal",
"no-top-level-await",
"no-undef",
"no-unreachable",
"no-unsafe-finally",
"no-unsafe-negation",
"no-unused-labels",
"no-unused-vars",
"no-var",
"no-window",
"no-window-prefix",
"no-with",
"prefer-as-const",
"prefer-ascii",
"prefer-const",
"prefer-namespace-keyword",
"prefer-primordials",
"require-await",
"require-yield",
"single-var-declarator",
"triple-slash-reference",
"use-isnan",
"valid-typeof",
"verbatim-module-syntax"
]
}

View file

@ -0,0 +1,4 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"enum": ["fresh", "jsr", "jsx", "react", "recommended"]
}

View file

@ -44,6 +44,9 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs; use deno_runtime::deno_fs::RealFs;
@ -76,6 +79,7 @@ use crate::resolver::CjsTracker;
use crate::shared::ReleaseChannel; use crate::shared::ReleaseChannel;
use crate::standalone::virtual_fs::VfsEntry; use crate::standalone::virtual_fs::VfsEntry;
use crate::util::archive; use crate::util::archive;
use crate::util::fs::canonicalize_path;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressBarStyle;
@ -87,31 +91,29 @@ use super::serialization::DenoCompileModuleData;
use super::serialization::DeserializedDataSection; use super::serialization::DeserializedDataSection;
use super::serialization::RemoteModulesStore; use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder; use super::serialization::RemoteModulesStoreBuilder;
use super::virtual_fs::output_vfs;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::FileBackedVfs; use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsBuilder; use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsFileSubDataKind; use super::virtual_fs::VfsFileSubDataKind;
use super::virtual_fs::VfsRoot; use super::virtual_fs::VfsRoot;
use super::virtual_fs::VirtualDirectory; use super::virtual_fs::VirtualDirectory;
use super::virtual_fs::WindowsSystemRootablePath;
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
".deno_compile_node_modules";
/// A URL that can be designated as the base for relative URLs. /// A URL that can be designated as the base for relative URLs.
/// ///
/// After creation, this URL may be used to get the key for a /// After creation, this URL may be used to get the key for a
/// module in the binary. /// module in the binary.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StandaloneRelativeFileBaseUrl<'a>(&'a Url); pub enum StandaloneRelativeFileBaseUrl<'a> {
WindowsSystemRoot,
impl<'a> From<&'a Url> for StandaloneRelativeFileBaseUrl<'a> { Path(&'a Url),
fn from(url: &'a Url) -> Self {
Self(url)
}
} }
impl<'a> StandaloneRelativeFileBaseUrl<'a> { impl<'a> StandaloneRelativeFileBaseUrl<'a> {
pub fn new(url: &'a Url) -> Self {
debug_assert_eq!(url.scheme(), "file");
Self(url)
}
/// Gets the module map key of the provided specifier. /// Gets the module map key of the provided specifier.
/// ///
/// * Descendant file specifiers will be made relative to the base. /// * Descendant file specifiers will be made relative to the base.
@ -121,22 +123,29 @@ impl<'a> StandaloneRelativeFileBaseUrl<'a> {
if target.scheme() != "file" { if target.scheme() != "file" {
return Cow::Borrowed(target.as_str()); return Cow::Borrowed(target.as_str());
} }
let base = match self {
Self::Path(base) => base,
Self::WindowsSystemRoot => return Cow::Borrowed(target.path()),
};
match self.0.make_relative(target) { match base.make_relative(target) {
Some(relative) => { Some(relative) => {
if relative.starts_with("../") { // This is not a great scenario to have because it means that the
Cow::Borrowed(target.as_str()) // specifier is outside the vfs and could cause the binary to act
} else { // strangely. If you encounter this, the fix is to add more paths
Cow::Owned(relative) // to the vfs builder by calling `add_possible_min_root_dir`.
} debug_assert!(
!relative.starts_with("../"),
"{} -> {} ({})",
base.as_str(),
target.as_str(),
relative,
);
Cow::Owned(relative)
} }
None => Cow::Borrowed(target.as_str()), None => Cow::Borrowed(target.as_str()),
} }
} }
pub fn inner(&self) -> &Url {
self.0
}
} }
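A hedged sketch of how specifier_key behaves with the new enum, assuming the parameter and return types implied by the body above (the exact signature sits outside this hunk):

// Illustrative only: descendants of the base become relative keys, non-file
// specifiers pass through unchanged, and a Windows system root keeps paths as-is.
let base_url = Url::parse("file:///app/").unwrap();
let base = StandaloneRelativeFileBaseUrl::Path(&base_url);
assert_eq!(
  base.specifier_key(&Url::parse("file:///app/src/main.ts").unwrap()),
  "src/main.ts"
);
let remote = Url::parse("https://deno.land/x/mod.ts").unwrap();
assert_eq!(base.specifier_key(&remote), remote.as_str());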
#[derive(Deserialize, Serialize)] #[derive(Deserialize, Serialize)]
@ -191,7 +200,7 @@ pub struct Metadata {
pub entrypoint_key: String, pub entrypoint_key: String,
pub node_modules: Option<NodeModules>, pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig, pub unstable_config: UnstableConfig,
pub otel_config: Option<OtelConfig>, // None means disabled. pub otel_config: OtelConfig,
} }
fn write_binary_bytes( fn write_binary_bytes(
@ -200,7 +209,7 @@ fn write_binary_bytes(
metadata: &Metadata, metadata: &Metadata,
npm_snapshot: Option<SerializedNpmResolutionSnapshot>, npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
remote_modules: &RemoteModulesStoreBuilder, remote_modules: &RemoteModulesStoreBuilder,
vfs: VfsBuilder, vfs: &BuiltVfs,
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let data_section_bytes = let data_section_bytes =
@ -367,6 +376,15 @@ pub fn extract_standalone(
})) }))
} }
pub struct WriteBinOptions<'a> {
pub writer: File,
pub display_output_filename: &'a str,
pub graph: &'a ModuleGraph,
pub entrypoint: &'a ModuleSpecifier,
pub include_files: &'a [ModuleSpecifier],
pub compile_flags: &'a CompileFlags,
}
pub struct DenoCompileBinaryWriter<'a> { pub struct DenoCompileBinaryWriter<'a> {
cjs_tracker: &'a CjsTracker, cjs_tracker: &'a CjsTracker,
cli_options: &'a CliOptions, cli_options: &'a CliOptions,
@ -407,18 +425,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
pub async fn write_bin( pub async fn write_bin(
&self, &self,
writer: File, options: WriteBinOptions<'_>,
graph: &ModuleGraph,
root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier,
include_files: &[ModuleSpecifier],
compile_flags: &CompileFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
// Select base binary based on target // Select base binary based on target
let mut original_binary = self.get_base_binary(compile_flags).await?; let mut original_binary =
self.get_base_binary(options.compile_flags).await?;
if compile_flags.no_terminal { if options.compile_flags.no_terminal {
let target = compile_flags.resolve_target(); let target = options.compile_flags.resolve_target();
if !target.contains("windows") { if !target.contains("windows") {
bail!( bail!(
"The `--no-terminal` flag is only available when targeting Windows (current: {})", "The `--no-terminal` flag is only available when targeting Windows (current: {})",
@ -428,8 +442,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
set_windows_binary_to_gui(&mut original_binary) set_windows_binary_to_gui(&mut original_binary)
.context("Setting windows binary to GUI.")?; .context("Setting windows binary to GUI.")?;
} }
if compile_flags.icon.is_some() { if options.compile_flags.icon.is_some() {
let target = compile_flags.resolve_target(); let target = options.compile_flags.resolve_target();
if !target.contains("windows") { if !target.contains("windows") {
bail!( bail!(
"The `--icon` flag is only available when targeting Windows (current: {})", "The `--icon` flag is only available when targeting Windows (current: {})",
@ -437,17 +451,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
) )
} }
} }
self self.write_standalone_binary(options, original_binary).await
.write_standalone_binary(
writer,
original_binary,
graph,
root_dir_url,
entrypoint,
include_files,
compile_flags,
)
.await
} }
async fn get_base_binary( async fn get_base_binary(
@ -552,14 +556,17 @@ impl<'a> DenoCompileBinaryWriter<'a> {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
async fn write_standalone_binary( async fn write_standalone_binary(
&self, &self,
writer: File, options: WriteBinOptions<'_>,
original_bin: Vec<u8>, original_bin: Vec<u8>,
graph: &ModuleGraph,
root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier,
include_files: &[ModuleSpecifier],
compile_flags: &CompileFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let WriteBinOptions {
writer,
display_output_filename,
graph,
entrypoint,
include_files,
compile_flags,
} = options;
let ca_data = match self.cli_options.ca_data() { let ca_data = match self.cli_options.ca_data() {
Some(CaData::File(ca_file)) => Some( Some(CaData::File(ca_file)) => Some(
std::fs::read(ca_file).with_context(|| format!("Reading {ca_file}"))?, std::fs::read(ca_file).with_context(|| format!("Reading {ca_file}"))?,
@ -567,74 +574,28 @@ impl<'a> DenoCompileBinaryWriter<'a> {
Some(CaData::Bytes(bytes)) => Some(bytes.clone()), Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
None => None, None => None,
}; };
let root_path = root_dir_url.inner().to_file_path().unwrap(); let mut vfs = VfsBuilder::new();
let (maybe_npm_vfs, node_modules, npm_snapshot) = let npm_snapshot = match self.npm_resolver.as_inner() {
match self.npm_resolver.as_inner() { InnerCliNpmResolverRef::Managed(managed) => {
InnerCliNpmResolverRef::Managed(managed) => { let snapshot =
let snapshot = managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
managed.serialized_valid_snapshot_for_system(&self.npm_system_info); if !snapshot.as_serialized().packages.is_empty() {
if !snapshot.as_serialized().packages.is_empty() { self.fill_npm_vfs(&mut vfs).context("Building npm vfs.")?;
let npm_vfs_builder = self Some(snapshot)
.build_npm_vfs(&root_path) } else {
.context("Building npm vfs.")?; None
(
Some(npm_vfs_builder),
Some(NodeModules::Managed {
node_modules_dir: self
.npm_resolver
.root_node_modules_path()
.map(|path| {
root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(path).unwrap(),
)
.into_owned()
}),
}),
Some(snapshot),
)
} else {
(None, None, None)
}
} }
InnerCliNpmResolverRef::Byonm(resolver) => { }
let npm_vfs_builder = self.build_npm_vfs(&root_path)?; InnerCliNpmResolverRef::Byonm(_) => {
( self.fill_npm_vfs(&mut vfs)?;
Some(npm_vfs_builder), None
Some(NodeModules::Byonm { }
root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| {
root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(node_modules_dir)
.unwrap(),
)
.into_owned()
},
),
}),
None,
)
}
};
let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs {
npm_vfs
} else {
VfsBuilder::new(root_path.clone())?
}; };
for include_file in include_files { for include_file in include_files {
let path = deno_path_util::url_to_file_path(include_file)?; let path = deno_path_util::url_to_file_path(include_file)?;
if path.is_dir() { vfs
// TODO(#26941): we should analyze if any of these are .add_file_at_path(&path)
// modules in order to include their dependencies .with_context(|| format!("Including {}", path.display()))?;
vfs
.add_dir_recursive(&path)
.with_context(|| format!("Including {}", path.display()))?;
} else {
vfs
.add_file_at_path(&path)
.with_context(|| format!("Including {}", path.display()))?;
}
} }
let mut remote_modules_store = RemoteModulesStoreBuilder::default(); let mut remote_modules_store = RemoteModulesStoreBuilder::default();
let mut code_cache_key_hasher = if self.cli_options.code_cache_enabled() { let mut code_cache_key_hasher = if self.cli_options.code_cache_enabled() {
@ -706,6 +667,62 @@ impl<'a> DenoCompileBinaryWriter<'a> {
} }
remote_modules_store.add_redirects(&graph.redirects); remote_modules_store.add_redirects(&graph.redirects);
if let Some(import_map) = self.workspace_resolver.maybe_import_map() {
if let Ok(file_path) = url_to_file_path(import_map.base_url()) {
if let Some(import_map_parent_dir) = file_path.parent() {
// tell the vfs about the import map's parent directory in case it
// falls outside what the root of where the VFS will be based
vfs.add_possible_min_root_dir(import_map_parent_dir);
}
}
}
if let Some(node_modules_dir) = self.npm_resolver.root_node_modules_path() {
// ensure the vfs doesn't go below the node_modules directory's parent
if let Some(parent) = node_modules_dir.parent() {
vfs.add_possible_min_root_dir(parent);
}
}
let vfs = self.build_vfs_consolidating_global_npm_cache(vfs);
let root_dir_url = match &vfs.root_path {
WindowsSystemRootablePath::Path(dir) => {
Some(url_from_directory_path(dir)?)
}
WindowsSystemRootablePath::WindowSystemRoot => None,
};
let root_dir_url = match &root_dir_url {
Some(url) => StandaloneRelativeFileBaseUrl::Path(url),
None => StandaloneRelativeFileBaseUrl::WindowsSystemRoot,
};
let node_modules = match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(_) => {
npm_snapshot.as_ref().map(|_| NodeModules::Managed {
node_modules_dir: self.npm_resolver.root_node_modules_path().map(
|path| {
root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(path).unwrap(),
)
.into_owned()
},
),
})
}
InnerCliNpmResolverRef::Byonm(resolver) => Some(NodeModules::Byonm {
root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| {
root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(node_modules_dir)
.unwrap(),
)
.into_owned()
},
),
}),
};
let env_vars_from_env_file = match self.cli_options.env_file_name() { let env_vars_from_env_file = match self.cli_options.env_file_name() {
Some(env_filenames) => { Some(env_filenames) => {
let mut aggregated_env_vars = IndexMap::new(); let mut aggregated_env_vars = IndexMap::new();
@ -720,6 +737,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
None => Default::default(), None => Default::default(),
}; };
output_vfs(&vfs, display_output_filename);
let metadata = Metadata { let metadata = Metadata {
argv: compile_flags.args.clone(), argv: compile_flags.args.clone(),
seed: self.cli_options.seed(), seed: self.cli_options.seed(),
@ -779,6 +798,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
detect_cjs: self.cli_options.unstable_detect_cjs(), detect_cjs: self.cli_options.unstable_detect_cjs(),
sloppy_imports: self.cli_options.unstable_sloppy_imports(), sloppy_imports: self.cli_options.unstable_sloppy_imports(),
features: self.cli_options.unstable_features(), features: self.cli_options.unstable_features(),
npm_lazy_caching: self.cli_options.unstable_npm_lazy_caching(),
}, },
otel_config: self.cli_options.otel_config(), otel_config: self.cli_options.otel_config(),
}; };
@ -789,13 +809,13 @@ impl<'a> DenoCompileBinaryWriter<'a> {
&metadata, &metadata,
npm_snapshot.map(|s| s.into_serialized()), npm_snapshot.map(|s| s.into_serialized()),
&remote_modules_store, &remote_modules_store,
vfs, &vfs,
compile_flags, compile_flags,
) )
.context("Writing binary bytes") .context("Writing binary bytes")
} }
fn build_npm_vfs(&self, root_path: &Path) -> Result<VfsBuilder, AnyError> { fn fill_npm_vfs(&self, builder: &mut VfsBuilder) -> Result<(), AnyError> {
fn maybe_warn_different_system(system_info: &NpmSystemInfo) { fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
if system_info != &NpmSystemInfo::default() { if system_info != &NpmSystemInfo::default() {
log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning")); log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));
@ -806,15 +826,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
InnerCliNpmResolverRef::Managed(npm_resolver) => { InnerCliNpmResolverRef::Managed(npm_resolver) => {
if let Some(node_modules_path) = npm_resolver.root_node_modules_path() { if let Some(node_modules_path) = npm_resolver.root_node_modules_path() {
maybe_warn_different_system(&self.npm_system_info); maybe_warn_different_system(&self.npm_system_info);
let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
builder.add_dir_recursive(node_modules_path)?; builder.add_dir_recursive(node_modules_path)?;
Ok(builder) Ok(())
} else { } else {
// DO NOT include the user's registry url as it may contain credentials, // we'll flatten to remove any custom registries later
// but also don't make this dependent on the registry url
let global_cache_root_path = npm_resolver.global_cache_root_path();
let mut builder =
VfsBuilder::new(global_cache_root_path.to_path_buf())?;
let mut packages = let mut packages =
npm_resolver.all_system_packages(&self.npm_system_info); npm_resolver.all_system_packages(&self.npm_system_info);
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
@ -823,55 +838,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)?; npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)?;
builder.add_dir_recursive(&folder)?; builder.add_dir_recursive(&folder)?;
} }
Ok(())
// Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
// that will be used by denort when loading the npm cache. This avoids us exposing
// the user's private registry information and means we don't have to bother
// serializing all the different registry config into the binary.
builder.with_root_dir(|root_dir| {
root_dir.name = ".deno_compile_node_modules".to_string();
let mut new_entries = Vec::with_capacity(root_dir.entries.len());
let mut localhost_entries = IndexMap::new();
for entry in std::mem::take(&mut root_dir.entries) {
match entry {
VfsEntry::Dir(dir) => {
for entry in dir.entries {
log::debug!(
"Flattening {} into node_modules",
entry.name()
);
if let Some(existing) =
localhost_entries.insert(entry.name().to_string(), entry)
{
panic!(
"Unhandled scenario where a duplicate entry was found: {:?}",
existing
);
}
}
}
VfsEntry::File(_) | VfsEntry::Symlink(_) => {
new_entries.push(entry);
}
}
}
new_entries.push(VfsEntry::Dir(VirtualDirectory {
name: "localhost".to_string(),
entries: localhost_entries.into_iter().map(|(_, v)| v).collect(),
}));
// needs to be sorted by name
new_entries.sort_by(|a, b| a.name().cmp(b.name()));
root_dir.entries = new_entries;
});
builder.set_new_root_path(root_path.to_path_buf())?;
Ok(builder)
} }
} }
InnerCliNpmResolverRef::Byonm(_) => { InnerCliNpmResolverRef::Byonm(_) => {
maybe_warn_different_system(&self.npm_system_info); maybe_warn_different_system(&self.npm_system_info);
let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
for pkg_json in self.cli_options.workspace().package_jsons() { for pkg_json in self.cli_options.workspace().package_jsons() {
builder.add_file_at_path(&pkg_json.path)?; builder.add_file_at_path(&pkg_json.path)?;
} }
@ -904,10 +875,102 @@ impl<'a> DenoCompileBinaryWriter<'a> {
} }
} }
} }
Ok(builder) Ok(())
} }
} }
} }
fn build_vfs_consolidating_global_npm_cache(
&self,
mut vfs: VfsBuilder,
) -> BuiltVfs {
match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(npm_resolver) => {
if npm_resolver.root_node_modules_path().is_some() {
return vfs.build();
}
let global_cache_root_path = npm_resolver.global_cache_root_path();
// Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
// that will be used by denort when loading the npm cache. This avoids us exposing
// the user's private registry information and means we don't have to bother
// serializing all the different registry config into the binary.
let Some(root_dir) = vfs.get_dir_mut(global_cache_root_path) else {
return vfs.build();
};
root_dir.name = DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME.to_string();
let mut new_entries = Vec::with_capacity(root_dir.entries.len());
let mut localhost_entries = IndexMap::new();
for entry in std::mem::take(&mut root_dir.entries) {
match entry {
VfsEntry::Dir(dir) => {
for entry in dir.entries {
log::debug!("Flattening {} into node_modules", entry.name());
if let Some(existing) =
localhost_entries.insert(entry.name().to_string(), entry)
{
panic!(
"Unhandled scenario where a duplicate entry was found: {:?}",
existing
);
}
}
}
VfsEntry::File(_) | VfsEntry::Symlink(_) => {
new_entries.push(entry);
}
}
}
new_entries.push(VfsEntry::Dir(VirtualDirectory {
name: "localhost".to_string(),
entries: localhost_entries.into_iter().map(|(_, v)| v).collect(),
}));
// needs to be sorted by name
new_entries.sort_by(|a, b| a.name().cmp(b.name()));
root_dir.entries = new_entries;
// it's better to not expose the user's cache directory, so take it out
// of there
let parent = global_cache_root_path.parent().unwrap();
let parent_dir = vfs.get_dir_mut(parent).unwrap();
let index = parent_dir
.entries
.iter()
.position(|entry| {
entry.name() == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME
})
.unwrap();
let npm_global_cache_dir_entry = parent_dir.entries.remove(index);
// go up from the ancestors removing empty directories...
// this is not as optimized as it could be
let mut last_name =
Cow::Borrowed(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME);
for ancestor in parent.ancestors() {
let dir = vfs.get_dir_mut(ancestor).unwrap();
if let Some(index) = dir
.entries
.iter()
.position(|entry| entry.name() == last_name)
{
dir.entries.remove(index);
}
last_name = Cow::Owned(dir.name.clone());
if !dir.entries.is_empty() {
break;
}
}
// now build the vfs and add the global cache dir entry there
let mut built_vfs = vfs.build();
built_vfs.root.insert_entry(npm_global_cache_dir_entry);
built_vfs
}
InnerCliNpmResolverRef::Byonm(_) => vfs.build(),
}
}
} }
fn get_denort_path(deno_exe: PathBuf) -> Option<OsString> { fn get_denort_path(deno_exe: PathBuf) -> Option<OsString> {

View file

@ -924,6 +924,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
serve_host: None, serve_host: None,
}, },
metadata.otel_config, metadata.otel_config,
crate::args::NpmCachingStrategy::Lazy,
); );
// Initialize v8 once from the main thread. // Initialize v8 once from the main thread.

View file

@ -23,6 +23,7 @@ use deno_semver::package::PackageReq;
use crate::standalone::virtual_fs::VirtualDirectory; use crate::standalone::virtual_fs::VirtualDirectory;
use super::binary::Metadata; use super::binary::Metadata;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::VfsBuilder; use super::virtual_fs::VfsBuilder;
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd"; const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
@ -39,7 +40,7 @@ pub fn serialize_binary_data_section(
metadata: &Metadata, metadata: &Metadata,
npm_snapshot: Option<SerializedNpmResolutionSnapshot>, npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
remote_modules: &RemoteModulesStoreBuilder, remote_modules: &RemoteModulesStoreBuilder,
vfs: VfsBuilder, vfs: &BuiltVfs,
) -> Result<Vec<u8>, AnyError> { ) -> Result<Vec<u8>, AnyError> {
fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) { fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
bytes.extend_from_slice(&(data.len() as u64).to_le_bytes()); bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
@ -73,12 +74,11 @@ pub fn serialize_binary_data_section(
} }
// 4. VFS // 4. VFS
{ {
let (vfs, vfs_files) = vfs.into_dir_and_files(); let serialized_vfs = serde_json::to_string(&vfs.root)?;
let vfs = serde_json::to_string(&vfs)?; write_bytes_with_len(&mut bytes, serialized_vfs.as_bytes());
write_bytes_with_len(&mut bytes, vfs.as_bytes()); let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
let vfs_bytes_len = vfs_files.iter().map(|f| f.len() as u64).sum::<u64>();
bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes()); bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes());
for file in &vfs_files { for file in &vfs.files {
bytes.extend_from_slice(file); bytes.extend_from_slice(file);
} }
} }

File diff suppressed because it is too large

View file

@ -585,7 +585,13 @@ pub async fn run_future_forwarding_signals<TOutput>(
async fn listen_ctrl_c(kill_signal: KillSignal) { async fn listen_ctrl_c(kill_signal: KillSignal) {
while let Ok(()) = tokio::signal::ctrl_c().await { while let Ok(()) = tokio::signal::ctrl_c().await {
kill_signal.send(deno_task_shell::SignalKind::SIGINT) // On windows, ctrl+c is sent to the process group, so the signal would
// have already been sent to the child process. We still want to listen
// for ctrl+c here to keep the process alive when receiving it, but no
// need to forward the signal because it's already been sent.
if !cfg!(windows) {
kill_signal.send(deno_task_shell::SignalKind::SIGINT)
}
} }
} }

View file

@ -538,7 +538,11 @@ pub async fn run_benchmarks_with_watch(
)?; )?;
let graph = module_graph_creator let graph = module_graph_creator
.create_graph(graph_kind, collected_bench_modules.clone()) .create_graph(
graph_kind,
collected_bench_modules.clone(),
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?; .await?;
module_graph_creator.graph_valid(&graph)?; module_graph_creator.graph_valid(&graph)?;
let bench_modules = &graph.roots; let bench_modules = &graph.roots;

View file

@ -5,7 +5,7 @@ use crate::args::CompileFlags;
use crate::args::Flags; use crate::args::Flags;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::standalone::binary::StandaloneRelativeFileBaseUrl; use crate::standalone::binary::WriteBinOptions;
use crate::standalone::is_standalone_binary; use crate::standalone::is_standalone_binary;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
@ -15,8 +15,12 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::resolve_url_or_path; use deno_core::resolve_url_or_path;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_terminal::colors; use deno_terminal::colors;
use rand::Rng; use rand::Rng;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@ -69,7 +73,11 @@ pub async fn compile(
// create a module graph with types information in it. We don't want to // create a module graph with types information in it. We don't want to
// store that in the binary so create a code only module graph from scratch. // store that in the binary so create a code only module graph from scratch.
module_graph_creator module_graph_creator
.create_graph(GraphKind::CodeOnly, module_roots) .create_graph(
GraphKind::CodeOnly,
module_roots,
crate::graph_util::NpmCachingStrategy::Eager,
)
.await? .await?
} else { } else {
graph graph
@ -78,20 +86,6 @@ pub async fn compile(
let ts_config_for_emit = cli_options let ts_config_for_emit = cli_options
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?; .resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
check_warn_tsconfig(&ts_config_for_emit); check_warn_tsconfig(&ts_config_for_emit);
let root_dir_url = resolve_root_dir_from_specifiers(
cli_options.workspace().root_dir(),
graph
.specifiers()
.map(|(s, _)| s)
.chain(
cli_options
.node_modules_dir_path()
.and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
.iter(),
)
.chain(include_files.iter()),
);
log::debug!("Binary root dir: {}", root_dir_url);
log::info!( log::info!(
"{} {} to {}", "{} {} to {}",
colors::green("Compile"), colors::green("Compile"),
@ -116,14 +110,17 @@ pub async fn compile(
})?; })?;
let write_result = binary_writer let write_result = binary_writer
.write_bin( .write_bin(WriteBinOptions {
file, writer: file,
&graph, display_output_filename: &output_path
StandaloneRelativeFileBaseUrl::from(&root_dir_url), .file_name()
.unwrap()
.to_string_lossy(),
graph: &graph,
entrypoint, entrypoint,
&include_files, include_files: &include_files,
&compile_flags, compile_flags: &compile_flags,
) })
.await .await
.with_context(|| { .with_context(|| {
format!( format!(
@ -242,15 +239,58 @@ fn get_module_roots_and_include_files(
} }
} }
let mut module_roots = Vec::with_capacity(compile_flags.include.len() + 1); fn analyze_path(
let mut include_files = Vec::with_capacity(compile_flags.include.len()); url: &ModuleSpecifier,
module_roots: &mut Vec<ModuleSpecifier>,
include_files: &mut Vec<ModuleSpecifier>,
searched_paths: &mut HashSet<PathBuf>,
) -> Result<(), AnyError> {
let Ok(path) = url_to_file_path(url) else {
return Ok(());
};
let mut pending = VecDeque::from([path]);
while let Some(path) = pending.pop_front() {
if !searched_paths.insert(path.clone()) {
continue;
}
if !path.is_dir() {
let url = url_from_file_path(&path)?;
include_files.push(url.clone());
if is_module_graph_module(&url) {
module_roots.push(url);
}
continue;
}
for entry in std::fs::read_dir(&path).with_context(|| {
format!("Failed reading directory '{}'", path.display())
})? {
let entry = entry.with_context(|| {
format!("Failed reading entry in directory '{}'", path.display())
})?;
pending.push_back(entry.path());
}
}
Ok(())
}
let mut searched_paths = HashSet::new();
let mut module_roots = Vec::new();
let mut include_files = Vec::new();
module_roots.push(entrypoint.clone()); module_roots.push(entrypoint.clone());
for side_module in &compile_flags.include { for side_module in &compile_flags.include {
let url = resolve_url_or_path(side_module, initial_cwd)?; let url = resolve_url_or_path(side_module, initial_cwd)?;
if is_module_graph_module(&url) { if is_module_graph_module(&url) {
module_roots.push(url); module_roots.push(url.clone());
if url.scheme() == "file" {
include_files.push(url);
}
} else { } else {
include_files.push(url); analyze_path(
&url,
&mut module_roots,
&mut include_files,
&mut searched_paths,
)?;
} }
} }
Ok((module_roots, include_files)) Ok((module_roots, include_files))
@ -316,68 +356,6 @@ fn get_os_specific_filepath(
} }
} }
fn resolve_root_dir_from_specifiers<'a>(
starting_dir: &ModuleSpecifier,
specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
) -> ModuleSpecifier {
fn select_common_root<'a>(a: &'a str, b: &'a str) -> &'a str {
let min_length = a.len().min(b.len());
let mut last_slash = 0;
for i in 0..min_length {
if a.as_bytes()[i] == b.as_bytes()[i] && a.as_bytes()[i] == b'/' {
last_slash = i;
} else if a.as_bytes()[i] != b.as_bytes()[i] {
break;
}
}
// Return the common root path up to the last common slash.
// This returns a slice of the original string 'a', up to and including the last matching '/'.
let common = &a[..=last_slash];
if cfg!(windows) && common == "file:///" {
a
} else {
common
}
}
fn is_file_system_root(url: &str) -> bool {
let Some(path) = url.strip_prefix("file:///") else {
return false;
};
if cfg!(windows) {
let Some((_drive, path)) = path.split_once('/') else {
return true;
};
path.is_empty()
} else {
path.is_empty()
}
}
let mut found_dir = starting_dir.as_str();
if !is_file_system_root(found_dir) {
for specifier in specifiers {
if specifier.scheme() == "file" {
found_dir = select_common_root(found_dir, specifier.as_str());
}
}
}
let found_dir = if is_file_system_root(found_dir) {
found_dir
} else {
// include the parent dir name because it helps create some context
found_dir
.strip_suffix('/')
.unwrap_or(found_dir)
.rfind('/')
.map(|i| &found_dir[..i + 1])
.unwrap_or(found_dir)
};
ModuleSpecifier::parse(found_dir).unwrap()
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
pub use super::*; pub use super::*;
@ -454,38 +432,4 @@ mod test {
run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe"); run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2"); run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
} }
#[test]
fn test_resolve_root_dir_from_specifiers() {
fn resolve(start: &str, specifiers: &[&str]) -> String {
let specifiers = specifiers
.iter()
.map(|s| ModuleSpecifier::parse(s).unwrap())
.collect::<Vec<_>>();
resolve_root_dir_from_specifiers(
&ModuleSpecifier::parse(start).unwrap(),
specifiers.iter(),
)
.to_string()
}
assert_eq!(resolve("file:///a/b/c", &["file:///a/b/c/d"]), "file:///a/");
assert_eq!(
resolve("file:///a/b/c/", &["file:///a/b/c/d"]),
"file:///a/b/"
);
assert_eq!(
resolve("file:///a/b/c/", &["file:///a/b/c/d", "file:///a/b/c/e"]),
"file:///a/b/"
);
assert_eq!(resolve("file:///", &["file:///a/b/c/d"]), "file:///");
if cfg!(windows) {
assert_eq!(resolve("file:///c:/", &["file:///c:/test"]), "file:///c:/");
// this will ignore the other one because it's on a separate drive
assert_eq!(
resolve("file:///c:/a/b/c/", &["file:///v:/a/b/c/d"]),
"file:///c:/a/b/"
);
}
}
} }

View file

@ -131,7 +131,11 @@ pub async fn doc(
|_| true, |_| true,
)?; )?;
let graph = module_graph_creator let graph = module_graph_creator
.create_graph(GraphKind::TypesOnly, module_specifiers.clone()) .create_graph(
GraphKind::TypesOnly,
module_specifiers.clone(),
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?; .await?;
graph_exit_integrity_errors(&graph); graph_exit_integrity_errors(&graph);

View file

@ -2,7 +2,6 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::fmt;
use std::fmt::Write; use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
@ -35,6 +34,7 @@ use crate::graph_util::graph_exit_integrity_errors;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::ManagedCliNpmResolver; use crate::npm::ManagedCliNpmResolver;
use crate::util::checksum; use crate::util::checksum;
use crate::util::display::DisplayTreeNode;
const JSON_SCHEMA_VERSION: u8 = 1; const JSON_SCHEMA_VERSION: u8 = 1;
@ -123,7 +123,12 @@ pub async fn info(
let mut loader = module_graph_builder.create_graph_loader(); let mut loader = module_graph_builder.create_graph_loader();
loader.enable_loading_cache_info(); // for displaying the cache information loader.enable_loading_cache_info(); // for displaying the cache information
let graph = module_graph_creator let graph = module_graph_creator
.create_graph_with_loader(GraphKind::All, vec![specifier], &mut loader) .create_graph_with_loader(
GraphKind::All,
vec![specifier],
&mut loader,
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?; .await?;
// write out the lockfile if there is one // write out the lockfile if there is one
@ -337,76 +342,6 @@ fn add_npm_packages_to_json(
json.insert("npmPackages".to_string(), json_packages.into()); json.insert("npmPackages".to_string(), json_packages.into());
} }
struct TreeNode {
text: String,
children: Vec<TreeNode>,
}
impl TreeNode {
pub fn from_text(text: String) -> Self {
Self {
text,
children: Default::default(),
}
}
}
fn print_tree_node<TWrite: Write>(
tree_node: &TreeNode,
writer: &mut TWrite,
) -> fmt::Result {
fn print_children<TWrite: Write>(
writer: &mut TWrite,
prefix: &str,
children: &[TreeNode],
) -> fmt::Result {
const SIBLING_CONNECTOR: char = '├';
const LAST_SIBLING_CONNECTOR: char = '└';
const CHILD_DEPS_CONNECTOR: char = '┬';
const CHILD_NO_DEPS_CONNECTOR: char = '─';
const VERTICAL_CONNECTOR: char = '│';
const EMPTY_CONNECTOR: char = ' ';
let child_len = children.len();
for (index, child) in children.iter().enumerate() {
let is_last = index + 1 == child_len;
let sibling_connector = if is_last {
LAST_SIBLING_CONNECTOR
} else {
SIBLING_CONNECTOR
};
let child_connector = if child.children.is_empty() {
CHILD_NO_DEPS_CONNECTOR
} else {
CHILD_DEPS_CONNECTOR
};
writeln!(
writer,
"{} {}",
colors::gray(format!("{prefix}{sibling_connector}{child_connector}")),
child.text
)?;
let child_prefix = format!(
"{}{}{}",
prefix,
if is_last {
EMPTY_CONNECTOR
} else {
VERTICAL_CONNECTOR
},
EMPTY_CONNECTOR
);
print_children(writer, &child_prefix, &child.children)?;
}
Ok(())
}
writeln!(writer, "{}", tree_node.text)?;
print_children(writer, "", &tree_node.children)?;
Ok(())
}
/// Precached information about npm packages that are used in deno info. /// Precached information about npm packages that are used in deno info.
#[derive(Default)] #[derive(Default)]
struct NpmInfo { struct NpmInfo {
@ -563,7 +498,7 @@ impl<'a> GraphDisplayContext<'a> {
)?; )?;
writeln!(writer)?; writeln!(writer)?;
let root_node = self.build_module_info(root, false); let root_node = self.build_module_info(root, false);
print_tree_node(&root_node, writer)?; root_node.print(writer)?;
Ok(()) Ok(())
} }
Err(err) => { Err(err) => {
@ -579,7 +514,7 @@ impl<'a> GraphDisplayContext<'a> {
} }
} }
fn build_dep_info(&mut self, dep: &Dependency) -> Vec<TreeNode> { fn build_dep_info(&mut self, dep: &Dependency) -> Vec<DisplayTreeNode> {
let mut children = Vec::with_capacity(2); let mut children = Vec::with_capacity(2);
if !dep.maybe_code.is_none() { if !dep.maybe_code.is_none() {
if let Some(child) = self.build_resolved_info(&dep.maybe_code, false) { if let Some(child) = self.build_resolved_info(&dep.maybe_code, false) {
@ -594,7 +529,11 @@ impl<'a> GraphDisplayContext<'a> {
children children
} }
fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode { fn build_module_info(
&mut self,
module: &Module,
type_dep: bool,
) -> DisplayTreeNode {
enum PackageOrSpecifier { enum PackageOrSpecifier {
Package(Box<NpmResolutionPackage>), Package(Box<NpmResolutionPackage>),
Specifier(ModuleSpecifier), Specifier(ModuleSpecifier),
@ -640,7 +579,7 @@ impl<'a> GraphDisplayContext<'a> {
format!("{} {}", header_text, maybe_size_to_text(maybe_size)) format!("{} {}", header_text, maybe_size_to_text(maybe_size))
}; };
let mut tree_node = TreeNode::from_text(header_text); let mut tree_node = DisplayTreeNode::from_text(header_text);
if !was_seen { if !was_seen {
match &package_or_specifier { match &package_or_specifier {
@ -678,14 +617,14 @@ impl<'a> GraphDisplayContext<'a> {
fn build_npm_deps( fn build_npm_deps(
&mut self, &mut self,
package: &NpmResolutionPackage, package: &NpmResolutionPackage,
) -> Vec<TreeNode> { ) -> Vec<DisplayTreeNode> {
let mut deps = package.dependencies.values().collect::<Vec<_>>(); let mut deps = package.dependencies.values().collect::<Vec<_>>();
deps.sort(); deps.sort();
let mut children = Vec::with_capacity(deps.len()); let mut children = Vec::with_capacity(deps.len());
for dep_id in deps.into_iter() { for dep_id in deps.into_iter() {
let maybe_size = self.npm_info.package_sizes.get(dep_id).cloned(); let maybe_size = self.npm_info.package_sizes.get(dep_id).cloned();
let size_str = maybe_size_to_text(maybe_size); let size_str = maybe_size_to_text(maybe_size);
let mut child = TreeNode::from_text(format!( let mut child = DisplayTreeNode::from_text(format!(
"npm:/{} {}", "npm:/{} {}",
dep_id.as_serialized(), dep_id.as_serialized(),
size_str size_str
@ -710,7 +649,7 @@ impl<'a> GraphDisplayContext<'a> {
&mut self, &mut self,
err: &ModuleError, err: &ModuleError,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> TreeNode { ) -> DisplayTreeNode {
self.seen.insert(specifier.to_string()); self.seen.insert(specifier.to_string());
match err { match err {
ModuleError::InvalidTypeAssertion { .. } => { ModuleError::InvalidTypeAssertion { .. } => {
@ -753,8 +692,8 @@ impl<'a> GraphDisplayContext<'a> {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
error_msg: &str, error_msg: &str,
) -> TreeNode { ) -> DisplayTreeNode {
TreeNode::from_text(format!( DisplayTreeNode::from_text(format!(
"{} {}", "{} {}",
colors::red(specifier), colors::red(specifier),
colors::red_bold(error_msg) colors::red_bold(error_msg)
@ -765,7 +704,7 @@ impl<'a> GraphDisplayContext<'a> {
&mut self, &mut self,
resolution: &Resolution, resolution: &Resolution,
type_dep: bool, type_dep: bool,
) -> Option<TreeNode> { ) -> Option<DisplayTreeNode> {
match resolution { match resolution {
Resolution::Ok(resolved) => { Resolution::Ok(resolved) => {
let specifier = &resolved.specifier; let specifier = &resolved.specifier;
@ -773,14 +712,14 @@ impl<'a> GraphDisplayContext<'a> {
Some(match self.graph.try_get(resolved_specifier) { Some(match self.graph.try_get(resolved_specifier) {
Ok(Some(module)) => self.build_module_info(module, type_dep), Ok(Some(module)) => self.build_module_info(module, type_dep),
Err(err) => self.build_error_info(err, resolved_specifier), Err(err) => self.build_error_info(err, resolved_specifier),
Ok(None) => TreeNode::from_text(format!( Ok(None) => DisplayTreeNode::from_text(format!(
"{} {}", "{} {}",
colors::red(specifier), colors::red(specifier),
colors::red_bold("(missing)") colors::red_bold("(missing)")
)), )),
}) })
} }
Resolution::Err(err) => Some(TreeNode::from_text(format!( Resolution::Err(err) => Some(DisplayTreeNode::from_text(format!(
"{} {}", "{} {}",
colors::italic(err.to_string()), colors::italic(err.to_string()),
colors::red_bold("(resolve error)") colors::red_bold("(resolve error)")

View file

@ -9,7 +9,6 @@ use crate::args::Flags;
use crate::args::InstallFlags; use crate::args::InstallFlags;
use crate::args::InstallFlagsGlobal; use crate::args::InstallFlagsGlobal;
use crate::args::InstallFlagsLocal; use crate::args::InstallFlagsLocal;
use crate::args::InstallKind;
use crate::args::TypeCheckMode; use crate::args::TypeCheckMode;
use crate::args::UninstallFlags; use crate::args::UninstallFlags;
use crate::args::UninstallKind; use crate::args::UninstallKind;
@ -339,11 +338,11 @@ pub async fn install_command(
flags: Arc<Flags>, flags: Arc<Flags>,
install_flags: InstallFlags, install_flags: InstallFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
match install_flags.kind { match install_flags {
InstallKind::Global(global_flags) => { InstallFlags::Global(global_flags) => {
install_global(flags, global_flags).await install_global(flags, global_flags).await
} }
InstallKind::Local(local_flags) => { InstallFlags::Local(local_flags) => {
if let InstallFlagsLocal::Add(add_flags) = &local_flags { if let InstallFlagsLocal::Add(add_flags) = &local_flags {
check_if_installs_a_single_package_globally(Some(add_flags))?; check_if_installs_a_single_package_globally(Some(add_flags))?;
} }

View file

@ -556,3 +556,68 @@ struct LintError {
file_path: String, file_path: String,
message: String, message: String,
} }
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
use serde::Deserialize;
use test_util as util;
#[derive(Serialize, Deserialize)]
struct RulesSchema {
#[serde(rename = "$schema")]
schema: String,
#[serde(rename = "enum")]
rules: Vec<String>,
}
fn get_all_rules() -> Vec<String> {
let rule_provider = LintRuleProvider::new(None, None);
let configured_rules =
rule_provider.resolve_lint_rules(Default::default(), None);
let mut all_rules = configured_rules
.all_rule_codes
.into_iter()
.map(|s| s.to_string())
.collect::<Vec<String>>();
all_rules.sort();
all_rules
}
// TODO(bartlomieju): do the same for tags, once https://github.com/denoland/deno/pull/27162 lands
#[test]
fn all_lint_rules_are_listed_in_schema_file() {
let all_rules = get_all_rules();
let rules_schema_path =
util::root_path().join("cli/schemas/lint-rules.v1.json");
let rules_schema_file =
std::fs::read_to_string(&rules_schema_path).unwrap();
let schema: RulesSchema = serde_json::from_str(&rules_schema_file).unwrap();
const UPDATE_ENV_VAR_NAME: &str = "UPDATE_EXPECTED";
if std::env::var(UPDATE_ENV_VAR_NAME).ok().is_none() {
assert_eq!(
schema.rules, all_rules,
"Lint rules schema file not up to date. Run again with {}=1 to update the expected output",
UPDATE_ENV_VAR_NAME
);
return;
}
std::fs::write(
&rules_schema_path,
serde_json::to_string_pretty(&RulesSchema {
schema: schema.schema,
rules: all_rules,
})
.unwrap(),
)
.unwrap();
}
}
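
The test above follows a common golden-file pattern: assert against a checked-in file by default, and regenerate that file when an UPDATE_EXPECTED environment variable is set (e.g. UPDATE_EXPECTED=1 cargo test). A minimal, self-contained sketch of the pattern; the file path and output here are illustrative, not the real lint-rules schema:

#[cfg(test)]
mod golden_file_example {
    #[test]
    fn output_matches_golden_file() {
        // Hypothetical generated output and golden-file path, for illustration only.
        let actual = "generated output\n".to_string();
        let path = std::path::Path::new("tests/expected.txt");

        if std::env::var("UPDATE_EXPECTED").is_ok() {
            // Regenerate the golden file instead of asserting against it.
            std::fs::write(path, &actual).unwrap();
            return;
        }

        let expected = std::fs::read_to_string(path).unwrap();
        assert_eq!(
            actual, expected,
            "Golden file out of date; re-run with UPDATE_EXPECTED=1 to regenerate it."
        );
    }
}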

View file

@ -432,9 +432,8 @@ pub async fn add(
let mut package_reqs = Vec::with_capacity(add_flags.packages.len()); let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
for entry_text in add_flags.packages.iter() { for entry_text in add_flags.packages.iter() {
let req = AddRmPackageReq::parse(entry_text).with_context(|| { let req = AddRmPackageReq::parse(entry_text)
format!("Failed to parse package required: {}", entry_text) .with_context(|| format!("Failed to parse package: {}", entry_text))?;
})?;
match req { match req {
Ok(add_req) => package_reqs.push(add_req), Ok(add_req) => package_reqs.push(add_req),
@ -805,9 +804,8 @@ pub async fn remove(
let mut removed_packages = vec![]; let mut removed_packages = vec![];
for package in &remove_flags.packages { for package in &remove_flags.packages {
let req = AddRmPackageReq::parse(package).with_context(|| { let req = AddRmPackageReq::parse(package)
format!("Failed to parse package required: {}", package) .with_context(|| format!("Failed to parse package: {}", package))?;
})?;
let mut parsed_pkg_name = None; let mut parsed_pkg_name = None;
for config in configs.iter_mut().flatten() { for config in configs.iter_mut().flatten() {
match &req { match &req {

View file

@ -6,6 +6,7 @@ use std::sync::Arc;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::graph_container::ModuleGraphContainer; use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit; use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
@ -18,18 +19,16 @@ pub async fn cache_top_level_deps(
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let npm_resolver = factory.npm_resolver().await?; let npm_resolver = factory.npm_resolver().await?;
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
let root_permissions = factory.root_permissions_container()?;
if let Some(npm_resolver) = npm_resolver.as_managed() { if let Some(npm_resolver) = npm_resolver.as_managed() {
if !npm_resolver.ensure_top_level_package_json_install().await? { npm_resolver.ensure_top_level_package_json_install().await?;
if let Some(lockfile) = cli_options.maybe_lockfile() { if let Some(lockfile) = cli_options.maybe_lockfile() {
lockfile.error_if_changed()?; lockfile.error_if_changed()?;
}
npm_resolver.cache_packages().await?;
} }
} }
// cache as many entries in the import map as we can // cache as many entries in the import map as we can
let resolver = factory.workspace_resolver().await?; let resolver = factory.workspace_resolver().await?;
let mut maybe_graph_error = Ok(());
if let Some(import_map) = resolver.maybe_import_map() { if let Some(import_map) = resolver.maybe_import_map() {
let jsr_resolver = if let Some(resolver) = jsr_resolver { let jsr_resolver = if let Some(resolver) = jsr_resolver {
resolver resolver
@ -122,19 +121,29 @@ pub async fn cache_top_level_deps(
} }
drop(info_futures); drop(info_futures);
factory let graph_builder = factory.module_graph_builder().await?;
.module_load_preparer() graph_builder
.await? .build_graph_with_npm_resolution(
.prepare_module_load(
graph, graph,
&roots, CreateGraphOptions {
false, loader: None,
deno_config::deno_json::TsTypeLib::DenoWorker, graph_kind: graph.graph_kind(),
root_permissions.clone(), is_dynamic: false,
None, roots: roots.clone(),
npm_caching: crate::graph_util::NpmCachingStrategy::Manual,
},
) )
.await?; .await?;
maybe_graph_error = graph_builder.graph_roots_valid(graph, &roots);
}
if let Some(npm_resolver) = npm_resolver.as_managed() {
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
} }
maybe_graph_error?;
Ok(()) Ok(())
} }

View file

@ -2,6 +2,7 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicBool;
use std::sync::Arc; use std::sync::Arc;
@ -11,6 +12,7 @@ use deno_config::deno_json::ConfigFileRc;
use deno_config::workspace::Workspace; use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceDirectory;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::future::try_join; use deno_core::futures::future::try_join;
use deno_core::futures::stream::FuturesOrdered; use deno_core::futures::stream::FuturesOrdered;
@ -43,10 +45,10 @@ use crate::npm::NpmFetchResolver;
use super::ConfigUpdater; use super::ConfigUpdater;
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub enum ImportMapKind { pub enum ImportMapKind {
Inline, Inline,
Outline, Outline(PathBuf),
} }
#[derive(Clone)] #[derive(Clone)]
@ -62,9 +64,12 @@ impl DepLocation {
pub fn file_path(&self) -> Cow<std::path::Path> { pub fn file_path(&self) -> Cow<std::path::Path> {
match self { match self {
DepLocation::DenoJson(arc, _, _) => { DepLocation::DenoJson(arc, _, kind) => match kind {
Cow::Owned(arc.specifier.to_file_path().unwrap()) ImportMapKind::Inline => {
} Cow::Owned(arc.specifier.to_file_path().unwrap())
}
ImportMapKind::Outline(path) => Cow::Borrowed(path.as_path()),
},
DepLocation::PackageJson(arc, _) => Cow::Borrowed(arc.path.as_ref()), DepLocation::PackageJson(arc, _) => Cow::Borrowed(arc.path.as_ref()),
} }
} }
@ -238,22 +243,30 @@ fn to_import_map_value_from_imports(
fn deno_json_import_map( fn deno_json_import_map(
deno_json: &ConfigFile, deno_json: &ConfigFile,
) -> Result<Option<(ImportMapWithDiagnostics, ImportMapKind)>, AnyError> { ) -> Result<Option<(ImportMapWithDiagnostics, ImportMapKind)>, AnyError> {
let (value, kind) = let (value, kind) = if deno_json.json.imports.is_some()
if deno_json.json.imports.is_some() || deno_json.json.scopes.is_some() { || deno_json.json.scopes.is_some()
( {
to_import_map_value_from_imports(deno_json), (
ImportMapKind::Inline, to_import_map_value_from_imports(deno_json),
) ImportMapKind::Inline,
} else { )
match deno_json.to_import_map_path()? { } else {
Some(path) => { match deno_json.to_import_map_path()? {
let text = std::fs::read_to_string(&path)?; Some(path) => {
let value = serde_json::from_str(&text)?; let err_context = || {
(value, ImportMapKind::Outline) format!(
} "loading import map at '{}' (from \"importMap\" field in '{}')",
None => return Ok(None), path.display(),
deno_json.specifier
)
};
let text = std::fs::read_to_string(&path).with_context(err_context)?;
let value = serde_json::from_str(&text).with_context(err_context)?;
(value, ImportMapKind::Outline(path))
} }
}; None => return Ok(None),
}
};
import_map::parse_from_value(deno_json.specifier.clone(), value) import_map::parse_from_value(deno_json.specifier.clone(), value)
.map_err(Into::into) .map_err(Into::into)
@ -303,7 +316,7 @@ fn add_deps_from_deno_json(
location: DepLocation::DenoJson( location: DepLocation::DenoJson(
deno_json.clone(), deno_json.clone(),
key_path, key_path,
import_map_kind, import_map_kind.clone(),
), ),
kind, kind,
req, req,
@ -747,11 +760,7 @@ impl DepManager {
let dep = &mut self.deps[dep_id.0]; let dep = &mut self.deps[dep_id.0];
dep.req.version_req = version_req.clone(); dep.req.version_req = version_req.clone();
match &dep.location { match &dep.location {
DepLocation::DenoJson(arc, key_path, import_map_kind) => { DepLocation::DenoJson(arc, key_path, _) => {
if matches!(import_map_kind, ImportMapKind::Outline) {
// not supported
continue;
}
let updater = let updater =
get_or_create_updater(&mut config_updaters, &dep.location)?; get_or_create_updater(&mut config_updaters, &dep.location)?;
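
The import-map loading above attaches the same error context to both the file read and the JSON parse via anyhow's Context trait (re-exported in the diff as deno_core::anyhow). A minimal sketch of that pattern, assuming the plain anyhow and serde_json crates:

use anyhow::{Context, Result};
use std::path::Path;

// Reuse one context closure for every fallible step of loading a JSON file,
// so both I/O and parse errors report which file was being loaded and why.
fn load_import_map(path: &Path) -> Result<serde_json::Value> {
    let err_context = || format!("loading import map at '{}'", path.display());
    let text = std::fs::read_to_string(path).with_context(err_context)?;
    let value = serde_json::from_str(&text).with_context(err_context)?;
    Ok(value)
}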

View file

@ -3,6 +3,7 @@
use std::collections::HashSet; use std::collections::HashSet;
use std::sync::Arc; use std::sync::Arc;
use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
@ -100,6 +101,23 @@ fn print_outdated_table(packages: &[OutdatedPackage]) {
println!("{package_fill}{current_fill}{update_fill}{latest_fill}",); println!("{package_fill}{current_fill}{update_fill}{latest_fill}",);
} }
fn print_suggestion(compatible: bool) {
log::info!("");
let (cmd, txt) = if compatible {
("", "compatible")
} else {
(" --latest", "available")
};
log::info!(
"{}",
color_print::cformat!(
"<p(245)>Run</> <u>deno outdated --update{}</> <p(245)>to update to the latest {} versions,</>\n<p(245)>or</> <u>deno outdated --help</> <p(245)>for more information.</>",
cmd,
txt,
)
);
}
fn print_outdated( fn print_outdated(
deps: &mut DepManager, deps: &mut DepManager,
compatible: bool, compatible: bool,
@ -148,6 +166,7 @@ fn print_outdated(
if !outdated.is_empty() { if !outdated.is_empty() {
outdated.sort(); outdated.sort();
print_outdated_table(&outdated); print_outdated_table(&outdated);
print_suggestion(compatible);
} }
Ok(()) Ok(())
@ -179,6 +198,15 @@ pub async fn outdated(
let jsr_fetch_resolver = let jsr_fetch_resolver =
Arc::new(JsrFetchResolver::new(file_fetcher.clone())); Arc::new(JsrFetchResolver::new(file_fetcher.clone()));
if !cli_options.start_dir.has_deno_json()
&& !cli_options.start_dir.has_pkg_json()
{
bail!(
"No deno.json or package.json in \"{}\".",
cli_options.initial_cwd().display(),
);
}
let args = dep_manager_args( let args = dep_manager_args(
&factory, &factory,
cli_options, cli_options,

View file

@ -727,7 +727,9 @@ impl ReplSession {
let has_node_specifier = let has_node_specifier =
resolved_imports.iter().any(|url| url.scheme() == "node"); resolved_imports.iter().any(|url| url.scheme() == "node");
if !npm_imports.is_empty() || has_node_specifier { if !npm_imports.is_empty() || has_node_specifier {
npm_resolver.add_package_reqs(&npm_imports).await?; npm_resolver
.add_and_cache_package_reqs(&npm_imports)
.await?;
// prevent messages in the repl about @types/node not being cached // prevent messages in the repl about @types/node not being cached
if has_node_specifier { if has_node_specifier {

View file

@ -198,13 +198,23 @@ pub async fn eval_command(
} }
pub async fn maybe_npm_install(factory: &CliFactory) -> Result<(), AnyError> { pub async fn maybe_npm_install(factory: &CliFactory) -> Result<(), AnyError> {
let cli_options = factory.cli_options()?;
// ensure an "npm install" is done if the user has explicitly // ensure an "npm install" is done if the user has explicitly
// opted into using a managed node_modules directory // opted into using a managed node_modules directory
if factory.cli_options()?.node_modules_dir()? if cli_options.node_modules_dir()? == Some(NodeModulesDirMode::Auto) {
== Some(NodeModulesDirMode::Auto)
{
if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() { if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?; let already_done =
npm_resolver.ensure_top_level_package_json_install().await?;
if !already_done
&& matches!(
cli_options.default_npm_caching_strategy(),
crate::graph_util::NpmCachingStrategy::Eager
)
{
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
}
} }
} }
Ok(()) Ok(())

View file

@ -440,6 +440,13 @@ impl<'a> TaskRunner<'a> {
kill_signal: KillSignal, kill_signal: KillSignal,
argv: &'a [String], argv: &'a [String],
) -> Result<i32, deno_core::anyhow::Error> { ) -> Result<i32, deno_core::anyhow::Error> {
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?;
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
}
let cwd = match &self.task_flags.cwd { let cwd = match &self.task_flags.cwd {
Some(path) => canonicalize_path(&PathBuf::from(path)) Some(path) => canonicalize_path(&PathBuf::from(path))
.context("failed canonicalizing --cwd")?, .context("failed canonicalizing --cwd")?,
@ -450,6 +457,7 @@ impl<'a> TaskRunner<'a> {
self.npm_resolver, self.npm_resolver,
self.node_resolver, self.node_resolver,
)?; )?;
self self
.run_single(RunSingleOptions { .run_single(RunSingleOptions {
task_name, task_name,
@ -473,6 +481,9 @@ impl<'a> TaskRunner<'a> {
// ensure the npm packages are installed if using a managed resolver // ensure the npm packages are installed if using a managed resolver
if let Some(npm_resolver) = self.npm_resolver.as_managed() { if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?; npm_resolver.ensure_top_level_package_json_install().await?;
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
} }
let cwd = match &self.task_flags.cwd { let cwd = match &self.task_flags.cwd {
@ -492,6 +503,7 @@ impl<'a> TaskRunner<'a> {
self.npm_resolver, self.npm_resolver,
self.node_resolver, self.node_resolver,
)?; )?;
for task_name in &task_names { for task_name in &task_names {
if let Some(script) = scripts.get(task_name) { if let Some(script) = scripts.get(task_name) {
let exit_code = self let exit_code = self

View file

@ -1716,7 +1716,11 @@ pub async fn run_tests_with_watch(
&cli_options.permissions_options(), &cli_options.permissions_options(),
)?; )?;
let graph = module_graph_creator let graph = module_graph_creator
.create_graph(graph_kind, test_modules) .create_graph(
graph_kind,
test_modules,
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?; .await?;
module_graph_creator.graph_valid(&graph)?; module_graph_creator.graph_valid(&graph)?;
let test_modules = &graph.roots; let test_modules = &graph.roots;

View file

@ -2,6 +2,7 @@
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_runtime::colors;
use std::io::Write; use std::io::Write;
/// A function that converts a float to a string that represents a human /// A function that converts a float to a string that represents a human
@ -85,6 +86,78 @@ where
Ok(()) Ok(())
} }
pub struct DisplayTreeNode {
pub text: String,
pub children: Vec<DisplayTreeNode>,
}
impl DisplayTreeNode {
pub fn from_text(text: String) -> Self {
Self {
text,
children: Default::default(),
}
}
pub fn print<TWrite: std::fmt::Write>(
&self,
writer: &mut TWrite,
) -> std::fmt::Result {
fn print_children<TWrite: std::fmt::Write>(
writer: &mut TWrite,
prefix: &str,
children: &[DisplayTreeNode],
) -> std::fmt::Result {
const SIBLING_CONNECTOR: char = '├';
const LAST_SIBLING_CONNECTOR: char = '└';
const CHILD_DEPS_CONNECTOR: char = '┬';
const CHILD_NO_DEPS_CONNECTOR: char = '─';
const VERTICAL_CONNECTOR: char = '│';
const EMPTY_CONNECTOR: char = ' ';
let child_len = children.len();
for (index, child) in children.iter().enumerate() {
let is_last = index + 1 == child_len;
let sibling_connector = if is_last {
LAST_SIBLING_CONNECTOR
} else {
SIBLING_CONNECTOR
};
let child_connector = if child.children.is_empty() {
CHILD_NO_DEPS_CONNECTOR
} else {
CHILD_DEPS_CONNECTOR
};
writeln!(
writer,
"{} {}",
colors::gray(format!(
"{prefix}{sibling_connector}─{child_connector}"
)),
child.text
)?;
let child_prefix = format!(
"{}{}{}",
prefix,
if is_last {
EMPTY_CONNECTOR
} else {
VERTICAL_CONNECTOR
},
EMPTY_CONNECTOR
);
print_children(writer, &child_prefix, &child.children)?;
}
Ok(())
}
writeln!(writer, "{}", self.text)?;
print_children(writer, "", &self.children)?;
Ok(())
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
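
DisplayTreeNode above is the generalized replacement for the TreeNode that deno info previously kept locally: from_text builds a leaf, children is a public Vec, and print renders the tree with the box-drawing connectors defined above. A rough usage sketch, assuming `use crate::util::display::DisplayTreeNode;` is in scope and with illustrative specifiers and sizes:

// Build a two-level dependency tree and render it into a String.
fn render_example() -> Result<String, std::fmt::Error> {
    let mut root = DisplayTreeNode::from_text("file:///main.ts (12.3KB)".to_string());
    let mut dep = DisplayTreeNode::from_text("npm:/chalk@5.3.0 (43.1KB)".to_string());
    dep.children.push(DisplayTreeNode::from_text(
        "npm:/ansi-styles@6.2.1 (8.2KB)".to_string(),
    ));
    root.children.push(dep);

    let mut out = String::new();
    root.print(&mut out)?;
    Ok(out)
}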

View file

@ -1,24 +1,34 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use super::draw_thread::DrawThread;
use deno_telemetry::OtelConfig;
use deno_telemetry::OtelConsoleConfig;
use std::io::Write; use std::io::Write;
use super::draw_thread::DrawThread; struct CliLogger {
otel_console_config: OtelConsoleConfig,
struct CliLogger(env_logger::Logger); logger: env_logger::Logger,
}
impl CliLogger { impl CliLogger {
pub fn new(logger: env_logger::Logger) -> Self { pub fn new(
Self(logger) logger: env_logger::Logger,
otel_console_config: OtelConsoleConfig,
) -> Self {
Self {
logger,
otel_console_config,
}
} }
pub fn filter(&self) -> log::LevelFilter { pub fn filter(&self) -> log::LevelFilter {
self.0.filter() self.logger.filter()
} }
} }
impl log::Log for CliLogger { impl log::Log for CliLogger {
fn enabled(&self, metadata: &log::Metadata) -> bool { fn enabled(&self, metadata: &log::Metadata) -> bool {
self.0.enabled(metadata) self.logger.enabled(metadata)
} }
fn log(&self, record: &log::Record) { fn log(&self, record: &log::Record) {
@ -28,18 +38,30 @@ impl log::Log for CliLogger {
// could potentially block other threads that access the draw // could potentially block other threads that access the draw
// thread's state // thread's state
DrawThread::hide(); DrawThread::hide();
self.0.log(record);
deno_telemetry::handle_log(record); match self.otel_console_config {
OtelConsoleConfig::Ignore => {
self.logger.log(record);
}
OtelConsoleConfig::Capture => {
self.logger.log(record);
deno_telemetry::handle_log(record);
}
OtelConsoleConfig::Replace => {
deno_telemetry::handle_log(record);
}
}
DrawThread::show(); DrawThread::show();
} }
} }
fn flush(&self) { fn flush(&self) {
self.0.flush(); self.logger.flush();
} }
} }
pub fn init(maybe_level: Option<log::Level>) { pub fn init(maybe_level: Option<log::Level>, otel_config: Option<OtelConfig>) {
let log_level = maybe_level.unwrap_or(log::Level::Info); let log_level = maybe_level.unwrap_or(log::Level::Info);
let logger = env_logger::Builder::from_env( let logger = env_logger::Builder::from_env(
env_logger::Env::new() env_logger::Env::new()
@ -93,7 +115,12 @@ pub fn init(maybe_level: Option<log::Level>) {
}) })
.build(); .build();
let cli_logger = CliLogger::new(logger); let cli_logger = CliLogger::new(
logger,
otel_config
.map(|c| c.console)
.unwrap_or(OtelConsoleConfig::Ignore),
);
let max_level = cli_logger.filter(); let max_level = cli_logger.filter();
let r = log::set_boxed_logger(Box::new(cli_logger)); let r = log::set_boxed_logger(Box::new(cli_logger));
if r.is_ok() { if r.is_ok() {
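
The reworked CliLogger keeps the usual log::Log delegation shape while routing records according to the OTel console mode. A stripped-down sketch of the same wrapper shape, assuming only the log crate; the mirrored stderr sink here is hypothetical, not the real OpenTelemetry handler:

use log::{Log, Metadata, Record};

struct TeeLogger<L: Log> {
    inner: L,
    mirror_to_stderr: bool,
}

impl<L: Log> Log for TeeLogger<L> {
    fn enabled(&self, metadata: &Metadata) -> bool {
        self.inner.enabled(metadata)
    }

    fn log(&self, record: &Record) {
        // Always forward to the wrapped logger, optionally mirroring elsewhere.
        self.inner.log(record);
        if self.mirror_to_stderr {
            eprintln!("[{}] {}", record.level(), record.args());
        }
    }

    fn flush(&self) {
        self.inner.flush();
    }
}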

View file

@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow; use std::borrow::Cow;
use std::fmt::Write;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
@ -58,8 +59,8 @@ pub fn get_atomic_file_path(file_path: &Path) -> PathBuf {
} }
fn gen_rand_path_component() -> String { fn gen_rand_path_component() -> String {
(0..4).fold(String::new(), |mut output, _| { (0..4).fold(String::with_capacity(8), |mut output, _| {
output.push_str(&format!("{:02x}", rand::random::<u8>())); write!(&mut output, "{:02x}", rand::random::<u8>()).unwrap();
output output
}) })
} }
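
The change above swaps a per-iteration format! allocation for write! into the already-allocated String, and pre-sizes it, since four random bytes hex-encode to eight characters. The same pattern in isolation, assuming the rand crate as in the original:

use std::fmt::Write;

fn hex_suffix(num_bytes: usize) -> String {
    (0..num_bytes).fold(String::with_capacity(num_bytes * 2), |mut out, _| {
        // Appends directly into `out`; write! to a String cannot fail.
        write!(&mut out, "{:02x}", rand::random::<u8>()).unwrap();
        out
    })
}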

View file

@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::fmt::Write;
use std::sync::atomic::AtomicUsize; use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering; use std::sync::atomic::Ordering;
use std::time::Duration; use std::time::Duration;
@ -81,12 +82,14 @@ impl ProgressBarRenderer for BarProgressBarRenderer {
let elapsed_text = get_elapsed_text(data.duration); let elapsed_text = get_elapsed_text(data.duration);
let mut text = String::new(); let mut text = String::new();
if !display_entry.message.is_empty() { if !display_entry.message.is_empty() {
text.push_str(&format!( writeln!(
"{} {}{}\n", &mut text,
"{} {}{}",
colors::green("Download"), colors::green("Download"),
display_entry.message, display_entry.message,
bytes_text, bytes_text,
)); )
.unwrap();
} }
text.push_str(&elapsed_text); text.push_str(&elapsed_text);
let max_width = (data.terminal_width as i32 - 5).clamp(10, 75) as usize; let max_width = (data.terminal_width as i32 - 5).clamp(10, 75) as usize;

View file

@ -50,6 +50,7 @@ use tokio::select;
use crate::args::CliLockfile; use crate::args::CliLockfile;
use crate::args::DenoSubcommand; use crate::args::DenoSubcommand;
use crate::args::NpmCachingStrategy;
use crate::args::StorageKeyResolver; use crate::args::StorageKeyResolver;
use crate::errors; use crate::errors;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
@ -153,7 +154,8 @@ struct SharedWorkerState {
storage_key_resolver: StorageKeyResolver, storage_key_resolver: StorageKeyResolver,
options: CliMainWorkerOptions, options: CliMainWorkerOptions,
subcommand: DenoSubcommand, subcommand: DenoSubcommand,
otel_config: Option<OtelConfig>, // `None` means OpenTelemetry is disabled. otel_config: OtelConfig,
default_npm_caching_strategy: NpmCachingStrategy,
} }
impl SharedWorkerState { impl SharedWorkerState {
@ -424,7 +426,8 @@ impl CliMainWorkerFactory {
storage_key_resolver: StorageKeyResolver, storage_key_resolver: StorageKeyResolver,
subcommand: DenoSubcommand, subcommand: DenoSubcommand,
options: CliMainWorkerOptions, options: CliMainWorkerOptions,
otel_config: Option<OtelConfig>, otel_config: OtelConfig,
default_npm_caching_strategy: NpmCachingStrategy,
) -> Self { ) -> Self {
Self { Self {
shared: Arc::new(SharedWorkerState { shared: Arc::new(SharedWorkerState {
@ -448,6 +451,7 @@ impl CliMainWorkerFactory {
options, options,
subcommand, subcommand,
otel_config, otel_config,
default_npm_caching_strategy,
}), }),
} }
} }
@ -487,8 +491,19 @@ impl CliMainWorkerFactory {
NpmPackageReqReference::from_specifier(&main_module) NpmPackageReqReference::from_specifier(&main_module)
{ {
if let Some(npm_resolver) = shared.npm_resolver.as_managed() { if let Some(npm_resolver) = shared.npm_resolver.as_managed() {
let reqs = &[package_ref.req().clone()];
npm_resolver npm_resolver
.add_package_reqs(&[package_ref.req().clone()]) .add_package_reqs(
reqs,
if matches!(
shared.default_npm_caching_strategy,
NpmCachingStrategy::Lazy
) {
crate::npm::PackageCaching::Only(reqs.into())
} else {
crate::npm::PackageCaching::All
},
)
.await?; .await?;
} }

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_broadcast_channel" name = "deno_broadcast_channel"
version = "0.175.0" version = "0.176.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_cache" name = "deno_cache"
version = "0.113.0" version = "0.114.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_canvas" name = "deno_canvas"
version = "0.50.0" version = "0.51.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_console" name = "deno_console"
version = "0.181.0" version = "0.182.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_cron" name = "deno_cron"
version = "0.61.0" version = "0.62.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_crypto" name = "deno_crypto"
version = "0.195.0" version = "0.196.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_fetch" name = "deno_fetch"
version = "0.205.0" version = "0.206.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -206,9 +206,6 @@ pub enum FetchError {
RequestBuilderHook(deno_core::error::AnyError), RequestBuilderHook(deno_core::error::AnyError),
#[error(transparent)] #[error(transparent)]
Io(#[from] std::io::Error), Io(#[from] std::io::Error),
// Only used for node upgrade
#[error(transparent)]
Hyper(#[from] hyper::Error),
} }
pub type CancelableResponseFuture = pub type CancelableResponseFuture =

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_ffi" name = "deno_ffi"
version = "0.168.0" version = "0.169.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_fs" name = "deno_fs"
version = "0.91.0" version = "0.92.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_http" name = "deno_http"
version = "0.179.0" version = "0.180.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_io" name = "deno_io"
version = "0.91.0" version = "0.92.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_kv" name = "deno_kv"
version = "0.89.0" version = "0.90.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_napi" name = "deno_napi"
version = "0.112.0" version = "0.113.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "napi_sym" name = "napi_sym"
version = "0.111.0" version = "0.112.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_net" name = "deno_net"
version = "0.173.0" version = "0.174.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_node" name = "deno_node"
version = "0.118.0" version = "0.119.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -364,9 +364,9 @@ deno_core::extension!(deno_node,
ops::zlib::brotli::op_create_brotli_decompress, ops::zlib::brotli::op_create_brotli_decompress,
ops::zlib::brotli::op_brotli_decompress_stream, ops::zlib::brotli::op_brotli_decompress_stream,
ops::zlib::brotli::op_brotli_decompress_stream_end, ops::zlib::brotli::op_brotli_decompress_stream_end,
ops::http::op_node_http_request<P>,
ops::http::op_node_http_fetch_response_upgrade, ops::http::op_node_http_fetch_response_upgrade,
ops::http::op_node_http_fetch_send, ops::http::op_node_http_request_with_conn<P>,
ops::http::op_node_http_await_response,
ops::http2::op_http2_connect, ops::http2::op_http2_connect,
ops::http2::op_http2_poll_client_connection, ops::http2::op_http2_poll_client_connection,
ops::http2::op_http2_client_request, ops::http2::op_http2_client_request,

View file

@ -2,18 +2,20 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::cell::RefCell; use std::cell::RefCell;
use std::fmt::Debug;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use std::task::Context; use std::task::Context;
use std::task::Poll; use std::task::Poll;
use bytes::Bytes; use bytes::Bytes;
use deno_core::error::bad_resource;
use deno_core::error::type_error;
use deno_core::futures::stream::Peekable; use deno_core::futures::stream::Peekable;
use deno_core::futures::Future; use deno_core::futures::Future;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::futures::Stream; use deno_core::futures::Stream;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::futures::TryFutureExt;
use deno_core::op2; use deno_core::op2;
use deno_core::serde::Serialize; use deno_core::serde::Serialize;
use deno_core::unsync::spawn; use deno_core::unsync::spawn;
@ -25,17 +27,17 @@ use deno_core::ByteString;
use deno_core::CancelFuture; use deno_core::CancelFuture;
use deno_core::CancelHandle; use deno_core::CancelHandle;
use deno_core::CancelTryFuture; use deno_core::CancelTryFuture;
use deno_core::Canceled;
use deno_core::OpState; use deno_core::OpState;
use deno_core::RcRef; use deno_core::RcRef;
use deno_core::Resource; use deno_core::Resource;
use deno_core::ResourceId; use deno_core::ResourceId;
use deno_fetch::get_or_create_client_from_state;
use deno_fetch::FetchCancelHandle; use deno_fetch::FetchCancelHandle;
use deno_fetch::FetchError;
use deno_fetch::FetchRequestResource;
use deno_fetch::FetchReturn; use deno_fetch::FetchReturn;
use deno_fetch::HttpClientResource;
use deno_fetch::ResBody; use deno_fetch::ResBody;
use deno_net::io::TcpStreamResource;
use deno_net::ops_tls::TlsStreamResource;
use deno_permissions::PermissionCheckError;
use http::header::HeaderMap; use http::header::HeaderMap;
use http::header::HeaderName; use http::header::HeaderName;
use http::header::HeaderValue; use http::header::HeaderValue;
@ -44,41 +46,140 @@ use http::header::CONTENT_LENGTH;
use http::Method; use http::Method;
use http_body_util::BodyExt; use http_body_util::BodyExt;
use hyper::body::Frame; use hyper::body::Frame;
use hyper::body::Incoming;
use hyper_util::rt::TokioIo; use hyper_util::rt::TokioIo;
use std::cmp::min; use std::cmp::min;
use tokio::io::AsyncReadExt; use tokio::io::AsyncReadExt;
use tokio::io::AsyncWriteExt; use tokio::io::AsyncWriteExt;
#[op2(stack_trace)] #[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct NodeHttpResponse {
pub status: u16,
pub status_text: String,
pub headers: Vec<(ByteString, ByteString)>,
pub url: String,
pub response_rid: ResourceId,
pub content_length: Option<u64>,
pub remote_addr_ip: Option<String>,
pub remote_addr_port: Option<u16>,
pub error: Option<String>,
}
type CancelableResponseResult =
Result<Result<http::Response<Incoming>, hyper::Error>, Canceled>;
pub struct NodeHttpClientResponse {
response: Pin<Box<dyn Future<Output = CancelableResponseResult>>>,
url: String,
}
impl Debug for NodeHttpClientResponse {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("NodeHttpClientResponse")
.field("url", &self.url)
.finish()
}
}
impl deno_core::Resource for NodeHttpClientResponse {
fn name(&self) -> Cow<str> {
"nodeHttpClientResponse".into()
}
}
#[derive(Debug, thiserror::Error)]
pub enum ConnError {
#[error(transparent)]
Resource(deno_core::error::AnyError),
#[error(transparent)]
Permission(#[from] PermissionCheckError),
#[error("Invalid URL {0}")]
InvalidUrl(Url),
#[error(transparent)]
InvalidHeaderName(#[from] http::header::InvalidHeaderName),
#[error(transparent)]
InvalidHeaderValue(#[from] http::header::InvalidHeaderValue),
#[error(transparent)]
Url(#[from] url::ParseError),
#[error(transparent)]
Method(#[from] http::method::InvalidMethod),
#[error(transparent)]
Io(#[from] std::io::Error),
#[error("TLS stream is currently in use")]
TlsStreamBusy,
#[error("TCP stream is currently in use")]
TcpStreamBusy,
#[error(transparent)]
ReuniteTcp(#[from] tokio::net::tcp::ReuniteError),
#[error(transparent)]
Canceled(#[from] deno_core::Canceled),
#[error(transparent)]
Hyper(#[from] hyper::Error),
}
#[op2(async, stack_trace)]
#[serde] #[serde]
pub fn op_node_http_request<P>( pub async fn op_node_http_request_with_conn<P>(
state: &mut OpState, state: Rc<RefCell<OpState>>,
#[serde] method: ByteString, #[serde] method: ByteString,
#[string] url: String, #[string] url: String,
#[serde] headers: Vec<(ByteString, ByteString)>, #[serde] headers: Vec<(ByteString, ByteString)>,
#[smi] client_rid: Option<u32>,
#[smi] body: Option<ResourceId>, #[smi] body: Option<ResourceId>,
) -> Result<FetchReturn, FetchError> #[smi] conn_rid: ResourceId,
encrypted: bool,
) -> Result<FetchReturn, ConnError>
where where
P: crate::NodePermissions + 'static, P: crate::NodePermissions + 'static,
{ {
let client = if let Some(rid) = client_rid { let (_handle, mut sender) = if encrypted {
let r = state let resource_rc = state
.borrow_mut()
.resource_table .resource_table
.get::<HttpClientResource>(rid) .take::<TlsStreamResource>(conn_rid)
.map_err(FetchError::Resource)?; .map_err(ConnError::Resource)?;
r.client.clone() let resource =
Rc::try_unwrap(resource_rc).map_err(|_e| ConnError::TlsStreamBusy)?;
let (read_half, write_half) = resource.into_inner();
let tcp_stream = read_half.unsplit(write_half);
let io = TokioIo::new(tcp_stream);
let (sender, conn) = hyper::client::conn::http1::handshake(io).await?;
(
tokio::task::spawn(async move { conn.with_upgrades().await }),
sender,
)
} else { } else {
get_or_create_client_from_state(state)? let resource_rc = state
.borrow_mut()
.resource_table
.take::<TcpStreamResource>(conn_rid)
.map_err(ConnError::Resource)?;
let resource =
Rc::try_unwrap(resource_rc).map_err(|_| ConnError::TcpStreamBusy)?;
let (read_half, write_half) = resource.into_inner();
let tcp_stream = read_half.reunite(write_half)?;
let io = TokioIo::new(tcp_stream);
let (sender, conn) = hyper::client::conn::http1::handshake(io).await?;
// Spawn a task to poll the connection, driving the HTTP state
(
tokio::task::spawn(async move {
conn.with_upgrades().await?;
Ok::<_, _>(())
}),
sender,
)
}; };
// Create the request.
let method = Method::from_bytes(&method)?; let method = Method::from_bytes(&method)?;
let mut url = Url::parse(&url)?; let mut url_parsed = Url::parse(&url)?;
let maybe_authority = deno_fetch::extract_authority(&mut url); let maybe_authority = deno_fetch::extract_authority(&mut url_parsed);
{ {
let permissions = state.borrow_mut::<P>(); let mut state_ = state.borrow_mut();
permissions.check_net_url(&url, "ClientRequest")?; let permissions = state_.borrow_mut::<P>();
permissions.check_net_url(&url_parsed, "ClientRequest")?;
} }
let mut header_map = HeaderMap::new(); let mut header_map = HeaderMap::new();
@ -93,9 +194,10 @@ where
( (
BodyExt::boxed(NodeHttpResourceToBodyAdapter::new( BodyExt::boxed(NodeHttpResourceToBodyAdapter::new(
state state
.borrow_mut()
.resource_table .resource_table
.take_any(body) .take_any(body)
.map_err(FetchError::Resource)?, .map_err(ConnError::Resource)?,
)), )),
None, None,
) )
@ -117,10 +219,13 @@ where
let mut request = http::Request::new(body); let mut request = http::Request::new(body);
*request.method_mut() = method.clone(); *request.method_mut() = method.clone();
*request.uri_mut() = url let path = url_parsed.path();
.as_str() let query = url_parsed.query();
*request.uri_mut() = query
.map(|q| format!("{}?{}", path, q))
.unwrap_or_else(|| path.to_string())
.parse() .parse()
.map_err(|_| FetchError::InvalidUrl(url.clone()))?; .map_err(|_| ConnError::InvalidUrl(url_parsed.clone()))?;
*request.headers_mut() = header_map; *request.headers_mut() = header_map;
if let Some((username, password)) = maybe_authority { if let Some((username, password)) = maybe_authority {
@ -136,86 +241,44 @@ where
let cancel_handle = CancelHandle::new_rc(); let cancel_handle = CancelHandle::new_rc();
let cancel_handle_ = cancel_handle.clone(); let cancel_handle_ = cancel_handle.clone();
let fut = async move { let fut =
client async move { sender.send_request(request).or_cancel(cancel_handle_).await };
.send(request)
.map_err(Into::into)
.or_cancel(cancel_handle_)
.await
};
let request_rid = state.resource_table.add(FetchRequestResource { let rid = state
future: Box::pin(fut), .borrow_mut()
url, .resource_table
}); .add(NodeHttpClientResponse {
response: Box::pin(fut),
url: url.clone(),
});
let cancel_handle_rid = let cancel_handle_rid = state
state.resource_table.add(FetchCancelHandle(cancel_handle)); .borrow_mut()
.resource_table
.add(FetchCancelHandle(cancel_handle));
Ok(FetchReturn { Ok(FetchReturn {
request_rid, request_rid: rid,
cancel_handle_rid: Some(cancel_handle_rid), cancel_handle_rid: Some(cancel_handle_rid),
}) })
} }
#[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct NodeHttpFetchResponse {
pub status: u16,
pub status_text: String,
pub headers: Vec<(ByteString, ByteString)>,
pub url: String,
pub response_rid: ResourceId,
pub content_length: Option<u64>,
pub remote_addr_ip: Option<String>,
pub remote_addr_port: Option<u16>,
pub error: Option<String>,
}
#[op2(async)] #[op2(async)]
#[serde] #[serde]
pub async fn op_node_http_fetch_send( pub async fn op_node_http_await_response(
state: Rc<RefCell<OpState>>, state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId, #[smi] rid: ResourceId,
) -> Result<NodeHttpFetchResponse, FetchError> { ) -> Result<NodeHttpResponse, ConnError> {
let request = state let resource = state
.borrow_mut() .borrow_mut()
.resource_table .resource_table
.take::<FetchRequestResource>(rid) .take::<NodeHttpClientResponse>(rid)
.map_err(FetchError::Resource)?; .map_err(ConnError::Resource)?;
let resource = Rc::try_unwrap(resource)
let request = Rc::try_unwrap(request) .map_err(|_| ConnError::Resource(bad_resource("NodeHttpClientResponse")))?;
.ok()
.expect("multiple op_node_http_fetch_send ongoing");
let res = match request.future.await {
Ok(Ok(res)) => res,
Ok(Err(err)) => {
// We're going to try and rescue the error cause from a stream and return it from this fetch.
// If any error in the chain is a hyper body error, return that as a special result we can use to
// reconstruct an error chain (eg: `new TypeError(..., { cause: new Error(...) })`).
// TODO(mmastrac): it would be a lot easier if we just passed a v8::Global through here instead
if let FetchError::ClientSend(err_src) = &err {
if let Some(client_err) = std::error::Error::source(&err_src.source) {
if let Some(err_src) = client_err.downcast_ref::<hyper::Error>() {
if let Some(err_src) = std::error::Error::source(err_src) {
return Ok(NodeHttpFetchResponse {
error: Some(err_src.to_string()),
..Default::default()
});
}
}
}
}
return Err(err);
}
Err(_) => return Err(FetchError::RequestCanceled),
};
let res = resource.response.await??;
let status = res.status(); let status = res.status();
let url = request.url.into();
let mut res_headers = Vec::new(); let mut res_headers = Vec::new();
for (key, val) in res.headers().iter() { for (key, val) in res.headers().iter() {
res_headers.push((key.as_str().into(), val.as_bytes().into())); res_headers.push((key.as_str().into(), val.as_bytes().into()));
@ -232,16 +295,22 @@ pub async fn op_node_http_fetch_send(
(None, None) (None, None)
}; };
let (parts, body) = res.into_parts();
let body = body.map_err(deno_core::anyhow::Error::from);
let body = body.boxed();
let res = http::Response::from_parts(parts, body);
let response_rid = state let response_rid = state
.borrow_mut() .borrow_mut()
.resource_table .resource_table
.add(NodeHttpFetchResponseResource::new(res, content_length)); .add(NodeHttpResponseResource::new(res, content_length));
Ok(NodeHttpFetchResponse { Ok(NodeHttpResponse {
status: status.as_u16(), status: status.as_u16(),
status_text: status.canonical_reason().unwrap_or("").to_string(), status_text: status.canonical_reason().unwrap_or("").to_string(),
headers: res_headers, headers: res_headers,
url, url: resource.url,
response_rid, response_rid,
content_length, content_length,
remote_addr_ip, remote_addr_ip,
@ -255,12 +324,12 @@ pub async fn op_node_http_fetch_send(
pub async fn op_node_http_fetch_response_upgrade( pub async fn op_node_http_fetch_response_upgrade(
state: Rc<RefCell<OpState>>, state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId, #[smi] rid: ResourceId,
) -> Result<ResourceId, FetchError> { ) -> Result<ResourceId, ConnError> {
let raw_response = state let raw_response = state
.borrow_mut() .borrow_mut()
.resource_table .resource_table
.take::<NodeHttpFetchResponseResource>(rid) .take::<NodeHttpResponseResource>(rid)
.map_err(FetchError::Resource)?; .map_err(ConnError::Resource)?;
let raw_response = Rc::try_unwrap(raw_response) let raw_response = Rc::try_unwrap(raw_response)
.expect("Someone is holding onto NodeHttpFetchResponseResource"); .expect("Someone is holding onto NodeHttpFetchResponseResource");
@ -283,7 +352,7 @@ pub async fn op_node_http_fetch_response_upgrade(
} }
read_tx.write_all(&buf[..read]).await?; read_tx.write_all(&buf[..read]).await?;
} }
Ok::<_, FetchError>(()) Ok::<_, ConnError>(())
}); });
spawn(async move { spawn(async move {
let mut buf = [0; 1024]; let mut buf = [0; 1024];
@ -294,7 +363,7 @@ pub async fn op_node_http_fetch_response_upgrade(
} }
upgraded_tx.write_all(&buf[..read]).await?; upgraded_tx.write_all(&buf[..read]).await?;
} }
Ok::<_, FetchError>(()) Ok::<_, ConnError>(())
}); });
} }
@ -379,13 +448,13 @@ impl Default for NodeHttpFetchResponseReader {
} }
#[derive(Debug)] #[derive(Debug)]
pub struct NodeHttpFetchResponseResource { pub struct NodeHttpResponseResource {
pub response_reader: AsyncRefCell<NodeHttpFetchResponseReader>, pub response_reader: AsyncRefCell<NodeHttpFetchResponseReader>,
pub cancel: CancelHandle, pub cancel: CancelHandle,
pub size: Option<u64>, pub size: Option<u64>,
} }
impl NodeHttpFetchResponseResource { impl NodeHttpResponseResource {
pub fn new(response: http::Response<ResBody>, size: Option<u64>) -> Self { pub fn new(response: http::Response<ResBody>, size: Option<u64>) -> Self {
Self { Self {
response_reader: AsyncRefCell::new(NodeHttpFetchResponseReader::Start( response_reader: AsyncRefCell::new(NodeHttpFetchResponseReader::Start(
@ -400,14 +469,14 @@ impl NodeHttpFetchResponseResource {
let reader = self.response_reader.into_inner(); let reader = self.response_reader.into_inner();
match reader { match reader {
NodeHttpFetchResponseReader::Start(resp) => { NodeHttpFetchResponseReader::Start(resp) => {
Ok(hyper::upgrade::on(resp).await?) hyper::upgrade::on(resp).await
} }
_ => unreachable!(), _ => unreachable!(),
} }
} }
} }
impl Resource for NodeHttpFetchResponseResource { impl Resource for NodeHttpResponseResource {
fn name(&self) -> Cow<str> { fn name(&self) -> Cow<str> {
"fetchResponse".into() "fetchResponse".into()
} }
@ -454,9 +523,7 @@ impl Resource for NodeHttpFetchResponseResource {
// safely call `await` on it without creating a race condition. // safely call `await` on it without creating a race condition.
Some(_) => match reader.as_mut().next().await.unwrap() { Some(_) => match reader.as_mut().next().await.unwrap() {
Ok(chunk) => assert!(chunk.is_empty()), Ok(chunk) => assert!(chunk.is_empty()),
Err(err) => { Err(err) => break Err(type_error(err.to_string())),
break Err(deno_core::error::type_error(err.to_string()))
}
}, },
None => break Ok(BufView::empty()), None => break Ok(BufView::empty()),
} }
@ -464,7 +531,7 @@ impl Resource for NodeHttpFetchResponseResource {
}; };
let cancel_handle = RcRef::map(self, |r| &r.cancel); let cancel_handle = RcRef::map(self, |r| &r.cancel);
fut.try_or_cancel(cancel_handle).await.map_err(Into::into) fut.try_or_cancel(cancel_handle).await
}) })
} }
@ -514,8 +581,9 @@ impl Stream for NodeHttpResourceToBodyAdapter {
Poll::Ready(res) => match res { Poll::Ready(res) => match res {
Ok(buf) if buf.is_empty() => Poll::Ready(None), Ok(buf) if buf.is_empty() => Poll::Ready(None),
Ok(buf) => { Ok(buf) => {
let bytes: Bytes = buf.to_vec().into();
this.1 = Some(this.0.clone().read(64 * 1024)); this.1 = Some(this.0.clone().read(64 * 1024));
Poll::Ready(Some(Ok(buf.to_vec().into()))) Poll::Ready(Some(Ok(bytes)))
} }
Err(err) => Poll::Ready(Some(Err(err))), Err(err) => Poll::Ready(Some(Err(err))),
}, },
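
op_node_http_request_with_conn above takes ownership of an existing TCP or TLS stream resource and performs hyper's HTTP/1 client handshake on it, spawning a task to drive the connection while the SendRequest half issues the request. A standalone sketch of that pattern with hyper 1.x, hyper-util, http-body-util and tokio, using a plain TcpStream instead of Deno's resource halves:

use http_body_util::Empty;
use hyper::body::Bytes;
use hyper::Request;
use hyper_util::rt::TokioIo;
use tokio::net::TcpStream;

// Handshake over an existing stream, spawn a task to drive the connection,
// then send a request through the SendRequest half.
async fn get_over_existing_conn(
    stream: TcpStream,
    host: &str,
) -> Result<hyper::Response<hyper::body::Incoming>, Box<dyn std::error::Error>> {
    let io = TokioIo::new(stream);
    let (mut sender, conn) = hyper::client::conn::http1::handshake(io).await?;

    // The connection future must be polled for the request to make progress.
    tokio::task::spawn(async move {
        if let Err(err) = conn.await {
            eprintln!("connection error: {err}");
        }
    });

    let req = Request::builder()
        .uri("/")
        .header(hyper::header::HOST, host)
        .body(Empty::<Bytes>::new())?;

    Ok(sender.send_request(req).await?)
}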

View file

@ -25,6 +25,17 @@ macro_rules! generate_builtin_node_module_lists {
// NOTE(bartlomieju): keep this list in sync with `ext/node/polyfills/01_require.js` // NOTE(bartlomieju): keep this list in sync with `ext/node/polyfills/01_require.js`
generate_builtin_node_module_lists! { generate_builtin_node_module_lists! {
"_http_agent",
"_http_common",
"_http_outgoing",
"_http_server",
"_stream_duplex",
"_stream_passthrough",
"_stream_readable",
"_stream_transform",
"_stream_writable",
"_tls_common",
"_tls_wrap",
"assert", "assert",
"assert/strict", "assert/strict",
"async_hooks", "async_hooks",

View file

@ -7,6 +7,7 @@ const {
SafeRegExp, SafeRegExp,
Symbol, Symbol,
} = primordials; } = primordials;
import { HTTPParser } from "ext:deno_node/internal_binding/http_parser.ts";
export const CRLF = "\r\n"; export const CRLF = "\r\n";
export const kIncomingMessage = Symbol("IncomingMessage"); export const kIncomingMessage = Symbol("IncomingMessage");
@ -79,6 +80,8 @@ export {
checkIsHttpToken as _checkIsHttpToken, checkIsHttpToken as _checkIsHttpToken,
}; };
export { HTTPParser };
export default { export default {
_checkInvalidHeaderChar: checkInvalidHeaderChar, _checkInvalidHeaderChar: checkInvalidHeaderChar,
_checkIsHttpToken: checkIsHttpToken, _checkIsHttpToken: checkIsHttpToken,
@ -87,4 +90,5 @@ export default {
continueExpression, continueExpression,
kIncomingMessage, kIncomingMessage,
methods, methods,
HTTPParser,
}; };

View file

@ -491,19 +491,53 @@ Object.defineProperties(
return ret; return ret;
}, },
/** Right after socket is ready, we need to writeHeader() to setup the request and
* client. This is invoked by onSocket(). */
_flushHeaders() {
if (!this._headerSent) {
this._headerSent = true;
this._writeHeader();
}
},
// deno-lint-ignore no-explicit-any // deno-lint-ignore no-explicit-any
_send(data: any, encoding?: string | null, callback?: () => void) { _send(data: any, encoding?: string | null, callback?: () => void) {
if (!this._headerSent && this._header !== null) { // if socket is ready, write the data after headers are written.
this._writeHeader(); // if socket is not ready, buffer data in outputbuffer.
this._headerSent = true; if (
this.socket && !this.socket.connecting && this.outputData.length === 0
) {
if (!this._headerSent) {
this._writeHeader();
this._headerSent = true;
}
return this._writeRaw(data, encoding, callback);
} else {
this.outputData.push({ data, encoding, callback });
} }
return this._writeRaw(data, encoding, callback); return false;
}, },
_writeHeader() { _writeHeader() {
throw new ERR_METHOD_NOT_IMPLEMENTED("_writeHeader()"); throw new ERR_METHOD_NOT_IMPLEMENTED("_writeHeader()");
}, },
_flushBuffer() {
const outputLength = this.outputData.length;
if (outputLength <= 0 || !this.socket || !this._bodyWriter) {
return undefined;
}
const { data, encoding, callback } = this.outputData.shift();
const ret = this._writeRaw(data, encoding, callback);
if (this.outputData.length > 0) {
this.once("drain", this._flushBuffer);
}
return ret;
},
_writeRaw( _writeRaw(
// deno-lint-ignore no-explicit-any // deno-lint-ignore no-explicit-any
data: any, data: any,
@ -517,11 +551,15 @@ Object.defineProperties(
data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
} }
if (data.buffer.byteLength > 0) { if (data.buffer.byteLength > 0) {
this._bodyWriter.write(data).then(() => { this._bodyWriter.ready.then(() => {
callback?.(); if (this._bodyWriter.desiredSize > 0) {
this.emit("drain"); this._bodyWriter.write(data).then(() => {
}).catch((e) => { callback?.();
this._requestSendError = e; this.emit("drain");
}).catch((e) => {
this._requestSendError = e;
});
}
}); });
} }
return false; return false;
@ -658,7 +696,6 @@ Object.defineProperties(
const { header } = state; const { header } = state;
this._header = header + "\r\n"; this._header = header + "\r\n";
this._headerSent = false;
// Wait until the first body chunk, or close(), is sent to flush, // Wait until the first body chunk, or close(), is sent to flush,
// UNLESS we're sending Expect: 100-continue. // UNLESS we're sending Expect: 100-continue.

View file

@ -154,6 +154,13 @@ export class TLSSocket extends net.Socket {
const afterConnect = handle.afterConnect; const afterConnect = handle.afterConnect;
handle.afterConnect = async (req: any, status: number) => { handle.afterConnect = async (req: any, status: number) => {
options.hostname ??= undefined; // coerce to undefined if null, startTls expects hostname to be undefined options.hostname ??= undefined; // coerce to undefined if null, startTls expects hostname to be undefined
if (tlssock._isNpmAgent) {
// skips the TLS handshake for @npmcli/agent as it's handled by
// onSocket handler of ClientRequest object.
tlssock.emit("secure");
tlssock.removeListener("end", onConnectEnd);
return afterConnect.call(handle, req, status);
}
try { try {
const conn = await Deno.startTls(handle[kStreamBaseField], options); const conn = await Deno.startTls(handle[kStreamBaseField], options);

View file

@ -5,16 +5,17 @@
 import { core, primordials } from "ext:core/mod.js";
 import {
+  op_node_http_await_response,
   op_node_http_fetch_response_upgrade,
-  op_node_http_fetch_send,
-  op_node_http_request,
+  op_node_http_request_with_conn,
+  op_tls_start,
 } from "ext:core/ops";
 import { TextEncoder } from "ext:deno_web/08_text_encoding.js";
 import { setTimeout } from "ext:deno_web/02_timers.js";
 import {
   _normalizeArgs,
-  // createConnection,
+  createConnection,
   ListenOptions,
   Socket,
 } from "node:net";
@ -48,9 +49,10 @@ import { kOutHeaders } from "ext:deno_node/internal/http.ts";
 import { _checkIsHttpToken as checkIsHttpToken } from "node:_http_common";
 import { Agent, globalAgent } from "node:_http_agent";
 import { urlToHttpOptions } from "ext:deno_node/internal/url.ts";
-import { kEmptyObject } from "ext:deno_node/internal/util.mjs";
+import { kEmptyObject, once } from "ext:deno_node/internal/util.mjs";
 import { constants, TCP } from "ext:deno_node/internal_binding/tcp_wrap.ts";
-import { notImplemented, warnNotImplemented } from "ext:deno_node/_utils.ts";
+import { kStreamBaseField } from "ext:deno_node/internal_binding/stream_wrap.ts";
+import { notImplemented } from "ext:deno_node/_utils.ts";
 import {
   connResetException,
   ERR_HTTP_HEADERS_SENT,
@ -62,7 +64,6 @@ import {
 } from "ext:deno_node/internal/errors.ts";
 import { getTimerDuration } from "ext:deno_node/internal/timers.mjs";
 import { serve, upgradeHttpRaw } from "ext:deno_http/00_serve.ts";
-import { createHttpClient } from "ext:deno_fetch/22_http_client.js";
 import { headersEntries } from "ext:deno_fetch/20_headers.js";
 import { timerId } from "ext:deno_web/03_abort_signal.js";
 import { clearTimeout as webClearTimeout } from "ext:deno_web/02_timers.js";
@ -148,6 +149,10 @@ class FakeSocket extends EventEmitter {
   }
 }

+function emitErrorEvent(request, error) {
+  request.emit("error", error);
+}
+
 /** ClientRequest represents the http(s) request from the client */
 class ClientRequest extends OutgoingMessage {
   defaultProtocol = "http:";
@ -160,6 +165,8 @@ class ClientRequest extends OutgoingMessage {
   useChunkedEncodingByDefault: boolean;
   path: string;
   _req: { requestRid: number; cancelHandleRid: number | null } | undefined;
+  _encrypted = false;
+  socket: Socket;

   constructor(
     input: string | URL,
@ -382,17 +389,11 @@ class ClientRequest extends OutgoingMessage {
       delete optsWithoutSignal.signal;
     }

-    if (options!.createConnection) {
-      warnNotImplemented("ClientRequest.options.createConnection");
-    }
-
     if (options!.lookup) {
       notImplemented("ClientRequest.options.lookup");
     }

-    // initiate connection
-    // TODO(crowlKats): finish this
-    /*if (this.agent) {
+    if (this.agent) {
       this.agent.addRequest(this, optsWithoutSignal);
     } else {
       // No agent, default to Connection:close.
@ -422,8 +423,7 @@ class ClientRequest extends OutgoingMessage {
         debug("CLIENT use net.createConnection", optsWithoutSignal);
         this.onSocket(createConnection(optsWithoutSignal));
       }
-    }*/
-    this.onSocket(new FakeSocket({ encrypted: this._encrypted }));
+    }
   }

   _writeHeader() {
@ -437,9 +437,6 @@ class ClientRequest extends OutgoingMessage {
       }
     }

-    const client = this._getClient() ?? createHttpClient({ http2: false });
-    this._client = client;
-
     if (
       this.method === "POST" || this.method === "PATCH" || this.method === "PUT"
     ) {
@ -455,17 +452,29 @@ class ClientRequest extends OutgoingMessage {
       this._bodyWriteRid = resourceForReadableStream(readable);
     }

-    this._req = op_node_http_request(
-      this.method,
-      url,
-      headers,
-      client[internalRidSymbol],
-      this._bodyWriteRid,
-    );
-
     (async () => {
       try {
-        const res = await op_node_http_fetch_send(this._req.requestRid);
+        const parsedUrl = new URL(url);
+        let baseConnRid =
+          this.socket._handle[kStreamBaseField][internalRidSymbol];
+        if (this._encrypted) {
+          [baseConnRid] = op_tls_start({
+            rid: baseConnRid,
+            hostname: parsedUrl.hostname,
+            caCerts: [],
+            alpnProtocols: ["http/1.0", "http/1.1"],
+          });
+        }
+        this._req = await op_node_http_request_with_conn(
+          this.method,
+          url,
+          headers,
+          this._bodyWriteRid,
+          baseConnRid,
+          this._encrypted,
+        );
+        this._flushBuffer();
+        const res = await op_node_http_await_response(this._req!.requestRid);
         if (this._req.cancelHandleRid !== null) {
           core.tryClose(this._req.cancelHandleRid);
         }
@ -473,7 +482,6 @@ class ClientRequest extends OutgoingMessage {
           this._timeout.removeEventListener("abort", this._timeoutCb);
           webClearTimeout(this._timeout[timerId]);
         }
-        this._client.close();
         const incoming = new IncomingMessageForClient(this.socket);
         incoming.req = this;
         this.res = incoming;
@ -512,12 +520,9 @@ class ClientRequest extends OutgoingMessage {
           if (this.method === "CONNECT") {
            throw new Error("not implemented CONNECT");
          }

          const upgradeRid = await op_node_http_fetch_response_upgrade(
            res.responseRid,
          );
-          assert(typeof res.remoteAddrIp !== "undefined");
-          assert(typeof res.remoteAddrIp !== "undefined");
          const conn = new UpgradedConn(
            upgradeRid,
            {
@ -543,13 +548,11 @@ class ClientRequest extends OutgoingMessage {
          this._closed = true;
          this.emit("close");
        } else {
-          {
-            incoming._bodyRid = res.responseRid;
-          }
+          incoming._bodyRid = res.responseRid;
          this.emit("response", incoming);
        }
      } catch (err) {
-        if (this._req.cancelHandleRid !== null) {
+        if (this._req && this._req.cancelHandleRid !== null) {
          core.tryClose(this._req.cancelHandleRid);
        }
@ -592,11 +595,54 @@ class ClientRequest extends OutgoingMessage {
     return undefined;
   }

-  // TODO(bartlomieju): handle error
-  onSocket(socket, _err) {
+  onSocket(socket, err) {
     nextTick(() => {
-      this.socket = socket;
-      this.emit("socket", socket);
+      // deno-lint-ignore no-this-alias
+      const req = this;
+      if (req.destroyed || err) {
+        req.destroyed = true;
+
+        // deno-lint-ignore no-inner-declarations
+        function _destroy(req, err) {
+          if (!req.aborted && !err) {
+            err = new connResetException("socket hang up");
+          }
+          if (err) {
+            emitErrorEvent(req, err);
+          }
+          req._closed = true;
+          req.emit("close");
+        }
+
+        if (socket) {
+          if (!err && req.agent && !socket.destroyed) {
+            socket.emit("free");
+          } else {
+            finished(socket.destroy(err || req[kError]), (er) => {
+              if (er?.code === "ERR_STREAM_PREMATURE_CLOSE") {
+                er = null;
+              }
+              _destroy(req, er || err);
+            });
+            return;
+          }
+        }
+
+        _destroy(req, err || req[kError]);
+      } else {
+        // Note: this code is specific to deno to initiate a request.
+        const onConnect = () => {
+          // Flush the internal buffers once socket is ready.
+          this._flushHeaders();
+        };
+        this.socket = socket;
+        this.emit("socket", socket);
+        if (socket.readyState === "opening") {
+          socket.on("connect", onConnect);
+        } else {
+          onConnect();
+        }
+      }
     });
   }
@ -618,14 +664,20 @@ class ClientRequest extends OutgoingMessage {
     if (chunk) {
       this.write_(chunk, encoding, null, true);
     } else if (!this._headerSent) {
-      this._contentLength = 0;
-      this._implicitHeader();
-      this._send("", "latin1");
+      if (
+        (this.socket && !this.socket.connecting) || // socket is not connecting, or
+        (!this.socket && this.outputData.length === 0) // no data to send
+      ) {
+        this._contentLength = 0;
+        this._implicitHeader();
+        this._send("", "latin1");
+      }
     }

-    (async () => {
+    const finish = async () => {
       try {
+        await this._bodyWriter.ready;
         await this._bodyWriter?.close();
-      } catch (_) {
+      } catch {
         // The readable stream resource is dropped right after
         // read is complete closing the writable stream resource.
         // If we try to close the writer again, it will result in an
@ -633,10 +685,20 @@ class ClientRequest extends OutgoingMessage {
       }
       try {
         cb?.();
-      } catch (_) {
+      } catch {
         //
       }
-    })();
+    };
+
+    if (this.socket && this._bodyWriter) {
+      finish();
+    } else {
+      this.on("drain", () => {
+        if (this.outputData.length === 0) {
+          finish();
+        }
+      });
+    }

     return this;
   }
@ -658,11 +720,6 @@ class ClientRequest extends OutgoingMessage {
     }
     this.destroyed = true;

-    const rid = this._client?.[internalRidSymbol];
-    if (rid) {
-      core.tryClose(rid);
-    }
-
     // Request might be closed before we actually made it
     if (this._req !== undefined && this._req.cancelHandleRid !== null) {
       core.tryClose(this._req.cancelHandleRid);
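
With the agent/createConnection path restored above, a caller-supplied socket factory is actually used instead of the old FakeSocket stub. A hedged usage sketch of that path from user code (example.com is a placeholder target; agent: false forces the createConnection branch, as in Node's http API; run with --allow-net):

// Usage sketch: node:http request with a user-supplied createConnection.
import http from "node:http";
import net from "node:net";

const req = http.request(
  {
    host: "example.com",
    port: 80,
    path: "/",
    agent: false, // skip the agent so createConnection is consulted
    // The rewritten ClientRequest hands this socket to onSocket().
    createConnection: () => net.connect({ host: "example.com", port: 80 }),
  },
  (res) => {
    console.log("status:", res.statusCode);
    res.resume();
  },
);
req.end();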

View file

@ -112,7 +112,7 @@ export const globalAgent = new Agent({
 /** HttpsClientRequest class loosely follows http.ClientRequest class API. */
 class HttpsClientRequest extends ClientRequest {
-  override _encrypted: true;
+  override _encrypted = true;
   override defaultProtocol = "https:";
   override _getClient(): Deno.HttpClient | undefined {
     if (caCerts === null) {

View file

@ -36,7 +36,6 @@ import {
} from "ext:deno_node/internal_binding/async_wrap.ts"; } from "ext:deno_node/internal_binding/async_wrap.ts";
import { ares_strerror } from "ext:deno_node/internal_binding/ares.ts"; import { ares_strerror } from "ext:deno_node/internal_binding/ares.ts";
import { notImplemented } from "ext:deno_node/_utils.ts"; import { notImplemented } from "ext:deno_node/_utils.ts";
import { isWindows } from "ext:deno_node/_util/os.ts";
interface LookupAddress { interface LookupAddress {
address: string; address: string;
@ -68,7 +67,7 @@ export function getaddrinfo(
_hints: number, _hints: number,
verbatim: boolean, verbatim: boolean,
): number { ): number {
let addresses: string[] = []; const addresses: string[] = [];
// TODO(cmorten): use hints // TODO(cmorten): use hints
// REF: https://nodejs.org/api/dns.html#dns_supported_getaddrinfo_flags // REF: https://nodejs.org/api/dns.html#dns_supported_getaddrinfo_flags
@ -107,13 +106,6 @@ export function getaddrinfo(
}); });
} }
// TODO(@bartlomieju): Forces IPv4 as a workaround for Deno not
// aligning with Node on implicit binding on Windows
// REF: https://github.com/denoland/deno/issues/10762
if (isWindows && hostname === "localhost") {
addresses = addresses.filter((address) => isIPv4(address));
}
req.oncomplete(error, addresses); req.oncomplete(error, addresses);
})(); })();

View file

@ -986,16 +986,20 @@ function _lookupAndConnect(
   } else {
     self._unrefTimer();

-    defaultTriggerAsyncIdScope(
-      self[asyncIdSymbol],
-      _internalConnect,
-      self,
-      ip,
-      port,
-      addressType,
-      localAddress,
-      localPort,
-    );
+    defaultTriggerAsyncIdScope(self[asyncIdSymbol], nextTick, () => {
+      if (self.connecting) {
+        defaultTriggerAsyncIdScope(
+          self[asyncIdSymbol],
+          _internalConnect,
+          self,
+          ip,
+          port,
+          addressType,
+          localAddress,
+          localPort,
+        );
+      }
+    });
   }
 },
 );
@ -1197,6 +1201,9 @@ export class Socket extends Duplex {
   _host: string | null = null;
   // deno-lint-ignore no-explicit-any
   _parent: any = null;
+  // The flag for detecting if it's called in @npmcli/agent
+  // See discussions in https://github.com/denoland/deno/pull/25470 for more details.
+  _isNpmAgent = false;
   autoSelectFamilyAttemptedAddresses: AddressInfo[] | undefined = undefined;

   constructor(options: SocketOptions | number) {
@ -1217,6 +1224,19 @@ export class Socket extends Duplex {
     super(options);

+    // Note: If the socket is created from @npmcli/agent, the 'socket' event
+    // on ClientRequest object happens after 'connect' event on Socket object.
+    // That swaps the sequence of op_node_http_request_with_conn() call and
+    // initial socket read. That causes op_node_http_request_with_conn() not
+    // working.
+    // To avoid the above situation, we detect the socket created from
+    // @npmcli/agent and pause the socket (and also skips the startTls call
+    // if it's TLSSocket)
+    this._isNpmAgent = new Error().stack?.includes("@npmcli/agent") || false;
+    if (this._isNpmAgent) {
+      this.pause();
+    }
+
     if (options.handle) {
       this._handle = options.handle;
       this[asyncIdSymbol] = _getNewAsyncId(this._handle);
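
The _isNpmAgent flag above is set by inspecting the stack trace captured at construction time. The same call-site detection trick in isolation (calledFrom and ExampleSocket are illustrative names, not the polyfill's API):

// Sketch of call-site detection via a captured Error stack, as used for _isNpmAgent.
function calledFrom(marker: string): boolean {
  // The stack of a freshly created Error lists the current callers' frames.
  return new Error().stack?.includes(marker) ?? false;
}

class ExampleSocket {
  readonly fromNpmAgent: boolean;
  constructor() {
    // True only when some frame in the constructor's call stack mentions the marker.
    this.fromNpmAgent = calledFrom("@npmcli/agent");
  }
}

console.log(new ExampleSocket().fromNpmAgent); // false when constructed directly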

View file

@ -227,6 +227,7 @@ function arrayBufferViewTypeToIndex(abView: ArrayBufferView) {
   // Index 10 is FastBuffer.
   if (type === "[object BigInt64Array]") return 11;
   if (type === "[object BigUint64Array]") return 12;
+  if (type === "[object Float16Array]") return 13;
   return -1;
 }

 export class DefaultSerializer extends Serializer {
@ -276,6 +277,7 @@ function arrayBufferViewIndexToType(index: number): any {
   if (index === 10) return Buffer;
   if (index === 11) return BigInt64Array;
   if (index === 12) return BigUint64Array;
+  if (index === 13) return Float16Array;
   return undefined;
 }
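
These two branches extend the view-type/index mapping so a Float16Array can round-trip through node:v8 serialization. A rough check, assuming the runtime exposes Float16Array and ships this mapping:

// Round-trip sketch for the new Float16Array mapping (index 13).
import { deserialize, serialize } from "node:v8";

const input = new Float16Array([0.5, 1.5, 2.5]); // values exactly representable in f16
const copy = deserialize(serialize(input));

console.log(copy instanceof Float16Array); // expected: true
console.log([...copy]); // expected: [0.5, 1.5, 2.5]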

View file

@ -2,7 +2,7 @@
 [package]
 name = "deno_telemetry"
-version = "0.3.0"
+version = "0.4.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

View file

@ -97,13 +97,28 @@ deno_core::extension!(
 );

 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct OtelConfig {
+pub struct OtelRuntimeConfig {
   pub runtime_name: Cow<'static, str>,
   pub runtime_version: Cow<'static, str>,
+}
+
+#[derive(Default, Debug, Clone, Serialize, Deserialize)]
+pub struct OtelConfig {
+  pub tracing_enabled: bool,
   pub console: OtelConsoleConfig,
   pub deterministic: bool,
 }
+
+impl OtelConfig {
+  pub fn as_v8(&self) -> Box<[u8]> {
+    Box::new([
+      self.tracing_enabled as u8,
+      self.console as u8,
+      self.deterministic as u8,
+    ])
+  }
+}

 #[derive(Debug, Clone, Copy, Serialize, Deserialize)]
 #[repr(u8)]
 pub enum OtelConsoleConfig {
@ -112,14 +127,9 @@ pub enum OtelConsoleConfig {
   Replace = 2,
 }

-impl Default for OtelConfig {
+impl Default for OtelConsoleConfig {
   fn default() -> Self {
-    Self {
-      runtime_name: Cow::Borrowed(env!("CARGO_PKG_NAME")),
-      runtime_version: Cow::Borrowed(env!("CARGO_PKG_VERSION")),
-      console: OtelConsoleConfig::Capture,
-      deterministic: false,
-    }
+    Self::Ignore
   }
 }
@ -411,16 +421,14 @@ static BUILT_IN_INSTRUMENTATION_SCOPE: OnceCell<
   opentelemetry::InstrumentationScope,
 > = OnceCell::new();

-pub fn init(config: OtelConfig) -> anyhow::Result<()> {
+pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> {
   // Parse the `OTEL_EXPORTER_OTLP_PROTOCOL` variable. The opentelemetry_*
   // crates don't do this automatically.
   // TODO(piscisaureus): enable GRPC support.
   let protocol = match env::var("OTEL_EXPORTER_OTLP_PROTOCOL").as_deref() {
     Ok("http/protobuf") => Protocol::HttpBinary,
     Ok("http/json") => Protocol::HttpJson,
-    Ok("") | Err(env::VarError::NotPresent) => {
-      return Ok(());
-    }
+    Ok("") | Err(env::VarError::NotPresent) => Protocol::HttpBinary,
     Ok(protocol) => {
       return Err(anyhow!(
         "Env var OTEL_EXPORTER_OTLP_PROTOCOL specifies an unsupported protocol: {}",
@ -732,9 +740,9 @@ fn op_otel_instrumentation_scope_enter(

 #[op2(fast)]
 fn op_otel_instrumentation_scope_enter_builtin(state: &mut OpState) {
-  state.put(InstrumentationScope(
-    BUILT_IN_INSTRUMENTATION_SCOPE.get().unwrap().clone(),
-  ));
+  if let Some(scope) = BUILT_IN_INSTRUMENTATION_SCOPE.get() {
+    state.put(InstrumentationScope(scope.clone()));
+  }
 }

 #[op2(fast)]
@ -749,6 +757,9 @@ fn op_otel_log(
   let Some(Processors { logs, .. }) = OTEL_PROCESSORS.get() else {
     return;
   };
+  let Some(instrumentation_scope) = BUILT_IN_INSTRUMENTATION_SCOPE.get() else {
+    return;
+  };

   // Convert the integer log level that ext/console uses to the corresponding
   // OpenTelemetry log severity.
@ -776,10 +787,7 @@ fn op_otel_log(
     );
   }

-  logs.emit(
-    &mut log_record,
-    BUILT_IN_INSTRUMENTATION_SCOPE.get().unwrap(),
-  );
+  logs.emit(&mut log_record, instrumentation_scope);
 }

 fn owned_string<'s>(

View file

@ -220,6 +220,7 @@ function submitSpan(
   startTime: number,
   endTime: number,
 ) {
+  if (!TRACING_ENABLED) return;
   if (!(traceFlags & TRACE_FLAG_SAMPLED)) return;

   // TODO(@lucacasonato): `resource` is ignored for now, should we implement it?
@ -949,15 +950,15 @@ const otelConsoleConfig = {
 };

 export function bootstrap(
-  config: [] | [
+  config: [
+    0 | 1,
     typeof otelConsoleConfig[keyof typeof otelConsoleConfig],
-    number,
+    0 | 1,
   ],
 ): void {
-  if (config.length === 0) return;
-  const { 0: consoleConfig, 1: deterministic } = config;
+  const { 0: tracingEnabled, 1: consoleConfig, 2: deterministic } = config;

-  TRACING_ENABLED = true;
+  TRACING_ENABLED = tracingEnabled === 1;
   DETERMINISTIC = deterministic === 1;

   switch (consoleConfig) {
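
On the Rust side, OtelConfig::as_v8 above packs the config into three bytes, and bootstrap destructures them positionally. A small sketch of that mapping on the JS side (the helper and field names are illustrative):

// Sketch: decoding the 3-byte tuple produced by OtelConfig::as_v8.
type OtelBootstrapTuple = [number, number, number]; // [tracingEnabled, console, deterministic]

function decodeOtelBootstrap(
  [tracingEnabled, consoleConfig, deterministic]: OtelBootstrapTuple,
) {
  return {
    tracingEnabled: tracingEnabled === 1,
    // 0/1/2 correspond to OtelConsoleConfig variants (Replace = 2 in the Rust diff).
    consoleConfig,
    deterministic: deterministic === 1,
  };
}

console.log(decodeOtelBootstrap([1, 2, 0]));
// -> { tracingEnabled: true, consoleConfig: 2, deterministic: false }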

View file

@ -2,7 +2,7 @@
 [package]
 name = "deno_tls"
-version = "0.168.0"
+version = "0.169.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

View file

@ -2,7 +2,7 @@
 [package]
 name = "deno_url"
-version = "0.181.0"
+version = "0.182.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

View file

@ -9,14 +9,15 @@
 import { primordials } from "ext:core/mod.js";
 const {
+  Error,
   ErrorPrototype,
-  ErrorCaptureStackTrace,
   ObjectDefineProperty,
   ObjectCreate,
   ObjectEntries,
   ObjectHasOwn,
   ObjectPrototypeIsPrototypeOf,
   ObjectSetPrototypeOf,
+  ReflectConstruct,
   Symbol,
   SymbolFor,
 } = primordials;
@ -107,12 +108,14 @@ class DOMException {
     );
     const code = nameToCodeMapping[name] ?? 0;

-    this[_message] = message;
-    this[_name] = name;
-    this[_code] = code;
-    this[webidl.brand] = webidl.brand;
+    // execute Error constructor to have stack property and [[ErrorData]] internal slot
+    const error = ReflectConstruct(Error, [], new.target);
+    error[_message] = message;
+    error[_name] = name;
+    error[_code] = code;
+    error[webidl.brand] = webidl.brand;

-    ErrorCaptureStackTrace(this, DOMException);
+    return error;
   }

   get message() {
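
The constructor rewrite above uses the usual trick for giving a non-Error-derived class real Error internals: construct an Error via ReflectConstruct with the original new.target so the result carries a stack and the [[ErrorData]] slot, then return it from the constructor. A standalone illustration of the pattern (FakeException is made up; this is not the DOMException code):

// Standalone sketch of the ReflectConstruct(Error, [], new.target) pattern.
class FakeException {
  constructor(message = "") {
    // Build a real Error (stack + [[ErrorData]]) whose prototype comes from
    // new.target, then return it in place of `this`.
    const error = Reflect.construct(Error, [message], new.target) as Error;
    error.name = "FakeException";
    return error;
  }
}
// Let instances also reach Error.prototype, similar to how DOMException is wired up.
Object.setPrototypeOf(FakeException.prototype, Error.prototype);

const err = new FakeException("boom");
console.log(err instanceof FakeException); // true
console.log(err instanceof Error); // true
console.log(typeof (err as Error).stack); // "string"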

View file

@ -2,7 +2,7 @@
 [package]
 name = "deno_web"
-version = "0.212.0"
+version = "0.213.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

View file

@ -2,7 +2,7 @@
 [package]
 name = "deno_webgpu"
-version = "0.148.0"
+version = "0.149.0"
 authors = ["the Deno authors"]
 edition.workspace = true
 license = "MIT"

View file

@ -2,7 +2,7 @@
 [package]
 name = "deno_webidl"
-version = "0.181.0"
+version = "0.182.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

View file

@ -2,7 +2,7 @@
 [package]
 name = "deno_websocket"
-version = "0.186.0"
+version = "0.187.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

Some files were not shown because too many files have changed in this diff.