Mirror of https://github.com/denoland/deno.git (synced 2025-02-01 12:16:11 -05:00)

Merge branch 'main' into Add-Dynamic-Device-Path-Handling-for-Windows-File-Access

Commit b0db638420: 136 changed files with 3156 additions and 1057 deletions
@@ -1,9 +1,8 @@
FROM mcr.microsoft.com/vscode/devcontainers/rust:1-bullseye

-# Install cmake and protobuf-compiler
+# Install cmake
RUN apt-get update \
  && apt-get install -y cmake \
-  && apt-get install -y protobuf-compiler \
  && rm -rf /var/lib/apt/lists/*

# Install Deno
.github/workflows/cargo_publish.yml (vendored, 6 changes)

@@ -32,12 +32,6 @@ jobs:
        with:
          deno-version: v1.x

-      - name: Install protoc
-        uses: arduino/setup-protoc@v3
-        with:
-          version: '21.12'
-          repo-token: '${{ secrets.GITHUB_TOKEN }}'
-
      - name: Publish
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
.github/workflows/ci.generate.ts (vendored, 8 changes)

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 16;
+const cacheVersion = 18;

const ubuntuX86Runner = "ubuntu-22.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl";

@@ -191,11 +191,6 @@ const installNodeStep = {
  uses: "actions/setup-node@v4",
  with: { "node-version": 18 },
};
-const installProtocStep = {
-  name: "Install protoc",
-  uses: "arduino/setup-protoc@v3",
-  with: { "version": "21.12", "repo-token": "${{ secrets.GITHUB_TOKEN }}" },
-};
const installDenoStep = {
  name: "Install Deno",
  uses: "denoland/setup-deno@v1",

@@ -494,7 +489,6 @@ const ci = {
      if: "matrix.job == 'bench' || matrix.job == 'test'",
      ...installNodeStep,
    },
-    installProtocStep,
    {
      if: [
        "matrix.profile == 'release' &&",
.github/workflows/ci.yml (vendored, 14 changes)

@@ -199,12 +199,6 @@ jobs:
        uses: actions/setup-node@v4
        with:
          node-version: 18
-      - name: Install protoc
-        uses: arduino/setup-protoc@v3
-        with:
-          version: '21.12'
-          repo-token: '${{ secrets.GITHUB_TOKEN }}'
-        if: '!(matrix.skip)'
      - if: |-
          !(matrix.skip) && (matrix.profile == 'release' &&
          matrix.job == 'test' &&

@@ -367,8 +361,8 @@
          path: |-
            ~/.cargo/registry/index
            ~/.cargo/registry/cache
-          key: '16-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '16-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+          key: '18-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '18-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
        if: '!(matrix.skip)'
      - name: Restore cache build output (PR)
        uses: actions/cache/restore@v4

@@ -381,7 +375,7 @@
            !./target/*/*.zip
            !./target/*/*.tar.gz
          key: never_saved
-          restore-keys: '16-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '18-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
      - name: Apply and update mtime cache
        if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
        uses: ./.github/mtime_cache

@@ -691,7 +685,7 @@
            !./target/*/*.zip
            !./target/*/*.sha256sum
            !./target/*/*.tar.gz
          key: '16-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '18-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
  publish-canary:
    name: publish canary
    runs-on: ubuntu-22.04
Cargo.lock (generated, 528 changes): file diff suppressed because it is too large.
Cargo.toml (150 changes)

@@ -45,19 +45,19 @@ license = "MIT"
repository = "https://github.com/denoland/deno"

[workspace.dependencies]
-deno_ast = { version = "=0.42.1", features = ["transpiling"] }
+deno_ast = { version = "=0.42.2", features = ["transpiling"] }
deno_core = { version = "0.311.0" }

-deno_bench_util = { version = "0.163.0", path = "./bench_util" }
+deno_bench_util = { version = "0.165.0", path = "./bench_util" }
deno_lockfile = "=0.23.1"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
-deno_npm = "=0.25.2"
+deno_npm = "=0.25.3"
deno_path_util = "=0.2.0"
-deno_permissions = { version = "0.29.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.178.0", path = "./runtime" }
+deno_permissions = { version = "0.31.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.180.0", path = "./runtime" }
deno_semver = "=0.5.14"
deno_terminal = "0.2.0"
-napi_sym = { version = "0.99.0", path = "./cli/napi/sym" }
+napi_sym = { version = "0.101.0", path = "./cli/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }

denokv_proto = "0.8.1"

@@ -66,32 +66,32 @@ denokv_remote = "0.8.1"
denokv_sqlite = { default-features = false, version = "0.8.2" }

# exts
-deno_broadcast_channel = { version = "0.163.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.101.0", path = "./ext/cache" }
-deno_canvas = { version = "0.38.0", path = "./ext/canvas" }
-deno_console = { version = "0.169.0", path = "./ext/console" }
-deno_cron = { version = "0.49.0", path = "./ext/cron" }
-deno_crypto = { version = "0.183.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.193.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.156.0", path = "./ext/ffi" }
-deno_fs = { version = "0.79.0", path = "./ext/fs" }
-deno_http = { version = "0.167.0", path = "./ext/http" }
-deno_io = { version = "0.79.0", path = "./ext/io" }
-deno_kv = { version = "0.77.0", path = "./ext/kv" }
-deno_napi = { version = "0.100.0", path = "./ext/napi" }
-deno_net = { version = "0.161.0", path = "./ext/net" }
-deno_node = { version = "0.106.0", path = "./ext/node" }
-deno_tls = { version = "0.156.0", path = "./ext/tls" }
-deno_url = { version = "0.169.0", path = "./ext/url" }
-deno_web = { version = "0.200.0", path = "./ext/web" }
-deno_webgpu = { version = "0.136.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.169.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.174.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.164.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.165.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.103.0", path = "./ext/cache" }
+deno_canvas = { version = "0.40.0", path = "./ext/canvas" }
+deno_console = { version = "0.171.0", path = "./ext/console" }
+deno_cron = { version = "0.51.0", path = "./ext/cron" }
+deno_crypto = { version = "0.185.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.195.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.158.0", path = "./ext/ffi" }
+deno_fs = { version = "0.81.0", path = "./ext/fs" }
+deno_http = { version = "0.169.0", path = "./ext/http" }
+deno_io = { version = "0.81.0", path = "./ext/io" }
+deno_kv = { version = "0.79.0", path = "./ext/kv" }
+deno_napi = { version = "0.102.0", path = "./ext/napi" }
+deno_net = { version = "0.163.0", path = "./ext/net" }
+deno_node = { version = "0.108.0", path = "./ext/node" }
+deno_tls = { version = "0.158.0", path = "./ext/tls" }
+deno_url = { version = "0.171.0", path = "./ext/url" }
+deno_web = { version = "0.202.0", path = "./ext/web" }
+deno_webgpu = { version = "0.138.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.171.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.176.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.166.0", path = "./ext/webstorage" }

# resolvers
-deno_resolver = { version = "0.1.0", path = "./resolvers/deno" }
-node_resolver = { version = "0.8.0", path = "./resolvers/node" }
+deno_resolver = { version = "0.3.0", path = "./resolvers/deno" }
+node_resolver = { version = "0.10.0", path = "./resolvers/node" }

aes = "=0.8.3"
anyhow = "1.0.57"

@@ -111,7 +111,7 @@ dashmap = "5.5.3"
data-encoding = "2.3.3"
data-url = "=0.3.0"
deno_cache_dir = "=0.13.0"
-deno_package_json = { version = "0.1.1", default-features = false }
+deno_package_json = { version = "0.1.2", default-features = false }
dlopen2 = "0.6.1"
ecb = "=0.1.2"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }

@@ -139,7 +139,7 @@ ipnet = "2.3"
jsonc-parser = { version = "=0.23.0", features = ["serde"] }
lazy-regex = "3"
libc = "0.2.126"
-libz-sys = { version = "1.1", default-features = false }
+libz-sys = { version = "1.1.20", default-features = false }
log = "0.4.20"
lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
memmem = "0.1.1"

@@ -225,10 +225,9 @@ nix = "=0.26.2"
# windows deps
junction = "=0.2.0"
winapi = "=0.3.9"
-windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry"] }
+windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel"] }
winres = "=0.1.12"

-# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.release]
codegen-units = 1
incremental = true

@@ -246,13 +245,6 @@ inherits = "release"
codegen-units = 128
lto = "thin"

-# NB: the `bench` and `release` profiles must remain EXACTLY the same.
-[profile.bench]
-codegen-units = 1
-incremental = true
-lto = true
-opt-level = 'z' # Optimize for size
-
# Key generation is too slow on `debug`
[profile.dev.package.num-bigint-dig]
opt-level = 3

@@ -261,80 +253,6 @@ opt-level = 3
[profile.dev.package.v8]
opt-level = 1

-# Optimize these packages for performance.
-# NB: the `bench` and `release` profiles must remain EXACTLY the same.
-[profile.bench.package.async-compression]
-opt-level = 3
-[profile.bench.package.base64-simd]
-opt-level = 3
-[profile.bench.package.brotli]
-opt-level = 3
-[profile.bench.package.brotli-decompressor]
-opt-level = 3
-[profile.bench.package.bytes]
-opt-level = 3
-[profile.bench.package.deno_bench_util]
-opt-level = 3
-[profile.bench.package.deno_broadcast_channel]
-opt-level = 3
-[profile.bench.package.deno_core]
-opt-level = 3
-[profile.bench.package.deno_crypto]
-opt-level = 3
-[profile.bench.package.deno_fetch]
-opt-level = 3
-[profile.bench.package.deno_ffi]
-opt-level = 3
-[profile.bench.package.deno_http]
-opt-level = 3
-[profile.bench.package.deno_napi]
-opt-level = 3
-[profile.bench.package.deno_net]
-opt-level = 3
-[profile.bench.package.deno_node]
-opt-level = 3
-[profile.bench.package.deno_runtime]
-opt-level = 3
-[profile.bench.package.deno_tls]
-opt-level = 3
-[profile.bench.package.deno_url]
-opt-level = 3
-[profile.bench.package.deno_web]
-opt-level = 3
-[profile.bench.package.deno_websocket]
-opt-level = 3
-[profile.bench.package.fastwebsockets]
-opt-level = 3
-[profile.bench.package.flate2]
-opt-level = 3
-[profile.bench.package.futures-util]
-opt-level = 3
-[profile.bench.package.hyper]
-opt-level = 3
-[profile.bench.package.miniz_oxide]
-opt-level = 3
-[profile.bench.package.num-bigint-dig]
-opt-level = 3
-[profile.bench.package.rand]
-opt-level = 3
-[profile.bench.package.serde]
-opt-level = 3
-[profile.bench.package.serde_v8]
-opt-level = 3
-[profile.bench.package.test_napi]
-opt-level = 3
-[profile.bench.package.tokio]
-opt-level = 3
-[profile.bench.package.url]
-opt-level = 3
-[profile.bench.package.v8]
-opt-level = 3
-[profile.bench.package.zstd]
-opt-level = 3
-[profile.bench.package.zstd-sys]
-opt-level = 3
-
-# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.release.package.async-compression]
opt-level = 3
[profile.release.package.base64-simd]

@@ -393,6 +311,8 @@ opt-level = 3
opt-level = 3
[profile.release.package.serde_v8]
opt-level = 3
+[profile.release.package.libsui]
+opt-level = 3
[profile.release.package.test_napi]
opt-level = 3
[profile.release.package.tokio]
Releases.md (343 changes)

@@ -6,6 +6,349 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install

+### 2.0.0 / 2024.10.09
+
+Read announcement blog post at: https://deno.com/blog/v2
+
+- BREAKING: `DENO_FUTURE=1` by default, or welcome to Deno 2.0 (#25213)
+- BREAKING: disallow `new Deno.FsFile()` (#25478)
+- BREAKING: drop support for Deno.run.{clearEnv,gid,uid} (#25371)
+- BREAKING: improve types for `Deno.serve` (#25369)
+- BREAKING: improved error code accuracy (#25383)
+- BREAKING: make supported compilerOptions an allow list (#25432)
+- BREAKING: move `width` and `height` options to `UnsafeWindowSurface`
+  constructor (#24200)
+- BREAKING: remove --allow-hrtime (#25367)
+- BREAKING: remove "emit" and "map" from deno info output (#25468)
+- BREAKING: remove `--allow-none` flag (#25337)
+- BREAKING: remove `--jobs` flag (#25336)
+- BREAKING: remove `--trace-ops` (#25344)
+- BREAKING: remove `--ts` flag (#25338)
+- BREAKING: remove `--unstable` flag (#25522)
+- BREAKING: remove `deno bundle` (#25339)
+- BREAKING: remove `deno vendor` (#25343)
+- BREAKING: remove `Deno.[Tls]Listener.prototype.rid` (#25556)
+- BREAKING: remove `Deno.{Conn,TlsConn,TcpConn,UnixConn}.prototype.rid` (#25446)
+- BREAKING: remove `Deno.{Reader,Writer}[Sync]` and `Deno.Closer` (#25524)
+- BREAKING: remove `Deno.Buffer` (#25441)
+- BREAKING: remove `Deno.close()` (#25347)
+- BREAKING: remove `Deno.ConnectTlsOptions.{certChain,certFile,privateKey}` and
+  `Deno.ListenTlsOptions.certChain,certFile,keyFile}` (#25525)
+- BREAKING: remove `Deno.copy()` (#25345)
+- BREAKING: remove `Deno.customInspect` (#25348)
+- BREAKING: remove `Deno.fdatasync[Sync]()` (#25520)
+- BREAKING: remove `Deno.File` (#25447)
+- BREAKING: remove `Deno.flock[Sync]()` (#25350)
+- BREAKING: remove `Deno.FsFile.prototype.rid` (#25499)
+- BREAKING: remove `Deno.fstat[Sync]()` (#25351)
+- BREAKING: remove `Deno.FsWatcher.prototype.rid` (#25444)
+- BREAKING: remove `Deno.fsync[Sync]()` (#25448)
+- BREAKING: remove `Deno.ftruncate[Sync]()` (#25412)
+- BREAKING: remove `Deno.funlock[Sync]()` (#25442)
+- BREAKING: remove `Deno.futime[Sync]()` (#25252)
+- BREAKING: remove `Deno.iter[Sync]()` (#25346)
+- BREAKING: remove `Deno.read[Sync]()` (#25409)
+- BREAKING: remove `Deno.readAll[Sync]()` (#25386)
+- BREAKING: remove `Deno.seek[Sync]()` (#25449)
+- BREAKING: remove `Deno.Seeker[Sync]` (#25551)
+- BREAKING: remove `Deno.shutdown()` (#25253)
+- BREAKING: remove `Deno.write[Sync]()` (#25408)
+- BREAKING: remove `Deno.writeAll[Sync]()` (#25407)
+- BREAKING: remove deprecated `UnsafeFnPointer` constructor type with untyped
+  `Deno.PointerObject` parameter (#25577)
+- BREAKING: remove deprecated files config (#25535)
+- BREAKING: Remove obsoleted Temporal APIs part 2 (#25505)
+- BREAKING: remove remaining web types for compatibility (#25334)
+- BREAKING: remove support for remote import maps in deno.json (#25836)
+- BREAKING: rename "deps" remote cache folder to "remote" (#25969)
+- BREAKING: soft-remove `Deno.isatty()` (#25410)
+- BREAKING: soft-remove `Deno.run()` (#25403)
+- BREAKING: soft-remove `Deno.serveHttp()` (#25451)
+- BREAKING: undeprecate `Deno.FsWatcher.prototype.return()` (#25623)
+- feat: add `--allow-import` flag (#25469)
+- feat: Add a hint on error about 'Relative import path ... not prefixed with
+  ...' (#25430)
+- feat: Add better error messages for unstable APIs (#25519)
+- feat: Add suggestion for packages using Node-API addons (#25975)
+- feat: Allow importing .cjs files (#25426)
+- feat: default to TS for file extension and support ext flag in more scenarios
+  (#25472)
+- feat: deprecate import assertions (#25281)
+- feat: Don't warn about --allow-script when using esbuild (#25894)
+- feat: hide several --unstable-* flags (#25378)
+- feat: improve lockfile v4 to store normalized version constraints and be more
+  terse (#25247)
+- feat: improve warnings for deprecations and lifecycle script for npm packages
+  (#25694)
+- feat: include version number in all --json based outputs (#25335)
+- feat: lockfile v4 by default (#25165)
+- feat: make 'globalThis.location' a configurable property (#25812)
+- feat: print `Listening on` messages on stderr instead of stdout (#25491)
+- feat: remove `--lock-write` flag (#25214)
+- feat: require jsr prefix for `deno install` and `deno add` (#25698)
+- feat: require(esm) (#25501)
+- feat: Show hints when using `window` global (#25805)
+- feat: stabilize `Deno.createHttpClient()` (#25569)
+- feat: suggest `deno install --entrypoint` instead of `deno cache` (#25228)
+- feat: support DENO_LOG env var instead of RUST_LOG (#25356)
+- feat: TypeScript 5.6 and `npm:@types/node@22` (#25614)
+- feat: Update no-window lint rule (#25486)
+- feat: update warning message for --allow-run with no list (#25693)
+- feat: warn when using `--allow-run` with no allow list (#25215)
+- feat(add): Add npm packages to package.json if present (#25477)
+- feat(add): strip package subpath when adding a package (#25419)
+- feat(add/install): Flag to add dev dependency to package.json (#25495)
+- feat(byonm): support `deno run npm:<package>` when package is not in
+  package.json (#25981)
+- feat(check): turn on noImplicitOverride (#25695)
+- feat(check): turn on useUnknownInCatchVariables (#25465)
+- feat(cli): evaluate code snippets in JSDoc and markdown (#25220)
+- feat(cli): give access to `process` global everywhere (#25291)
+- feat(cli): use NotCapable error for permission errors (#25431)
+- feat(config): Node modules option for 2.0 (#25299)
+- feat(ext/crypto): import and export p521 keys (#25789)
+- feat(ext/crypto): X448 support (#26043)
+- feat(ext/kv): configurable limit params (#25174)
+- feat(ext/node): add abort helpers, process & streams fix (#25262)
+- feat(ext/node): add rootCertificates to node:tls (#25707)
+- feat(ext/node): buffer.transcode() (#25972)
+- feat(ext/node): export 'promises' symbol from 'node:timers' (#25589)
+- feat(ext/node): export missing constants from 'zlib' module (#25584)
+- feat(ext/node): export missing symbols from domain, puncode, repl, tls
+  (#25585)
+- feat(ext/node): export more symbols from streams and timers/promises (#25582)
+- feat(ext/node): expose ES modules for _ modules (#25588)
+- feat(flags): allow double commas to escape values in path based flags (#25453)
+- feat(flags): support user provided args in repl subcommand (#25605)
+- feat(fmt): better error on malfored HTML files (#25853)
+- feat(fmt): stabilize CSS, HTML and YAML formatters (#25753)
+- feat(fmt): support vto and njk extensions (#25831)
+- feat(fmt): upgrade markup_fmt (#25768)
+- feat(install): deno install with entrypoint (#25411)
+- feat(install): warn repeatedly about not-run lifecycle scripts on explicit
+  installs (#25878)
+- feat(lint): add `no-process-global` lint rule (#25709)
+- feat(lsp): add a message when someone runs 'deno lsp' manually (#26051)
+- feat(lsp): auto-import types with 'import type' (#25662)
+- feat(lsp): html/css/yaml file formatting (#25353)
+- feat(lsp): quick fix for @deno-types="npm:@types/*" (#25954)
+- feat(lsp): turn on useUnknownInCatchVariables (#25474)
+- feat(lsp): unstable setting as list (#25552)
+- feat(permissions): `Deno.mainModule` doesn't require permissions (#25667)
+- feat(permissions): allow importing from cdn.jsdelivr.net by default (#26013)
+- feat(serve): Support second parameter in deno serve (#25606)
+- feat(tools/doc): display subitems in symbol overviews where applicable
+  (#25885)
+- feat(uninstall): alias to 'deno remove' if -g flag missing (#25461)
+- feat(upgrade): better error message on failure (#25503)
+- feat(upgrade): print info links for Deno 2 RC releases (#25225)
+- feat(upgrade): support LTS release channel (#25123)
+- fix: add link to env var docs (#25557)
+- fix: add suggestion how to fix importing CJS module (#21764)
+- fix: add test ensuring als works across dynamic import (#25593)
+- fix: better error for Deno.UnsafeWindowSurface, correct HttpClient name,
+  cleanup unused code (#25833)
+- fix: cjs resolution cases (#25739)
+- fix: consistent with deno_config and treat `"experimentalDecorators"` as
+  deprecated (#25735)
+- fix: delete old Deno 1.x headers file when loading cache (#25283)
+- fix: do not panic running invalid file specifier (#25530)
+- fix: don't include extensionless files in file collection for lint & fmt by
+  default (#25721)
+- fix: don't prompt when using `Deno.permissions.request` with `--no-prompt`
+  (#25811)
+- fix: eagerly error for specifier with empty version constraint (#25944)
+- fix: enable `Win32_Security` feature in `windows-sys` (#26007)
+- fix: error on unsupported compiler options (#25714)
+- fix: error out if a valid flag is passed before a subcommand (#25830)
+- fix: fix jupyter display function type (#25326)
+- fix: Float16Array type (#25506)
+- fix: handle showing warnings while the progress bar is shown (#25187)
+- fix: Hide 'deno cache' from help output (#25960)
+- fix: invalid ipv6 hostname on `deno serve` (#25482)
+- fix: linux canonicalization checks (#24641)
+- fix: lock down allow-run permissions more (#25370)
+- fix: make some warnings more standard (#25324)
+- fix: no cmd prefix in help output go links (#25459)
+- fix: only enable byonm if workspace root has pkg json (#25379)
+- fix: panic when require(esm) (#25769)
+- fix: precompile preserve SVG camelCase attributes (#25945)
+- fix: reland async context (#25140)
+- fix: remove --allow-run warning when using deno without args or subcommand
+  (#25684)
+- fix: remove entrypoint hack for Deno 2.0 (#25332)
+- fix: remove recently added deno.json node_modules aliasing (#25542)
+- fix: remove the typo in the help message (#25962)
+- fix: removed unstable-htttp from deno help (#25216)
+- fix: replace `npm install` hint with `deno install` hint (#25244)
+- fix: trim space around DENO_AUTH_TOKENS (#25147)
+- fix: update deno_doc (#25290)
+- fix: Update deno_npm to fix `deno install` with crossws (#25837)
+- fix: update hint for `deno add <package>` (#25455)
+- fix: update malva in deno to support astro css comments (#25553)
+- fix: update nodeModulesDir config JSON schema (#25653)
+- fix: update patchver to 0.2 (#25952)
+- fix: update sui to 0.4 (#25942)
+- fix: upgrade deno_ast 0.42 (#25313)
+- fix: upgrade deno_core to 0.307.0 (#25287)
+- fix(add/install): default to "latest" tag for npm packages in
+  `deno add npm:pkg` (#25858)
+- fix(bench): Fix table column alignments and NO_COLOR=1 (#25190)
+- fix(BREAKING): make dns record types have consistent naming (#25357)
+- fix(byonm): resolve npm deps of jsr deps (#25399)
+- fix(check): ignore noImplicitOverrides in remote modules (#25854)
+- fix(check): move is cjs check from resolving to loading (#25597)
+- fix(check): properly surface dependency errors in types file of js file
+  (#25860)
+- fix(cli): `deno task` exit with status 0 (#25637)
+- fix(cli): Default to auto with --node-modules-dir flag (#25772)
+- fix(cli): handle edge cases around `export`s in doc tests and default export
+  (#25720)
+- fix(cli): Map error kind to `PermissionDenied` when symlinking fails due to
+  permissions (#25398)
+- fix(cli): Only set allow net flag for deno serve if not already allowed all
+  (#25743)
+- fix(cli): Warn on not-run lifecycle scripts with global cache (#25786)
+- fix(cli/tools): correct `deno init --serve` template behavior (#25318)
+- fix(compile): support 'deno compile' in RC and LTS releases (#25875)
+- fix(config): validate export names (#25436)
+- fix(coverage): ignore urls from doc testing (#25736)
+- fix(doc): surface graph errors as warnings (#25888)
+- fix(dts): stabilize `fetch` declaration for use with `Deno.HttpClient`
+  (#25683)
+- fix(ext/console): more precision in console.time (#25723)
+- fix(ext/console): prevent duplicate error printing when the cause is assigned
+  (#25327)
+- fix(ext/crypto): ensure EC public keys are exported uncompressed (#25766)
+- fix(ext/crypto): fix identity test for x25519 derive bits (#26011)
+- fix(ext/crypto): reject empty usages in SubtleCrypto#importKey (#25759)
+- fix(ext/crypto): support md4 digest algorithm (#25656)
+- fix(ext/crypto): throw DataError for invalid EC key import (#25181)
+- fix(ext/fetch): fix lowercase http_proxy classified as https (#25686)
+- fix(ext/fetch): percent decode userinfo when parsing proxies (#25229)
+- fix(ext/http): do not set localhost to hostname unnecessarily (#24777)
+- fix(ext/http): gracefully handle Response.error responses (#25712)
+- fix(ext/node): add `FileHandle#writeFile` (#25555)
+- fix(ext/node): add `vm.constants` (#25630)
+- fix(ext/node): Add missing `node:path` exports (#25567)
+- fix(ext/node): Add missing node:fs and node:constants exports (#25568)
+- fix(ext/node): add stubs for `node:trace_events` (#25628)
+- fix(ext/node): attach console stream properties (#25617)
+- fix(ext/node): avoid showing `UNKNOWN` error from TCP handle (#25550)
+- fix(ext/node): close upgraded socket when the underlying http connection is
+  closed (#25387)
+- fix(ext/node): delay accept() call 2 ticks in net.Server#listen (#25481)
+- fix(ext/node): don't throw error for unsupported signal binding on windows
+  (#25699)
+- fix(ext/node): emit `online` event after worker thread is initialized (#25243)
+- fix(ext/node): export `process.allowedNodeEnvironmentFlags` (#25629)
+- fix(ext/node): export JWK public key (#25239)
+- fix(ext/node): export request and response clases from `http2` module (#25592)
+- fix(ext/node): fix `Cipheriv#update(string, undefined)` (#25571)
+- fix(ext/node): fix Decipheriv when autoPadding disabled (#25598)
+- fix(ext/node): fix process.stdin.pause() (#25864)
+- fix(ext/node): Fix vm sandbox object panic (#24985)
+- fix(ext/node): http2session ready state (#25143)
+- fix(ext/node): Implement detached option in `child_process` (#25218)
+- fix(ext/node): import EC JWK keys (#25266)
+- fix(ext/node): import JWK octet key pairs (#25180)
+- fix(ext/node): import RSA JWK keys (#25267)
+- fix(ext/node): register `node:wasi` built-in (#25134)
+- fix(ext/node): remove unimplemented promiseHook stubs (#25979)
+- fix(ext/node): report freemem() on Linux in bytes (#25511)
+- fix(ext/node): Rewrite `node:v8` serialize/deserialize (#25439)
+- fix(ext/node): session close during stream setup (#25170)
+- fix(ext/node): Stream should be instance of EventEmitter (#25527)
+- fix(ext/node): stub `inspector/promises` (#25635)
+- fix(ext/node): stub `process.cpuUsage()` (#25462)
+- fix(ext/node): stub cpu_info() for OpenBSD (#25807)
+- fix(ext/node): support x509 certificates in `createPublicKey` (#25731)
+- fix(ext/node): throw when loading `cpu-features` module (#25257)
+- fix(ext/node): update aead-gcm-stream to 0.3 (#25261)
+- fix(ext/node): use primordials in `ext/node/polyfills/console.ts` (#25572)
+- fix(ext/node): use primordials in ext/node/polyfills/wasi.ts (#25608)
+- fix(ext/node): validate input lengths in `Cipheriv` and `Decipheriv` (#25570)
+- fix(ext/web): don't ignore capture in EventTarget.removeEventListener (#25788)
+- fix(ext/webgpu): allow to build on unsupported platforms (#25202)
+- fix(ext/webgpu): sync category comment (#25580)
+- fix(ext/webstorage): make `getOwnPropertyDescriptor` with symbol return
+  `undefined` (#13348)
+- fix(flags): --allow-all should conflict with lower permissions (#25909)
+- fix(flags): don't treat empty run command as task subcommand (#25708)
+- fix(flags): move some content from docs.deno.com into help output (#25951)
+- fix(flags): properly error out for urls (#25770)
+- fix(flags): require global flag for permission flags in install subcommand
+  (#25391)
+- fix(fmt): --check was broken for CSS, YAML and HTML (#25848)
+- fix(fmt): fix incorrect quotes in components (#25249)
+- fix(fmt): fix tabs in YAML (#25536)
+- fix(fmt/markdown): fix regression with multi-line footnotes and inline math
+  (#25222)
+- fix(info): error instead of panic for npm specifiers when using byonm (#25947)
+- fix(info): move "version" field to top of json output (#25890)
+- fix(inspector): Fix panic when re-entering runtime ops (#25537)
+- fix(install): compare versions directly to decide whether to create a child
+  node_modules dir for a workspace member (#26001)
+- fix(install): Make sure target node_modules exists when symlinking (#25494)
+- fix(install): recommend using `deno install -g` when using a single http url
+  (#25388)
+- fix(install): store tags associated with package in node_modules dir (#26000)
+- fix(install): surface package.json dependency errors (#26023)
+- fix(install): Use relative symlinks in deno install (#25164)
+- fix(installl): make bin entries executable even if not put in
+  `node_modules/.bin` (#25873)
+- fix(jupyter): allow unstable flags (#25483)
+- fix(lint): correctly handle old jsx in linter (#25902)
+- fix(lint): support linting jsr pkg without version field (#25230)
+- fix(lockfile): use loose deserialization for version constraints (#25660)
+- fix(lsp): encode url parts before parsing as uri (#25509)
+- fix(lsp): exclude missing import quick fixes with bad resolutions (#26025)
+- fix(lsp): panic on url_to_uri() (#25238)
+- fix(lsp): properly resolve jsxImportSource for caching (#25688)
+- fix(lsp): update diagnostics on npm install (#25352)
+- fix(napi): Don't run microtasks in napi_resolve_deferred (#25246)
+- fix(napi): Fix worker threads importing already-loaded NAPI addon (#25245)
+- fix(no-slow-types): better `override` handling (#25989)
+- fix(node): Don't error out if we fail to statically analyze CJS re-export
+  (#25748)
+- fix(node): fix worker_threads issues blocking Angular support (#26024)
+- fix(node): implement libuv APIs needed to support `npm:sqlite3` (#25893)
+- fix(node): Include "node" condition during CJS re-export analysis (#25785)
+- fix(node): Pass NPM_PROCESS_STATE to subprocesses via temp file instead of env
+  var (#25896)
+- fix(node/byonm): do not accidentally resolve bare node built-ins (#25543)
+- fix(node/cluster): improve stubs to make log4js work (#25146)
+- fix(npm): better error handling for remote npm deps (#25670)
+- fix(npm): root package has peer dependency on itself (#26022)
+- fix(permissions): disallow any `LD_` or `DYLD_` prefixed env var without full
+  --allow-run permissions (#25271)
+- fix(permissions): disallow launching subprocess with LD_PRELOAD env var
+  without full run permissions (#25221)
+- fix(publish): ensure provenance is spec compliant (#25200)
+- fix(regression): do not expose resolved path in Deno.Command permission denied
+  error (#25434)
+- fix(runtime): don't error `child.output()` on consumed stream (#25657)
+- fix(runtime): use more null proto objects again (#25040)
+- fix(runtime/web_worker): populate `SnapshotOptions` for `WebWorker` when
+  instantiated without snapshot (#25280)
+- fix(task): correct name for scoped npm package binaries (#25390)
+- fix(task): support tasks with colons in name in `deno run` (#25233)
+- fix(task): use current executable for deno even when not named deno (#26019)
+- fix(types): simplify mtls related types (#25658)
+- fix(upgrade): more informative information on invalid version (#25319)
+- fix(windows): Deno.Command - align binary resolution with linux and mac
+  (#25429)
+- fix(workspace): handle when config has members when specified via --config
+  (#25988)
+- perf: fast path for cached dyn imports (#25636)
+- perf: Use -O3 for sui in release builds (#26010)
+- perf(cache): single cache file for remote modules (#24983)
+- perf(cache): single cache file for typescript emit (#24994)
+- perf(ext/fetch): improve decompression throughput by upgrading `tower_http`
+  (#25806)
+- perf(ext/node): reduce some allocations in require (#25197)
+- perf(ext/web): optimize performance.measure() (#25774)
+
### 1.46.3 / 2024.09.04

- feat(upgrade): print info links for Deno 2 RC releases (#25225)
@@ -2,7 +2,7 @@

[package]
name = "deno_bench_util"
-version = "0.163.0"
+version = "0.165.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -2,7 +2,7 @@

[package]
name = "deno"
-version = "2.0.0-rc.9"
+version = "2.0.0"
authors.workspace = true
default-run = "deno"
edition.workspace = true

@@ -65,10 +65,10 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
-deno_config = { version = "=0.35.0", features = ["workspace", "sync"] }
+deno_config = { version = "=0.37.1", features = ["workspace", "sync"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "0.150.1", features = ["html", "syntect"] }
-deno_graph = { version = "=0.83.0" }
+deno_doc = { version = "0.152.0", features = ["html"] }
+deno_graph = { version = "=0.83.3" }
deno_lint = { version = "=0.67.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm.workspace = true

@@ -77,7 +77,7 @@ deno_path_util.workspace = true
deno_resolver.workspace = true
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
-deno_task_shell = "=0.17.0"
+deno_task_shell = "=0.18.1"
deno_terminal.workspace = true
eszip = "=0.79.1"
libsui = "0.4.0"

@@ -170,12 +170,14 @@ zstd.workspace = true
[target.'cfg(windows)'.dependencies]
junction.workspace = true
winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] }
+windows-sys.workspace = true

[target.'cfg(unix)'.dependencies]
nix.workspace = true

[dev-dependencies]
deno_bench_util.workspace = true
+libuv-sys-lite = "=1.48.2"
pretty_assertions.workspace = true
test_util.workspace = true
@@ -580,6 +580,15 @@ pub struct UnstableConfig {
  pub features: Vec<String>, // --unstabe-kv --unstable-cron
}

+#[derive(Clone, Debug, Eq, PartialEq, Default)]
+pub struct InternalFlags {
+  /// Used when the language server is configured with an
+  /// explicit cache option.
+  pub cache_path: Option<PathBuf>,
+  /// Only reads to the lockfile instead of writing to it.
+  pub lockfile_skip_write: bool,
+}
+
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct Flags {
  /// Vector of CLI arguments - these are user script arguments, all Deno

@@ -591,9 +600,6 @@ pub struct Flags {
  pub ca_stores: Option<Vec<String>>,
  pub ca_data: Option<CaData>,
  pub cache_blocklist: Vec<String>,
-  /// This is not exposed as an option in the CLI, it is used internally when
-  /// the language server is configured with an explicit cache option.
-  pub cache_path: Option<PathBuf>,
  pub cached_only: bool,
  pub type_check_mode: TypeCheckMode,
  pub config_flag: ConfigFlag,

@@ -602,6 +608,8 @@ pub struct Flags {
  pub enable_op_summary_metrics: bool,
  pub enable_testing_features: bool,
  pub ext: Option<String>,
+  /// Flags that aren't exposed in the CLI, but are used internally.
+  pub internal: InternalFlags,
  pub ignore: Vec<String>,
  pub import_map_path: Option<String>,
  pub env_file: Option<String>,

@@ -688,9 +696,10 @@ impl PermissionFlags {
  }

  let builtin_allowed_import_hosts = [
-    "jsr.io:443",
    "deno.land:443",
    "esm.sh:443",
+    "jsr.io:443",
+    "cdn.jsdelivr.net:443",
    "raw.githubusercontent.com:443",
    "gist.githubusercontent.com:443",
  ];

@@ -2901,6 +2910,7 @@ List all available tasks:
        .help("Specify the directory to run the task in")
        .value_hint(ValueHint::DirPath),
    )
+    .arg(node_modules_dir_arg())
  })
}

@@ -3253,7 +3263,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
  <g>-W, --allow-write[=<<PATH>...]</> Allow file system write access. Optionally specify allowed paths.
  <p(245)>--allow-write | --allow-write="/etc,/var/log.txt"</>
  <g>-I, --allow-import[=<<IP_OR_HOSTNAME>...]</> Allow importing from remote hosts. Optionally specify allowed IP addresses and host names, with ports as necessary.
-  Default value: <p(245)>deno.land:443,jsr.io:443,esm.sh:443,raw.githubusercontent.com:443,user.githubusercontent.com:443</>
+  Default value: <p(245)>deno.land:443,jsr.io:443,esm.sh:443,cdn.jsdelivr.net:443,raw.githubusercontent.com:443,user.githubusercontent.com:443</>
  <p(245)>--allow-import | --allow-import="example.com,github.com"</>
  <g>-N, --allow-net[=<<IP_OR_HOSTNAME>...]</> Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary.
  <p(245)>--allow-net | --allow-net="localhost:8080,deno.land"</>

@@ -3663,7 +3673,7 @@ fn allow_import_arg() -> Arg {
    .require_equals(true)
    .value_name("IP_OR_HOSTNAME")
    .help(cstr!(
-      "Allow importing from remote hosts. Optionally specify allowed IP addresses and host names, with ports as necessary. Default value: <p(245)>deno.land:443,jsr.io:443,esm.sh:443,raw.githubusercontent.com:443,user.githubusercontent.com:443</>"
+      "Allow importing from remote hosts. Optionally specify allowed IP addresses and host names, with ports as necessary. Default value: <p(245)>deno.land:443,jsr.io:443,esm.sh:443,cdn.jsdelivr.net:443,raw.githubusercontent.com:443,user.githubusercontent.com:443</>"
    ))
    .value_parser(flags_net::validator)
}

@@ -4965,6 +4975,7 @@ fn task_parse(flags: &mut Flags, matches: &mut ArgMatches) {
    .unwrap_or(ConfigFlag::Discover);

  unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
+  node_modules_arg_parse(flags, matches);

  let mut task_flags = TaskFlags {
    cwd: matches.remove_one::<String>("cwd"),
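The flags.rs hunks above move internal-only options out of the top-level Flags struct and into the new InternalFlags group. The following stand-alone sketch is illustrative only and not part of the commit: it mirrors the two structs in simplified form to show how a caller is expected to populate the new `internal` field (the cli/lsp/language_server.rs hunk further down does this with the real types).

use std::path::PathBuf;

// Simplified mirrors of the structs from the hunks above; the real Flags
// struct has many more fields.
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct InternalFlags {
  /// Used when the language server is configured with an explicit cache option.
  pub cache_path: Option<PathBuf>,
  /// Only reads the lockfile instead of writing to it.
  pub lockfile_skip_write: bool,
}

#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct Flags {
  /// Flags that aren't exposed in the CLI, but are used internally.
  pub internal: InternalFlags,
  pub cached_only: bool,
}

fn main() {
  // Internal-only options now live under `flags.internal` instead of being
  // top-level fields such as the removed `flags.cache_path`.
  let flags = Flags {
    internal: InternalFlags {
      cache_path: Some(PathBuf::from("/tmp/deno_dir")),
      ..Default::default()
    },
    ..Default::default()
  };
  assert!(flags.internal.cache_path.is_some());
  assert!(!flags.internal.lockfile_skip_write);
}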
@@ -24,11 +24,20 @@ use crate::args::InstallKind;

use deno_lockfile::Lockfile;

+#[derive(Debug)]
+pub struct CliLockfileReadFromPathOptions {
+  pub file_path: PathBuf,
+  pub frozen: bool,
+  /// Causes the lockfile to only be read from, but not written to.
+  pub skip_write: bool,
+}
+
#[derive(Debug)]
pub struct CliLockfile {
  lockfile: Mutex<Lockfile>,
  pub filename: PathBuf,
-  pub frozen: bool,
+  frozen: bool,
+  skip_write: bool,
}

pub struct Guard<'a, T> {

@@ -50,15 +59,6 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
}

impl CliLockfile {
-  pub fn new(lockfile: Lockfile, frozen: bool) -> Self {
-    let filename = lockfile.filename.clone();
-    Self {
-      lockfile: Mutex::new(lockfile),
-      filename,
-      frozen,
-    }
-  }
-
  /// Get the inner deno_lockfile::Lockfile.
  pub fn lock(&self) -> Guard<Lockfile> {
    Guard {

@@ -78,6 +78,10 @@ impl CliLockfile {
  }

  pub fn write_if_changed(&self) -> Result<(), AnyError> {
+    if self.skip_write {
+      return Ok(());
+    }
+
    self.error_if_changed()?;
    let mut lockfile = self.lockfile.lock();
    let Some(bytes) = lockfile.resolve_write_bytes() else {

@@ -142,7 +146,7 @@ impl CliLockfile {
      return Ok(None);
    }

-    let filename = match flags.lock {
+    let file_path = match flags.lock {
      Some(ref lock) => PathBuf::from(lock),
      None => match workspace.resolve_lockfile_path()? {
        Some(path) => path,

@@ -160,7 +164,11 @@ impl CliLockfile {
        .unwrap_or(false)
    });

-    let lockfile = Self::read_from_path(filename, frozen)?;
+    let lockfile = Self::read_from_path(CliLockfileReadFromPathOptions {
+      file_path,
+      frozen,
+      skip_write: flags.internal.lockfile_skip_write,
+    })?;

    // initialize the lockfile with the workspace's configuration
    let root_url = workspace.root_dir();

@@ -212,25 +220,29 @@ impl CliLockfile {
  }

  pub fn read_from_path(
-    file_path: PathBuf,
-    frozen: bool,
+    opts: CliLockfileReadFromPathOptions,
  ) -> Result<CliLockfile, AnyError> {
-    match std::fs::read_to_string(&file_path) {
-      Ok(text) => Ok(CliLockfile::new(
-        Lockfile::new(deno_lockfile::NewLockfileOptions {
-          file_path,
-          content: &text,
-          overwrite: false,
-        })?,
-        frozen,
-      )),
-      Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(
-        CliLockfile::new(Lockfile::new_empty(file_path, false), frozen),
-      ),
-      Err(err) => Err(err).with_context(|| {
-        format!("Failed reading lockfile '{}'", file_path.display())
-      }),
-    }
+    let lockfile = match std::fs::read_to_string(&opts.file_path) {
+      Ok(text) => Lockfile::new(deno_lockfile::NewLockfileOptions {
+        file_path: opts.file_path,
+        content: &text,
+        overwrite: false,
+      })?,
+      Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+        Lockfile::new_empty(opts.file_path, false)
+      }
+      Err(err) => {
+        return Err(err).with_context(|| {
+          format!("Failed reading lockfile '{}'", opts.file_path.display())
+        });
+      }
+    };
+    Ok(CliLockfile {
+      filename: lockfile.filename.clone(),
+      lockfile: Mutex::new(lockfile),
+      frozen: opts.frozen,
+      skip_write: opts.skip_write,
+    })
  }

  pub fn error_if_changed(&self) -> Result<(), AnyError> {
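In the hunks above, read_from_path now takes a single CliLockfileReadFromPathOptions value instead of positional (file_path, frozen) arguments, and write_if_changed returns early when skip_write is set. Below is a minimal illustrative sketch of the intended call shape, using a simplified stand-in type rather than the real CliLockfile; the cli/lsp/config.rs hunk further down shows the real call site.

use std::path::PathBuf;

// Simplified stand-in for the options struct introduced in the hunks above.
pub struct CliLockfileReadFromPathOptions {
  pub file_path: PathBuf,
  pub frozen: bool,
  /// Causes the lockfile to only be read from, but not written to.
  pub skip_write: bool,
}

fn main() {
  // Callers bundle everything into one options value; adding another option
  // later will not break existing call sites the way a new positional
  // argument would.
  let opts = CliLockfileReadFromPathOptions {
    file_path: PathBuf::from("deno.lock"),
    frozen: false,
    // In the real CLI this comes from flags.internal.lockfile_skip_write.
    skip_write: true,
  };

  // write_if_changed() in the diff returns Ok(()) immediately in this case,
  // so the lockfile is treated as read-only for the whole run.
  if opts.skip_write {
    println!("{} will not be written to", opts.file_path.display());
  }
  let _ = opts.frozen;
}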
@@ -44,6 +44,7 @@ pub use deno_config::glob::FilePatterns;
pub use deno_json::check_warn_tsconfig;
pub use flags::*;
pub use lockfile::CliLockfile;
+pub use lockfile::CliLockfileReadFromPathOptions;
pub use package_json::NpmInstallDepsProvider;

use deno_ast::ModuleSpecifier;

@@ -824,11 +825,9 @@ impl CliOptions {
      }
    }

-    warn_insecure_allow_run_flags(&flags);
-
    let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache);
    let deno_dir_provider =
-      Arc::new(DenoDirProvider::new(flags.cache_path.clone()));
+      Arc::new(DenoDirProvider::new(flags.internal.cache_path.clone()));
    let maybe_node_modules_folder = resolve_node_modules_folder(
      &initial_cwd,
      &flags,

@@ -1710,27 +1709,6 @@ impl CliOptions {
  }
}

-/// Warns for specific uses of `--allow-run`. This function is not
-/// intended to catch every single possible insecure use of `--allow-run`,
-/// but is just an attempt to discourage some common pitfalls.
-fn warn_insecure_allow_run_flags(flags: &Flags) {
-  let permissions = &flags.permissions;
-  if permissions.allow_all {
-    return;
-  }
-  let Some(allow_run_list) = permissions.allow_run.as_ref() else {
-    return;
-  };
-
-  // discourage using --allow-run without an allow list
-  if allow_run_list.is_empty() {
-    log::warn!(
-      "{} --allow-run without an allow list is susceptible to exploits. Prefer specifying an allow list (https://docs.deno.com/runtime/fundamentals/security/#running-subprocesses)",
-      colors::yellow("Warning")
-    );
-  }
-}
-
/// Resolves the path to use for a local node_modules folder.
fn resolve_node_modules_folder(
  cwd: &Path,
@@ -6,6 +6,7 @@ use std::sync::Arc;
use deno_config::workspace::Workspace;
use deno_core::serde_json;
use deno_package_json::PackageJsonDepValue;
+use deno_package_json::PackageJsonDepValueParseError;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;

@@ -26,6 +27,7 @@ pub struct InstallNpmWorkspacePkg {
pub struct NpmInstallDepsProvider {
  remote_pkgs: Vec<InstallNpmRemotePkg>,
  workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
+  pkg_json_dep_errors: Vec<PackageJsonDepValueParseError>,
}

impl NpmInstallDepsProvider {

@@ -37,6 +39,7 @@ impl NpmInstallDepsProvider {
    // todo(dsherret): estimate capacity?
    let mut workspace_pkgs = Vec::new();
    let mut remote_pkgs = Vec::new();
+    let mut pkg_json_dep_errors = Vec::new();
    let workspace_npm_pkgs = workspace.npm_packages();

    for (_, folder) in workspace.config_folders() {

@@ -83,8 +86,12 @@ impl NpmInstallDepsProvider {
        let deps = pkg_json.resolve_local_package_json_deps();
        let mut pkg_pkgs = Vec::with_capacity(deps.len());
        for (alias, dep) in deps {
-          let Ok(dep) = dep else {
-            continue;
+          let dep = match dep {
+            Ok(dep) => dep,
+            Err(err) => {
+              pkg_json_dep_errors.push(err);
+              continue;
+            }
          };
          match dep {
            PackageJsonDepValue::Req(pkg_req) => {

@@ -131,14 +138,19 @@ impl NpmInstallDepsProvider {
    Self {
      remote_pkgs,
      workspace_pkgs,
+      pkg_json_dep_errors,
    }
  }

-  pub fn remote_pkgs(&self) -> &Vec<InstallNpmRemotePkg> {
+  pub fn remote_pkgs(&self) -> &[InstallNpmRemotePkg] {
    &self.remote_pkgs
  }

-  pub fn workspace_pkgs(&self) -> &Vec<InstallNpmWorkspacePkg> {
+  pub fn workspace_pkgs(&self) -> &[InstallNpmWorkspacePkg] {
    &self.workspace_pkgs
  }

+  pub fn pkg_json_dep_errors(&self) -> &[PackageJsonDepValueParseError] {
+    &self.pkg_json_dep_errors
+  }
}
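A small detail in the last hunk above: the getters now return `&[T]` slices instead of `&Vec<T>`. The sketch below is generic example code, not Deno code, and shows why that signature is usually preferred: the borrow still points at the same data, but the concrete container is no longer part of the public API.

// Generic illustration of returning `&[T]` instead of `&Vec<T>`.
struct Provider {
  remote_pkgs: Vec<String>,
}

impl Provider {
  // `&self.remote_pkgs` is a `&Vec<String>` and coerces to `&[String]`,
  // so the change is free for the implementation.
  fn remote_pkgs(&self) -> &[String] {
    &self.remote_pkgs
  }
}

fn main() {
  let provider = Provider {
    remote_pkgs: vec!["left-pad".to_string(), "chalk".to_string()],
  };
  // Callers only need iteration, length, and indexing, all of which a slice
  // provides, while the concrete container stays a private detail.
  for pkg in provider.remote_pkgs() {
    println!("{pkg}");
  }
  assert_eq!(provider.remote_pkgs().len(), 2);
}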
@@ -387,6 +387,8 @@ fn main() {
    "Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
  );

+  println!("cargo:rustc-rerun-if-changed={}", symbols_path.display());
+
  #[cfg(target_os = "windows")]
  println!(
    "cargo:rustc-link-arg-bin=deno=/DEF:{}",
|
|
|
@ -44,6 +44,7 @@ use std::cmp::Ordering;
|
|||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use text_lines::LineAndColumnIndex;
|
||||
use tower_lsp::lsp_types as lsp;
|
||||
use tower_lsp::lsp_types::Position;
|
||||
use tower_lsp::lsp_types::Range;
|
||||
|
@ -228,6 +229,7 @@ pub struct TsResponseImportMapper<'a> {
|
|||
documents: &'a Documents,
|
||||
maybe_import_map: Option<&'a ImportMap>,
|
||||
resolver: &'a LspResolver,
|
||||
file_referrer: ModuleSpecifier,
|
||||
}
|
||||
|
||||
impl<'a> TsResponseImportMapper<'a> {
|
||||
|
@ -235,11 +237,13 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
documents: &'a Documents,
|
||||
maybe_import_map: Option<&'a ImportMap>,
|
||||
resolver: &'a LspResolver,
|
||||
file_referrer: &ModuleSpecifier,
|
||||
) -> Self {
|
||||
Self {
|
||||
documents,
|
||||
maybe_import_map,
|
||||
resolver,
|
||||
file_referrer: file_referrer.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -260,8 +264,6 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
let file_referrer = self.documents.get_file_referrer(referrer);
|
||||
|
||||
if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str())
|
||||
{
|
||||
let mut segments = jsr_path.split('/');
|
||||
|
@ -276,7 +278,7 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
let export = self.resolver.jsr_lookup_export_for_path(
|
||||
&nv,
|
||||
&path,
|
||||
file_referrer.as_deref(),
|
||||
Some(&self.file_referrer),
|
||||
)?;
|
||||
let sub_path = (export != ".").then_some(export);
|
||||
let mut req = None;
|
||||
|
@ -302,7 +304,7 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
req = req.or_else(|| {
|
||||
self
|
||||
.resolver
|
||||
.jsr_lookup_req_for_nv(&nv, file_referrer.as_deref())
|
||||
.jsr_lookup_req_for_nv(&nv, Some(&self.file_referrer))
|
||||
});
|
||||
let spec_str = if let Some(req) = req {
|
||||
let req_ref = PackageReqReference { req, sub_path };
|
||||
|
@ -332,7 +334,7 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
|
||||
if let Some(npm_resolver) = self
|
||||
.resolver
|
||||
.maybe_managed_npm_resolver(file_referrer.as_deref())
|
||||
.maybe_managed_npm_resolver(Some(&self.file_referrer))
|
||||
{
|
||||
if npm_resolver.in_npm_package(specifier) {
|
||||
if let Ok(Some(pkg_id)) =
|
||||
|
@ -468,6 +470,26 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn is_valid_import(
|
||||
&self,
|
||||
specifier_text: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> bool {
|
||||
self
|
||||
.resolver
|
||||
.as_graph_resolver(Some(&self.file_referrer))
|
||||
.resolve(
|
||||
specifier_text,
|
||||
&deno_graph::Range {
|
||||
specifier: referrer.clone(),
|
||||
start: deno_graph::Position::zeroed(),
|
||||
end: deno_graph::Position::zeroed(),
|
||||
},
|
||||
deno_graph::source::ResolutionMode::Types,
|
||||
)
|
||||
.is_ok()
|
||||
}
|
||||
}
|
||||
|
||||
fn try_reverse_map_package_json_exports(
|
||||
|
@ -580,7 +602,7 @@ fn fix_ts_import_action(
|
|||
referrer: &ModuleSpecifier,
|
||||
action: &tsc::CodeFixAction,
|
||||
import_mapper: &TsResponseImportMapper,
|
||||
) -> Result<tsc::CodeFixAction, AnyError> {
|
||||
) -> Result<Option<tsc::CodeFixAction>, AnyError> {
|
||||
if matches!(
|
||||
action.fix_name.as_str(),
|
||||
"import" | "fixMissingFunctionDeclaration"
|
||||
|
@ -623,19 +645,21 @@ fn fix_ts_import_action(
|
|||
})
|
||||
.collect();
|
||||
|
||||
return Ok(tsc::CodeFixAction {
|
||||
return Ok(Some(tsc::CodeFixAction {
|
||||
description,
|
||||
changes,
|
||||
commands: None,
|
||||
fix_name: action.fix_name.clone(),
|
||||
fix_id: None,
|
||||
fix_all_description: None,
|
||||
});
|
||||
}));
|
||||
} else if !import_mapper.is_valid_import(specifier, referrer) {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(action.clone())
|
||||
Ok(Some(action.clone()))
|
||||
}
|
||||
|
||||
/// Determines if two TypeScript diagnostic codes are effectively equivalent.
|
||||
|
@ -976,11 +1000,14 @@ impl CodeActionCollection {
|
|||
"The action returned from TypeScript is unsupported.",
|
||||
));
|
||||
}
|
||||
let action = fix_ts_import_action(
|
||||
let Some(action) = fix_ts_import_action(
|
||||
specifier,
|
||||
action,
|
||||
&language_server.get_ts_response_import_mapper(specifier),
|
||||
)?;
|
||||
)?
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
let edit = ts_changes_to_edit(&action.changes, language_server)?;
|
||||
let code_action = lsp::CodeAction {
|
||||
title: action.description.clone(),
|
||||
|
@ -1161,6 +1188,34 @@ impl CodeActionCollection {
|
|||
range: &lsp::Range,
|
||||
language_server: &language_server::Inner,
|
||||
) {
|
||||
fn import_start_from_specifier(
|
||||
document: &Document,
|
||||
import: &deno_graph::Import,
|
||||
) -> Option<LineAndColumnIndex> {
|
||||
// find the top level statement that contains the specifier
|
||||
let parsed_source = document.maybe_parsed_source()?.as_ref().ok()?;
|
||||
let text_info = parsed_source.text_info_lazy();
|
||||
let specifier_range = SourceRange::new(
|
||||
text_info.loc_to_source_pos(LineAndColumnIndex {
|
||||
line_index: import.specifier_range.start.line,
|
||||
column_index: import.specifier_range.start.character,
|
||||
}),
|
||||
text_info.loc_to_source_pos(LineAndColumnIndex {
|
||||
line_index: import.specifier_range.end.line,
|
||||
column_index: import.specifier_range.end.character,
|
||||
}),
|
||||
);
|
||||
|
||||
match parsed_source.program_ref() {
|
||||
deno_ast::swc::ast::Program::Module(module) => module
|
||||
.body
|
||||
.iter()
|
||||
.find(|i| i.range().contains(&specifier_range))
|
||||
.map(|i| text_info.line_and_column_index(i.range().start)),
|
||||
deno_ast::swc::ast::Program::Script(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
async fn deno_types_for_npm_action(
|
||||
document: &Document,
|
||||
range: &lsp::Range,
|
||||
|
@@ -1181,14 +1236,15 @@ impl CodeActionCollection {
range.end.line as usize,
range.end.character as usize,
);
let import_range = dependency.imports.iter().find_map(|i| {
let import_start = dependency.imports.iter().find_map(|i| {
if json!(i.kind) != json!("es") && json!(i.kind) != json!("tsType") {
return None;
}
if !i.specifier_range.includes(&position) {
return None;
}
i.full_range.as_ref()

import_start_from_specifier(document, i)
})?;
let referrer = document.specifier();
let file_referrer = document.file_referrer();
@@ -1275,8 +1331,8 @@ impl CodeActionCollection {
.specifier_to_uri(referrer, file_referrer)
.ok()?;
let position = lsp::Position {
line: import_range.start.line as u32,
character: import_range.start.character as u32,
line: import_start.line_index as u32,
character: import_start.column_index as u32,
};
let new_text = format!(
"{}// @deno-types=\"{}\"\n",
@@ -53,6 +53,7 @@ use super::logging::lsp_log;
use crate::args::discover_npmrc_from_workspace;
use crate::args::has_flag_env_var;
use crate::args::CliLockfile;
use crate::args::CliLockfileReadFromPathOptions;
use crate::args::ConfigFile;
use crate::args::LintFlags;
use crate::args::LintOptions;
@@ -1931,7 +1932,11 @@ fn resolve_lockfile_from_path(
lockfile_path: PathBuf,
frozen: bool,
) -> Option<CliLockfile> {
match CliLockfile::read_from_path(lockfile_path, frozen) {
match CliLockfile::read_from_path(CliLockfileReadFromPathOptions {
file_path: lockfile_path,
frozen,
skip_write: false,
}) {
Ok(value) => {
if value.filename.exists() {
if let Ok(specifier) = ModuleSpecifier::from_file_path(&value.filename)
@@ -96,6 +96,7 @@ use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::InternalFlags;
use crate::args::UnstableFmtOptions;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
@@ -1917,6 +1918,7 @@ impl Inner {
// as the import map is an implementation detail
.and_then(|d| d.resolver.maybe_import_map()),
self.resolver.as_ref(),
file_referrer,
)
}
@@ -3605,7 +3607,10 @@ impl Inner {
};
let cli_options = CliOptions::new(
Arc::new(Flags {
cache_path: Some(self.cache.deno_dir().root.clone()),
internal: InternalFlags {
cache_path: Some(self.cache.deno_dir().root.clone()),
..Default::default()
},
ca_stores: workspace_settings.certificate_stores.clone(),
ca_data: workspace_settings.tls_certificate.clone().map(CaData::File),
unsafely_ignore_certificate_errors: workspace_settings
cli/main.rs
@@ -37,6 +37,7 @@ use crate::util::v8::get_v8_flags_from_env;
use crate::util::v8::init_v8_flags;

use args::TaskFlags;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
@@ -51,10 +52,12 @@ use deno_runtime::fmt_errors::FixSuggestion;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_terminal::colors;
use factory::CliFactory;
use npm::ResolvePkgFolderFromDenoReqError;
use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME;
use std::env;
use std::future::Future;
use std::io::IsTerminal;
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
@@ -159,7 +162,19 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
DenoSubcommand::Uninstall(uninstall_flags) => spawn_subcommand(async {
tools::installer::uninstall(flags, uninstall_flags).await
}),
DenoSubcommand::Lsp => spawn_subcommand(async { lsp::start().await }),
DenoSubcommand::Lsp => spawn_subcommand(async {
if std::io::stderr().is_terminal() {
log::warn!(
"{} command is intended to be run by text editors and IDEs and shouldn't be run manually.

Visit https://docs.deno.com/runtime/getting_started/setup_your_environment/ for instruction
how to setup your favorite text editor.

Press Ctrl+C to exit.
", colors::cyan("deno lsp"));
}
lsp::start().await
}),
DenoSubcommand::Lint(lint_flags) => spawn_subcommand(async {
if lint_flags.rules {
tools::lint::print_rules_list(
@@ -182,6 +197,21 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
match result {
Ok(v) => Ok(v),
Err(script_err) => {
if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = script_err.downcast_ref::<ResolvePkgFolderFromDenoReqError>() {
if flags.node_modules_dir.is_none() {
let mut flags = flags.deref().clone();
let watch = match &flags.subcommand {
DenoSubcommand::Run(run_flags) => run_flags.watch.clone(),
_ => unreachable!(),
};
flags.node_modules_dir = Some(deno_config::deno_json::NodeModulesDirMode::None);
// use the current lockfile, but don't write it out
if flags.frozen_lockfile.is_none() {
flags.internal.lockfile_skip_write = true;
}
return tools::run::run_script(WorkerExecutionMode::Run, Arc::new(flags), watch).await;
}
}
let script_err_msg = script_err.to_string();
if script_err_msg.starts_with(MODULE_NOT_FOUND) || script_err_msg.starts_with(UNSUPPORTED_SCHEME) {
if run_flags.bare {
@@ -1 +1 @@
{ "node_api_create_syntax_error"; "napi_make_callback"; "napi_has_named_property"; "napi_async_destroy"; "napi_coerce_to_object"; "napi_get_arraybuffer_info"; "napi_detach_arraybuffer"; "napi_get_undefined"; "napi_reference_unref"; "napi_fatal_error"; "napi_open_callback_scope"; "napi_close_callback_scope"; "napi_get_value_uint32"; "napi_create_function"; "napi_create_arraybuffer"; "napi_get_value_int64"; "napi_get_all_property_names"; "napi_resolve_deferred"; "napi_is_detached_arraybuffer"; "napi_create_string_utf8"; "napi_create_threadsafe_function"; "node_api_throw_syntax_error"; "napi_create_bigint_int64"; "napi_wrap"; "napi_set_property"; "napi_get_value_bigint_int64"; "napi_open_handle_scope"; "napi_create_error"; "napi_create_buffer"; "napi_cancel_async_work"; "napi_is_exception_pending"; "napi_acquire_threadsafe_function"; "napi_create_external"; "napi_get_threadsafe_function_context"; "napi_get_null"; "napi_create_string_utf16"; "node_api_create_external_string_utf16"; "napi_get_value_bigint_uint64"; "napi_module_register"; "napi_is_typedarray"; "napi_create_external_buffer"; "napi_get_new_target"; "napi_get_instance_data"; "napi_close_handle_scope"; "napi_get_value_string_utf16"; "napi_get_property_names"; "napi_is_arraybuffer"; "napi_get_cb_info"; "napi_define_properties"; "napi_add_env_cleanup_hook"; "node_api_get_module_file_name"; "napi_get_node_version"; "napi_create_int64"; "napi_create_double"; "napi_get_and_clear_last_exception"; "napi_create_reference"; "napi_get_typedarray_info"; "napi_call_threadsafe_function"; "napi_get_last_error_info"; "napi_create_array_with_length"; "napi_coerce_to_number"; "napi_get_global"; "napi_is_error"; "napi_set_instance_data"; "napi_create_typedarray"; "napi_throw_type_error"; "napi_has_property"; "napi_get_value_external"; "napi_create_range_error"; "napi_typeof"; "napi_ref_threadsafe_function"; "napi_create_bigint_uint64"; "napi_get_prototype"; "napi_adjust_external_memory"; "napi_release_threadsafe_function"; "napi_delete_async_work"; "napi_create_string_latin1"; "node_api_create_external_string_latin1"; "napi_is_array"; "napi_unref_threadsafe_function"; "napi_throw_error"; "napi_has_own_property"; "napi_get_reference_value"; "napi_remove_env_cleanup_hook"; "napi_get_value_string_utf8"; "napi_is_promise"; "napi_get_boolean"; "napi_run_script"; "napi_get_element"; "napi_get_named_property"; "napi_get_buffer_info"; "napi_get_value_bool"; "napi_reference_ref"; "napi_create_object"; "napi_create_promise"; "napi_create_int32"; "napi_escape_handle"; "napi_open_escapable_handle_scope"; "napi_throw"; "napi_get_value_double"; "napi_set_named_property"; "napi_call_function"; "napi_create_date"; "napi_object_freeze"; "napi_get_uv_event_loop"; "napi_get_value_string_latin1"; "napi_reject_deferred"; "napi_add_finalizer"; "napi_create_array"; "napi_delete_reference"; "napi_get_date_value"; "napi_create_dataview"; "napi_get_version"; "napi_define_class"; "napi_is_date"; "napi_remove_wrap"; "napi_delete_property"; "napi_instanceof"; "napi_create_buffer_copy"; "napi_delete_element"; "napi_object_seal"; "napi_queue_async_work"; "napi_get_value_bigint_words"; "napi_is_buffer"; "napi_get_array_length"; "napi_get_property"; "napi_new_instance"; "napi_set_element"; "napi_create_bigint_words"; "napi_strict_equals"; "napi_is_dataview"; "napi_close_escapable_handle_scope"; "napi_get_dataview_info"; "napi_get_value_int32"; "napi_unwrap"; "napi_throw_range_error"; "napi_coerce_to_bool"; "napi_create_uint32"; "napi_has_element"; "napi_create_external_arraybuffer"; 
"napi_create_symbol"; "node_api_symbol_for"; "napi_coerce_to_string"; "napi_create_type_error"; "napi_fatal_exception"; "napi_create_async_work"; "napi_async_init"; "node_api_create_property_key_utf16"; "napi_type_tag_object"; "napi_check_object_type_tag"; "node_api_post_finalizer"; "napi_add_async_cleanup_hook"; "napi_remove_async_cleanup_hook"; };
{ "node_api_create_syntax_error"; "napi_make_callback"; "napi_has_named_property"; "napi_async_destroy"; "napi_coerce_to_object"; "napi_get_arraybuffer_info"; "napi_detach_arraybuffer"; "napi_get_undefined"; "napi_reference_unref"; "napi_fatal_error"; "napi_open_callback_scope"; "napi_close_callback_scope"; "napi_get_value_uint32"; "napi_create_function"; "napi_create_arraybuffer"; "napi_get_value_int64"; "napi_get_all_property_names"; "napi_resolve_deferred"; "napi_is_detached_arraybuffer"; "napi_create_string_utf8"; "napi_create_threadsafe_function"; "node_api_throw_syntax_error"; "napi_create_bigint_int64"; "napi_wrap"; "napi_set_property"; "napi_get_value_bigint_int64"; "napi_open_handle_scope"; "napi_create_error"; "napi_create_buffer"; "napi_cancel_async_work"; "napi_is_exception_pending"; "napi_acquire_threadsafe_function"; "napi_create_external"; "napi_get_threadsafe_function_context"; "napi_get_null"; "napi_create_string_utf16"; "node_api_create_external_string_utf16"; "napi_get_value_bigint_uint64"; "napi_module_register"; "napi_is_typedarray"; "napi_create_external_buffer"; "napi_get_new_target"; "napi_get_instance_data"; "napi_close_handle_scope"; "napi_get_value_string_utf16"; "napi_get_property_names"; "napi_is_arraybuffer"; "napi_get_cb_info"; "napi_define_properties"; "napi_add_env_cleanup_hook"; "node_api_get_module_file_name"; "napi_get_node_version"; "napi_create_int64"; "napi_create_double"; "napi_get_and_clear_last_exception"; "napi_create_reference"; "napi_get_typedarray_info"; "napi_call_threadsafe_function"; "napi_get_last_error_info"; "napi_create_array_with_length"; "napi_coerce_to_number"; "napi_get_global"; "napi_is_error"; "napi_set_instance_data"; "napi_create_typedarray"; "napi_throw_type_error"; "napi_has_property"; "napi_get_value_external"; "napi_create_range_error"; "napi_typeof"; "napi_ref_threadsafe_function"; "napi_create_bigint_uint64"; "napi_get_prototype"; "napi_adjust_external_memory"; "napi_release_threadsafe_function"; "napi_delete_async_work"; "napi_create_string_latin1"; "node_api_create_external_string_latin1"; "napi_is_array"; "napi_unref_threadsafe_function"; "napi_throw_error"; "napi_has_own_property"; "napi_get_reference_value"; "napi_remove_env_cleanup_hook"; "napi_get_value_string_utf8"; "napi_is_promise"; "napi_get_boolean"; "napi_run_script"; "napi_get_element"; "napi_get_named_property"; "napi_get_buffer_info"; "napi_get_value_bool"; "napi_reference_ref"; "napi_create_object"; "napi_create_promise"; "napi_create_int32"; "napi_escape_handle"; "napi_open_escapable_handle_scope"; "napi_throw"; "napi_get_value_double"; "napi_set_named_property"; "napi_call_function"; "napi_create_date"; "napi_object_freeze"; "napi_get_uv_event_loop"; "napi_get_value_string_latin1"; "napi_reject_deferred"; "napi_add_finalizer"; "napi_create_array"; "napi_delete_reference"; "napi_get_date_value"; "napi_create_dataview"; "napi_get_version"; "napi_define_class"; "napi_is_date"; "napi_remove_wrap"; "napi_delete_property"; "napi_instanceof"; "napi_create_buffer_copy"; "napi_delete_element"; "napi_object_seal"; "napi_queue_async_work"; "napi_get_value_bigint_words"; "napi_is_buffer"; "napi_get_array_length"; "napi_get_property"; "napi_new_instance"; "napi_set_element"; "napi_create_bigint_words"; "napi_strict_equals"; "napi_is_dataview"; "napi_close_escapable_handle_scope"; "napi_get_dataview_info"; "napi_get_value_int32"; "napi_unwrap"; "napi_throw_range_error"; "napi_coerce_to_bool"; "napi_create_uint32"; "napi_has_element"; "napi_create_external_arraybuffer"; 
"napi_create_symbol"; "node_api_symbol_for"; "napi_coerce_to_string"; "napi_create_type_error"; "napi_fatal_exception"; "napi_create_async_work"; "napi_async_init"; "node_api_create_property_key_utf16"; "napi_type_tag_object"; "napi_check_object_type_tag"; "node_api_post_finalizer"; "napi_add_async_cleanup_hook"; "napi_remove_async_cleanup_hook"; "uv_mutex_init"; "uv_mutex_lock"; "uv_mutex_unlock"; "uv_mutex_destroy"; "uv_async_init"; "uv_async_send"; "uv_close"; };
@@ -150,4 +150,11 @@ _napi_type_tag_object
_napi_check_object_type_tag
_node_api_post_finalizer
_napi_add_async_cleanup_hook
_napi_remove_async_cleanup_hook
_napi_remove_async_cleanup_hook
_uv_mutex_init
_uv_mutex_lock
_uv_mutex_unlock
_uv_mutex_destroy
_uv_async_init
_uv_async_send
_uv_close
@@ -152,4 +152,11 @@ EXPORTS
napi_check_object_type_tag
node_api_post_finalizer
napi_add_async_cleanup_hook
napi_remove_async_cleanup_hook
napi_remove_async_cleanup_hook
uv_mutex_init
uv_mutex_lock
uv_mutex_unlock
uv_mutex_destroy
uv_async_init
uv_async_send
uv_close
@@ -18,3 +18,4 @@
pub mod js_native_api;
pub mod node_api;
pub mod util;
pub mod uv;
@@ -547,11 +547,16 @@ fn napi_delete_async_work(env: *mut Env, work: napi_async_work) -> napi_status {
}

#[napi_sym]
fn napi_get_uv_event_loop(env: *mut Env, uv_loop: *mut *mut ()) -> napi_status {
let env = check_env!(env);
fn napi_get_uv_event_loop(
env_ptr: *mut Env,
uv_loop: *mut *mut (),
) -> napi_status {
let env = check_env!(env_ptr);
check_arg!(env, uv_loop);
// There is no uv_loop in Deno
napi_set_last_error(env, napi_generic_failure)
unsafe {
*uv_loop = env_ptr.cast();
}
0
}

#[napi_sym]
@@ -2,7 +2,7 @@

[package]
name = "napi_sym"
version = "0.99.0"
version = "0.101.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -152,6 +152,13 @@
"napi_check_object_type_tag",
"node_api_post_finalizer",
"napi_add_async_cleanup_hook",
"napi_remove_async_cleanup_hook"
"napi_remove_async_cleanup_hook",
"uv_mutex_init",
"uv_mutex_lock",
"uv_mutex_unlock",
"uv_mutex_destroy",
"uv_async_init",
"uv_async_send",
"uv_close"
]
}
cli/napi/uv.rs (new file)
@@ -0,0 +1,231 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::parking_lot::Mutex;
use deno_runtime::deno_napi::*;
use std::mem::MaybeUninit;
use std::ptr::addr_of_mut;

#[allow(clippy::print_stderr)]
fn assert_ok(res: c_int) -> c_int {
if res != 0 {
eprintln!("bad result in uv polyfill: {res}");
// don't panic because that might unwind into
// c/c++
std::process::abort();
}
res
}

use crate::napi::js_native_api::napi_create_string_utf8;
use crate::napi::node_api::napi_create_async_work;
use crate::napi::node_api::napi_delete_async_work;
use crate::napi::node_api::napi_queue_async_work;
use std::ffi::c_int;

const UV_MUTEX_SIZE: usize = {
#[cfg(unix)]
{
std::mem::size_of::<libc::pthread_mutex_t>()
}
#[cfg(windows)]
{
std::mem::size_of::<windows_sys::Win32::System::Threading::CRITICAL_SECTION>(
)
}
};

#[repr(C)]
struct uv_mutex_t {
mutex: Mutex<()>,
_padding: [MaybeUninit<usize>; const {
(UV_MUTEX_SIZE - size_of::<Mutex<()>>()) / size_of::<usize>()
}],
}

#[no_mangle]
unsafe extern "C" fn uv_mutex_init(lock: *mut uv_mutex_t) -> c_int {
unsafe {
addr_of_mut!((*lock).mutex).write(Mutex::new(()));
0
}
}

#[no_mangle]
unsafe extern "C" fn uv_mutex_lock(lock: *mut uv_mutex_t) {
unsafe {
let guard = (*lock).mutex.lock();
// forget the guard so it doesn't unlock when it goes out of scope.
// we're going to unlock it manually
std::mem::forget(guard);
}
}

#[no_mangle]
unsafe extern "C" fn uv_mutex_unlock(lock: *mut uv_mutex_t) {
unsafe {
(*lock).mutex.force_unlock();
}
}

#[no_mangle]
unsafe extern "C" fn uv_mutex_destroy(_lock: *mut uv_mutex_t) {
// no cleanup required
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
#[allow(dead_code)]
enum uv_handle_type {
UV_UNKNOWN_HANDLE = 0,
UV_ASYNC,
UV_CHECK,
UV_FS_EVENT,
UV_FS_POLL,
UV_HANDLE,
UV_IDLE,
UV_NAMED_PIPE,
UV_POLL,
UV_PREPARE,
UV_PROCESS,
UV_STREAM,
UV_TCP,
UV_TIMER,
UV_TTY,
UV_UDP,
UV_SIGNAL,
UV_FILE,
UV_HANDLE_TYPE_MAX,
}

const UV_HANDLE_SIZE: usize = 96;

#[repr(C)]
struct uv_handle_t {
// public members
pub data: *mut c_void,
pub r#loop: *mut uv_loop_t,
pub r#type: uv_handle_type,

_padding: [MaybeUninit<usize>; const {
(UV_HANDLE_SIZE
- size_of::<*mut c_void>()
- size_of::<*mut uv_loop_t>()
- size_of::<uv_handle_type>())
/ size_of::<usize>()
}],
}

#[cfg(unix)]
const UV_ASYNC_SIZE: usize = 128;

#[cfg(windows)]
const UV_ASYNC_SIZE: usize = 224;

#[repr(C)]
struct uv_async_t {
// public members
pub data: *mut c_void,
pub r#loop: *mut uv_loop_t,
pub r#type: uv_handle_type,
// private
async_cb: uv_async_cb,
work: napi_async_work,
_padding: [MaybeUninit<usize>; const {
(UV_ASYNC_SIZE
- size_of::<*mut c_void>()
- size_of::<*mut uv_loop_t>()
- size_of::<uv_handle_type>()
- size_of::<uv_async_cb>()
- size_of::<napi_async_work>())
/ size_of::<usize>()
}],
}

type uv_loop_t = Env;
type uv_async_cb = extern "C" fn(handle: *mut uv_async_t);
#[no_mangle]
unsafe extern "C" fn uv_async_init(
r#loop: *mut uv_loop_t,
// probably uninitialized
r#async: *mut uv_async_t,
async_cb: uv_async_cb,
) -> c_int {
unsafe {
addr_of_mut!((*r#async).r#loop).write(r#loop);
addr_of_mut!((*r#async).r#type).write(uv_handle_type::UV_ASYNC);
addr_of_mut!((*r#async).async_cb).write(async_cb);

let mut resource_name: MaybeUninit<napi_value> = MaybeUninit::uninit();
assert_ok(napi_create_string_utf8(
r#loop,
c"uv_async".as_ptr(),
usize::MAX,
resource_name.as_mut_ptr(),
));
let resource_name = resource_name.assume_init();

let res = napi_create_async_work(
r#loop,
None::<v8::Local<'static, v8::Value>>.into(),
resource_name,
Some(async_exec_wrap),
None,
r#async.cast(),
addr_of_mut!((*r#async).work),
);
-res
}
}

#[no_mangle]
unsafe extern "C" fn uv_async_send(handle: *mut uv_async_t) -> c_int {
unsafe { -napi_queue_async_work((*handle).r#loop, (*handle).work) }
}

type uv_close_cb = unsafe extern "C" fn(*mut uv_handle_t);

#[no_mangle]
unsafe extern "C" fn uv_close(handle: *mut uv_handle_t, close: uv_close_cb) {
unsafe {
if handle.is_null() {
close(handle);
return;
}
if let uv_handle_type::UV_ASYNC = (*handle).r#type {
let handle: *mut uv_async_t = handle.cast();
napi_delete_async_work((*handle).r#loop, (*handle).work);
}
close(handle);
}
}

unsafe extern "C" fn async_exec_wrap(_env: napi_env, data: *mut c_void) {
let data: *mut uv_async_t = data.cast();
unsafe {
((*data).async_cb)(data);
}
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn sizes() {
assert_eq!(
std::mem::size_of::<libuv_sys_lite::uv_mutex_t>(),
UV_MUTEX_SIZE
);
assert_eq!(
std::mem::size_of::<libuv_sys_lite::uv_handle_t>(),
UV_HANDLE_SIZE
);
assert_eq!(
std::mem::size_of::<libuv_sys_lite::uv_async_t>(),
UV_ASYNC_SIZE
);
assert_eq!(std::mem::size_of::<uv_mutex_t>(), UV_MUTEX_SIZE);
assert_eq!(std::mem::size_of::<uv_handle_t>(), UV_HANDLE_SIZE);
assert_eq!(std::mem::size_of::<uv_async_t>(), UV_ASYNC_SIZE);
}
}
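For context on the uv polyfill above, here is a minimal sketch (assumed, not part of this patch) of the uv_async pattern a C N-API addon typically uses and that these exported symbols are meant to satisfy; the addon, callback, and variable names are illustrative only:

    #include <node_api.h>
    #include <uv.h>

    static uv_async_t async_handle;

    /* runs on the event-loop thread after uv_async_send() is called from any thread */
    static void on_async(uv_async_t* handle) {
      /* marshal results back to JS here */
    }

    static void on_close(uv_handle_t* handle) {
      /* handle memory may be released here */
    }

    static napi_value init(napi_env env, napi_value exports) {
      uv_loop_t* loop;
      napi_get_uv_event_loop(env, &loop); /* with this patch, Deno hands back its Env pointer */
      uv_async_init(loop, &async_handle, on_async);
      /* later, from a worker thread: uv_async_send(&async_handle); */
      /* on teardown: uv_close((uv_handle_t*)&async_handle, on_close); */
      return exports;
    }

    NAPI_MODULE(NODE_GYP_MODULE_NAME, init)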
@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
@@ -21,6 +22,7 @@ use crate::resolver::CliDenoResolverFs;

use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError;

pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliDenoResolverFs>;
@@ -31,18 +33,19 @@ pub type CliByonmNpmResolver = ByonmNpmResolver<CliDenoResolverFs>;
struct CliByonmWrapper(Arc<CliByonmNpmResolver>);

impl NodeRequireResolver for CliByonmWrapper {
fn ensure_read_permission(
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &Path,
) -> Result<(), AnyError> {
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
if !path
.components()
.any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules")
{
_ = permissions.check_read_path(path)?;
permissions.check_read_path(path)
} else {
Ok(Cow::Borrowed(path))
}
Ok(())
}
}

@@ -90,10 +93,11 @@ impl CliNpmResolver for CliByonmNpmResolver {
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, AnyError> {
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
ByonmNpmResolver::resolve_pkg_folder_from_deno_module_req(
self, req, referrer,
)
.map_err(ResolvePkgFolderFromDenoReqError::Byonm)
}

fn check_state_hash(&self) -> Option<u64> {
@ -1,5 +1,6 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
@ -20,6 +21,7 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
|||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeRequireResolver;
|
||||
|
@ -51,6 +53,7 @@ use self::resolvers::NpmPackageFsResolver;
|
|||
|
||||
use super::CliNpmResolver;
|
||||
use super::InnerCliNpmResolverRef;
|
||||
use super::ResolvePkgFolderFromDenoReqError;
|
||||
|
||||
mod cache;
|
||||
mod registry;
|
||||
|
@ -477,6 +480,25 @@ impl ManagedCliNpmResolver {
|
|||
self.resolution.resolve_pkg_id_from_pkg_req(req)
|
||||
}
|
||||
|
||||
pub fn ensure_no_pkg_json_dep_errors(&self) -> Result<(), AnyError> {
|
||||
for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
|
||||
match err {
|
||||
deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => {
|
||||
return Err(
|
||||
AnyError::from(err.clone())
|
||||
.context("Failed to install from package.json"),
|
||||
);
|
||||
}
|
||||
deno_package_json::PackageJsonDepValueParseError::Unsupported {
|
||||
..
|
||||
} => {
|
||||
log::warn!("{} {} in package.json", colors::yellow("Warning"), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Ensures that the top level `package.json` dependencies are installed.
|
||||
/// This may set up the `node_modules` directory.
|
||||
///
|
||||
|
@ -488,6 +510,7 @@ impl ManagedCliNpmResolver {
|
|||
if !self.top_level_install_flag.raise() {
|
||||
return Ok(false); // already did this
|
||||
}
|
||||
|
||||
let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs();
|
||||
if pkg_json_remote_pkgs.is_empty() {
|
||||
return Ok(false);
|
||||
|
@ -571,11 +594,11 @@ impl NpmResolver for ManagedCliNpmResolver {
|
|||
}
|
||||
|
||||
impl NodeRequireResolver for ManagedCliNpmResolver {
|
||||
fn ensure_read_permission(
|
||||
fn ensure_read_permission<'a>(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
self.fs_resolver.ensure_read_permission(permissions, path)
|
||||
}
|
||||
}
|
||||
|
@ -649,9 +672,13 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
|||
&self,
|
||||
req: &PackageReq,
|
||||
_referrer: &ModuleSpecifier,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let pkg_id = self.resolve_pkg_id_from_pkg_req(req)?;
|
||||
self.resolve_pkg_folder_from_pkg_id(&pkg_id)
|
||||
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
|
||||
let pkg_id = self
|
||||
.resolve_pkg_id_from_pkg_req(req)
|
||||
.map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?;
|
||||
self
|
||||
.resolve_pkg_folder_from_pkg_id(&pkg_id)
|
||||
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
|
||||
}
|
||||
|
||||
fn check_state_hash(&self) -> Option<u64> {
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
pub mod bin_entries;
|
||||
pub mod lifecycle_scripts;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
|
@ -62,11 +63,12 @@ pub trait NpmPackageFsResolver: Send + Sync {
|
|||
|
||||
async fn cache_packages(&self) -> Result<(), AnyError>;
|
||||
|
||||
fn ensure_read_permission(
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn ensure_read_permission<'a>(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError>;
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError>;
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
|
@ -85,11 +87,15 @@ impl RegistryReadPermissionChecker {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn ensure_registry_read_permission(
|
||||
pub fn ensure_registry_read_permission<'a>(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
if permissions.query_read_all() {
|
||||
return Ok(Cow::Borrowed(path)); // skip permissions checks below
|
||||
}
|
||||
|
||||
// allow reading if it's in the node_modules
|
||||
let is_path_in_node_modules = path.starts_with(&self.registry_path)
|
||||
&& path
|
||||
|
@ -118,20 +124,20 @@ impl RegistryReadPermissionChecker {
|
|||
},
|
||||
}
|
||||
};
|
||||
let Some(registry_path_canon) = canonicalize(&self.registry_path)? else {
|
||||
return Ok(()); // not exists, allow reading
|
||||
};
|
||||
let Some(path_canon) = canonicalize(path)? else {
|
||||
return Ok(()); // not exists, allow reading
|
||||
};
|
||||
|
||||
if path_canon.starts_with(registry_path_canon) {
|
||||
return Ok(());
|
||||
if let Some(registry_path_canon) = canonicalize(&self.registry_path)? {
|
||||
if let Some(path_canon) = canonicalize(path)? {
|
||||
if path_canon.starts_with(registry_path_canon) {
|
||||
return Ok(Cow::Owned(path_canon));
|
||||
}
|
||||
} else if path.starts_with(registry_path_canon)
|
||||
|| path.starts_with(&self.registry_path)
|
||||
{
|
||||
return Ok(Cow::Borrowed(path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ = permissions.check_read_path(path)?;
|
||||
Ok(())
|
||||
permissions.check_read_path(path)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -183,11 +183,11 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn ensure_read_permission(
|
||||
fn ensure_read_permission<'a>(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
self
|
||||
.registry_read_permission_checker
|
||||
.ensure_registry_read_permission(permissions, path)
|
||||
|
|
|
@ -257,11 +257,11 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
.await
|
||||
}
|
||||
|
||||
fn ensure_read_permission(
|
||||
fn ensure_read_permission<'a>(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
self
|
||||
.registry_read_permission_checker
|
||||
.ensure_registry_read_permission(permissions, path)
|
||||
|
@ -343,6 +343,14 @@ async fn sync_resolution_with_fs(
|
|||
},
|
||||
);
|
||||
let packages_with_deprecation_warnings = Arc::new(Mutex::new(Vec::new()));
|
||||
|
||||
let mut package_tags: HashMap<&PackageNv, Vec<&str>> = HashMap::new();
|
||||
for (package_req, package_nv) in snapshot.package_reqs() {
|
||||
if let Some(tag) = package_req.version_req.tag() {
|
||||
package_tags.entry(package_nv).or_default().push(tag);
|
||||
}
|
||||
}
|
||||
|
||||
for package in &package_partitions.packages {
|
||||
if let Some(current_pkg) =
|
||||
newest_packages_by_name.get_mut(&package.id.nv.name)
|
||||
|
@ -357,11 +365,29 @@ async fn sync_resolution_with_fs(
|
|||
let package_folder_name =
|
||||
get_package_folder_id_folder_name(&package.get_package_cache_folder_id());
|
||||
let folder_path = deno_local_registry_dir.join(&package_folder_name);
|
||||
let tags = package_tags
|
||||
.get(&package.id.nv)
|
||||
.map(|tags| tags.join(","))
|
||||
.unwrap_or_default();
|
||||
enum PackageFolderState {
|
||||
UpToDate,
|
||||
Uninitialized,
|
||||
TagsOutdated,
|
||||
}
|
||||
let initialized_file = folder_path.join(".initialized");
|
||||
let package_state = std::fs::read_to_string(&initialized_file)
|
||||
.map(|s| {
|
||||
if s != tags {
|
||||
PackageFolderState::TagsOutdated
|
||||
} else {
|
||||
PackageFolderState::UpToDate
|
||||
}
|
||||
})
|
||||
.unwrap_or(PackageFolderState::Uninitialized);
|
||||
if !cache
|
||||
.cache_setting()
|
||||
.should_use_for_npm_package(&package.id.nv.name)
|
||||
|| !initialized_file.exists()
|
||||
|| matches!(package_state, PackageFolderState::Uninitialized)
|
||||
{
|
||||
// cache bust the dep from the dep setup cache so the symlinks
|
||||
// are forced to be recreated
|
||||
|
@ -371,6 +397,7 @@ async fn sync_resolution_with_fs(
|
|||
let bin_entries_to_setup = bin_entries.clone();
|
||||
let packages_with_deprecation_warnings =
|
||||
packages_with_deprecation_warnings.clone();
|
||||
|
||||
cache_futures.push(async move {
|
||||
tarball_cache
|
||||
.ensure_package(&package.id.nv, &package.dist)
|
||||
|
@ -389,7 +416,7 @@ async fn sync_resolution_with_fs(
|
|||
move || {
|
||||
clone_dir_recursive(&cache_folder, &package_path)?;
|
||||
// write out a file that indicates this folder has been initialized
|
||||
fs::write(initialized_file, "")?;
|
||||
fs::write(initialized_file, tags)?;
|
||||
|
||||
Ok::<_, AnyError>(())
|
||||
}
|
||||
|
@ -410,6 +437,8 @@ async fn sync_resolution_with_fs(
|
|||
drop(pb_guard); // explicit for clarity
|
||||
Ok::<_, AnyError>(())
|
||||
});
|
||||
} else if matches!(package_state, PackageFolderState::TagsOutdated) {
|
||||
fs::write(initialized_file, tags)?;
|
||||
}
|
||||
|
||||
let sub_node_modules = folder_path.join("node_modules");
|
||||
|
@ -518,9 +547,9 @@ async fn sync_resolution_with_fs(
|
|||
// linked into the root
|
||||
match found_names.entry(remote_alias) {
|
||||
Entry::Occupied(nv) => {
|
||||
alias_clashes
|
||||
|| remote.req.name != nv.get().name // alias to a different package (in case of duplicate aliases)
|
||||
|| !remote.req.version_req.matches(&nv.get().version) // incompatible version
|
||||
// alias to a different package (in case of duplicate aliases)
|
||||
// or the version doesn't match the version in the root node_modules
|
||||
alias_clashes || &remote_pkg.id.nv != *nv.get()
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(&remote_pkg.id.nv);
|
||||
|
|
|
@ -14,11 +14,13 @@ use deno_core::error::AnyError;
|
|||
use deno_core::serde_json;
|
||||
use deno_npm::registry::NpmPackageInfo;
|
||||
use deno_resolver::npm::ByonmNpmResolver;
|
||||
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
|
||||
use deno_runtime::deno_node::NodeRequireResolver;
|
||||
use deno_runtime::ops::process::NpmProcessStateProvider;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::package::PackageReq;
|
||||
use node_resolver::NpmResolver;
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::args::npm_registry_url;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
|
@ -29,6 +31,14 @@ pub use self::managed::CliNpmResolverManagedCreateOptions;
|
|||
pub use self::managed::CliNpmResolverManagedSnapshotOption;
|
||||
pub use self::managed::ManagedCliNpmResolver;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum ResolvePkgFolderFromDenoReqError {
|
||||
#[error(transparent)]
|
||||
Managed(deno_core::error::AnyError),
|
||||
#[error(transparent)]
|
||||
Byonm(#[from] ByonmResolvePkgFolderFromDenoReqError),
|
||||
}
|
||||
|
||||
pub enum CliNpmResolverCreateOptions {
|
||||
Managed(CliNpmResolverManagedCreateOptions),
|
||||
Byonm(CliByonmNpmResolverCreateOptions),
|
||||
|
@ -93,7 +103,7 @@ pub trait CliNpmResolver: NpmResolver {
|
|||
&self,
|
||||
req: &PackageReq,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError>;
|
||||
|
||||
/// Returns a hash returning the state of the npm resolver
|
||||
/// or `None` if the state currently can't be determined.
|
||||
|
|
|
@ -195,7 +195,7 @@ pub async fn doc(
|
|||
kind_with_drilldown:
|
||||
deno_doc::html::DocNodeKindWithDrilldown::Other(node.kind()),
|
||||
inner: Rc::new(node),
|
||||
drilldown_parent_kind: None,
|
||||
drilldown_name: None,
|
||||
parent: None,
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
|
@ -262,7 +262,7 @@ pub async fn doc(
|
|||
}
|
||||
|
||||
struct DocResolver {
|
||||
deno_ns: std::collections::HashSet<Vec<String>>,
|
||||
deno_ns: std::collections::HashMap<Vec<String>, Option<Rc<ShortPath>>>,
|
||||
strip_trailing_html: bool,
|
||||
}
|
||||
|
||||
|
@ -286,7 +286,7 @@ impl deno_doc::html::HrefResolver for DocResolver {
|
|||
}
|
||||
|
||||
fn resolve_global_symbol(&self, symbol: &[String]) -> Option<String> {
|
||||
if self.deno_ns.contains(symbol) {
|
||||
if self.deno_ns.contains_key(symbol) {
|
||||
Some(format!(
|
||||
"https://deno.land/api@v{}?s={}",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
|
@ -455,7 +455,7 @@ impl deno_doc::html::HrefResolver for NodeDocResolver {
|
|||
fn generate_docs_directory(
|
||||
doc_nodes_by_url: IndexMap<ModuleSpecifier, Vec<doc::DocNode>>,
|
||||
html_options: &DocHtmlFlag,
|
||||
deno_ns: std::collections::HashSet<Vec<String>>,
|
||||
deno_ns: std::collections::HashMap<Vec<String>, Option<Rc<ShortPath>>>,
|
||||
rewrite_map: Option<IndexMap<ModuleSpecifier, String>>,
|
||||
) -> Result<(), AnyError> {
|
||||
let cwd = std::env::current_dir().context("Failed to get CWD")?;
|
||||
|
@ -513,7 +513,6 @@ fn generate_docs_directory(
|
|||
rewrite_map,
|
||||
href_resolver,
|
||||
usage_composer: None,
|
||||
composable_output: false,
|
||||
category_docs,
|
||||
disable_search: internal_env.is_some(),
|
||||
symbol_redirect_map,
|
||||
|
|
|
@ -298,6 +298,10 @@ async fn install_local(
|
|||
}
|
||||
InstallFlagsLocal::TopLevel => {
|
||||
let factory = CliFactory::from_flags(flags);
|
||||
// surface any errors in the package.json
|
||||
if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() {
|
||||
npm_resolver.ensure_no_pkg_json_dep_errors()?;
|
||||
}
|
||||
crate::tools::registry::cache_top_level_deps(&factory, None).await?;
|
||||
|
||||
if let Some(lockfile) = factory.cli_options()?.maybe_lockfile() {
|
||||
|
|
|
@ -357,56 +357,74 @@ pub struct JupyterReplSession {
|
|||
|
||||
impl JupyterReplSession {
|
||||
pub async fn start(&mut self) {
|
||||
let mut poll_worker = true;
|
||||
loop {
|
||||
let Some(msg) = self.rx.recv().await else {
|
||||
break;
|
||||
};
|
||||
let resp = match msg {
|
||||
JupyterReplRequest::LspCompletions {
|
||||
line_text,
|
||||
position,
|
||||
} => JupyterReplResponse::LspCompletions(
|
||||
self.lsp_completions(&line_text, position).await,
|
||||
),
|
||||
JupyterReplRequest::JsGetProperties { object_id } => {
|
||||
JupyterReplResponse::JsGetProperties(
|
||||
self.get_properties(object_id).await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsEvaluate { expr } => {
|
||||
JupyterReplResponse::JsEvaluate(self.evaluate(expr).await)
|
||||
}
|
||||
JupyterReplRequest::JsGlobalLexicalScopeNames => {
|
||||
JupyterReplResponse::JsGlobalLexicalScopeNames(
|
||||
self.global_lexical_scope_names().await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsEvaluateLineWithObjectWrapping { line } => {
|
||||
JupyterReplResponse::JsEvaluateLineWithObjectWrapping(
|
||||
self.evaluate_line_with_object_wrapping(&line).await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsCallFunctionOnArgs {
|
||||
function_declaration,
|
||||
args,
|
||||
} => JupyterReplResponse::JsCallFunctionOnArgs(
|
||||
self
|
||||
.call_function_on_args(function_declaration, &args)
|
||||
.await,
|
||||
),
|
||||
JupyterReplRequest::JsCallFunctionOn { arg0, arg1 } => {
|
||||
JupyterReplResponse::JsCallFunctionOn(
|
||||
self.call_function_on(arg0, arg1).await,
|
||||
)
|
||||
}
|
||||
};
|
||||
tokio::select! {
|
||||
biased;
|
||||
|
||||
let Ok(()) = self.tx.send(resp) else {
|
||||
break;
|
||||
};
|
||||
maybe_message = self.rx.recv() => {
|
||||
let Some(msg) = maybe_message else {
|
||||
break;
|
||||
};
|
||||
if self.handle_message(msg).await.is_err() {
|
||||
break;
|
||||
}
|
||||
poll_worker = true;
|
||||
},
|
||||
_ = self.repl_session.run_event_loop(), if poll_worker => {
|
||||
poll_worker = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_message(
|
||||
&mut self,
|
||||
msg: JupyterReplRequest,
|
||||
) -> Result<(), AnyError> {
|
||||
let resp = match msg {
|
||||
JupyterReplRequest::LspCompletions {
|
||||
line_text,
|
||||
position,
|
||||
} => JupyterReplResponse::LspCompletions(
|
||||
self.lsp_completions(&line_text, position).await,
|
||||
),
|
||||
JupyterReplRequest::JsGetProperties { object_id } => {
|
||||
JupyterReplResponse::JsGetProperties(
|
||||
self.get_properties(object_id).await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsEvaluate { expr } => {
|
||||
JupyterReplResponse::JsEvaluate(self.evaluate(expr).await)
|
||||
}
|
||||
JupyterReplRequest::JsGlobalLexicalScopeNames => {
|
||||
JupyterReplResponse::JsGlobalLexicalScopeNames(
|
||||
self.global_lexical_scope_names().await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsEvaluateLineWithObjectWrapping { line } => {
|
||||
JupyterReplResponse::JsEvaluateLineWithObjectWrapping(
|
||||
self.evaluate_line_with_object_wrapping(&line).await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsCallFunctionOnArgs {
|
||||
function_declaration,
|
||||
args,
|
||||
} => JupyterReplResponse::JsCallFunctionOnArgs(
|
||||
self
|
||||
.call_function_on_args(function_declaration, &args)
|
||||
.await,
|
||||
),
|
||||
JupyterReplRequest::JsCallFunctionOn { arg0, arg1 } => {
|
||||
JupyterReplResponse::JsCallFunctionOn(
|
||||
self.call_function_on(arg0, arg1).await,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
self.tx.send(resp).map_err(|e| e.into())
|
||||
}
|
||||
|
||||
pub async fn lsp_completions(
|
||||
&mut self,
|
||||
line_text: &str,
|
||||
|
|
|
@ -558,12 +558,7 @@ pub async fn add(
|
|||
result.context("Failed to update configuration file")?;
|
||||
}
|
||||
|
||||
// clear the previously cached package.json from memory before reloading it
|
||||
node_resolver::PackageJsonThreadLocalCache::clear();
|
||||
// make a new CliFactory to pick up the updated config file
|
||||
let cli_factory = CliFactory::from_flags(flags);
|
||||
// cache deps
|
||||
cache_deps::cache_top_level_deps(&cli_factory, Some(jsr_resolver)).await?;
|
||||
npm_install_after_modification(flags, Some(jsr_resolver)).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -786,15 +781,33 @@ pub async fn remove(
|
|||
config.commit().await?;
|
||||
}
|
||||
|
||||
// Update deno.lock
|
||||
node_resolver::PackageJsonThreadLocalCache::clear();
|
||||
let cli_factory = CliFactory::from_flags(flags);
|
||||
cache_deps::cache_top_level_deps(&cli_factory, None).await?;
|
||||
npm_install_after_modification(flags, None).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn npm_install_after_modification(
|
||||
flags: Arc<Flags>,
|
||||
// explicitly provided to prevent redownloading
|
||||
jsr_resolver: Option<Arc<crate::jsr::JsrFetchResolver>>,
|
||||
) -> Result<(), AnyError> {
|
||||
// clear the previously cached package.json from memory before reloading it
|
||||
node_resolver::PackageJsonThreadLocalCache::clear();
|
||||
|
||||
// make a new CliFactory to pick up the updated config file
|
||||
let cli_factory = CliFactory::from_flags(flags);
|
||||
// surface any errors in the package.json
|
||||
let npm_resolver = cli_factory.npm_resolver().await?;
|
||||
if let Some(npm_resolver) = npm_resolver.as_managed() {
|
||||
npm_resolver.ensure_no_pkg_json_dep_errors()?;
|
||||
}
|
||||
// npm install
|
||||
cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn update_config_file_content<
|
||||
I: IntoIterator<Item = (&'static str, Option<String>)>,
|
||||
>(
|
||||
|
|
|
@ -11,6 +11,7 @@ use deno_core::futures::StreamExt;
|
|||
use deno_semver::package::PackageReq;
|
||||
|
||||
pub async fn cache_top_level_deps(
|
||||
// todo(dsherret): don't pass the factory into this function. Instead use ctor deps
|
||||
factory: &CliFactory,
|
||||
jsr_resolver: Option<Arc<crate::jsr::JsrFetchResolver>>,
|
||||
) -> Result<(), AnyError> {
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_broadcast_channel"
|
||||
version = "0.163.0"
|
||||
version = "0.165.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
ext/cache/Cargo.toml
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_cache"
|
||||
version = "0.101.0"
|
||||
version = "0.103.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_canvas"
|
||||
version = "0.38.0"
|
||||
version = "0.40.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@@ -1301,7 +1301,9 @@ function getKeys(value, showHidden) {
ArrayPrototypePushApply(keys, ArrayPrototypeFilter(symbols, filter));
}
}
keys = ArrayPrototypeFilter(keys, (key) => key !== "cause");
if (ObjectPrototypeIsPrototypeOf(ErrorPrototype, value)) {
keys = ArrayPrototypeFilter(keys, (key) => key !== "cause");
}
return keys;
}
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_console"
|
||||
version = "0.169.0"
|
||||
version = "0.171.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_cron"
|
||||
version = "0.49.0"
|
||||
version = "0.51.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -18,21 +18,27 @@ import {
|
|||
op_crypto_decrypt,
|
||||
op_crypto_derive_bits,
|
||||
op_crypto_derive_bits_x25519,
|
||||
op_crypto_derive_bits_x448,
|
||||
op_crypto_encrypt,
|
||||
op_crypto_export_key,
|
||||
op_crypto_export_pkcs8_ed25519,
|
||||
op_crypto_export_pkcs8_x25519,
|
||||
op_crypto_export_pkcs8_x448,
|
||||
op_crypto_export_spki_ed25519,
|
||||
op_crypto_export_spki_x25519,
|
||||
op_crypto_export_spki_x448,
|
||||
op_crypto_generate_ed25519_keypair,
|
||||
op_crypto_generate_key,
|
||||
op_crypto_generate_x25519_keypair,
|
||||
op_crypto_generate_x448_keypair,
|
||||
op_crypto_get_random_values,
|
||||
op_crypto_import_key,
|
||||
op_crypto_import_pkcs8_ed25519,
|
||||
op_crypto_import_pkcs8_x25519,
|
||||
op_crypto_import_pkcs8_x448,
|
||||
op_crypto_import_spki_ed25519,
|
||||
op_crypto_import_spki_x25519,
|
||||
op_crypto_import_spki_x448,
|
||||
op_crypto_jwk_x_ed25519,
|
||||
op_crypto_random_uuid,
|
||||
op_crypto_sign_ed25519,
|
||||
|
@ -134,6 +140,7 @@ const supportedAlgorithms = {
|
|||
"AES-KW": "AesKeyGenParams",
|
||||
"HMAC": "HmacKeyGenParams",
|
||||
"X25519": null,
|
||||
"X448": null,
|
||||
"Ed25519": null,
|
||||
},
|
||||
"sign": {
|
||||
|
@ -165,12 +172,14 @@ const supportedAlgorithms = {
|
|||
"AES-KW": null,
|
||||
"Ed25519": null,
|
||||
"X25519": null,
|
||||
"X448": null,
|
||||
},
|
||||
"deriveBits": {
|
||||
"HKDF": "HkdfParams",
|
||||
"PBKDF2": "Pbkdf2Params",
|
||||
"ECDH": "EcdhKeyDeriveParams",
|
||||
"X25519": "EcdhKeyDeriveParams",
|
||||
"X448": "EcdhKeyDeriveParams",
|
||||
},
|
||||
"encrypt": {
|
||||
"RSA-OAEP": "RsaOaepParams",
|
||||
|
@ -1037,6 +1046,10 @@ class SubtleCrypto {
|
|||
result = exportKeyEd25519(format, key, innerKey);
|
||||
break;
|
||||
}
|
||||
case "X448": {
|
||||
result = exportKeyX448(format, key, innerKey);
|
||||
break;
|
||||
}
|
||||
case "X25519": {
|
||||
result = exportKeyX25519(format, key, innerKey);
|
||||
break;
|
||||
|
@ -1954,6 +1967,48 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
|
||||
return generateKeyAES(normalizedAlgorithm, extractable, usages);
|
||||
}
|
||||
case "X448": {
|
||||
if (
|
||||
ArrayPrototypeFind(
|
||||
usages,
|
||||
(u) => !ArrayPrototypeIncludes(["deriveKey", "deriveBits"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
const privateKeyData = new Uint8Array(56);
|
||||
const publicKeyData = new Uint8Array(56);
|
||||
|
||||
op_crypto_generate_x448_keypair(privateKeyData, publicKeyData);
|
||||
|
||||
const handle = {};
|
||||
WeakMapPrototypeSet(KEY_STORE, handle, privateKeyData);
|
||||
|
||||
const publicHandle = {};
|
||||
WeakMapPrototypeSet(KEY_STORE, publicHandle, publicKeyData);
|
||||
|
||||
const algorithm = {
|
||||
name: algorithmName,
|
||||
};
|
||||
|
||||
const publicKey = constructKey(
|
||||
"public",
|
||||
true,
|
||||
usageIntersection(usages, []),
|
||||
algorithm,
|
||||
publicHandle,
|
||||
);
|
||||
|
||||
const privateKey = constructKey(
|
||||
"private",
|
||||
extractable,
|
||||
usageIntersection(usages, ["deriveKey", "deriveBits"]),
|
||||
algorithm,
|
||||
handle,
|
||||
);
|
||||
|
||||
return { publicKey, privateKey };
|
||||
}
|
||||
case "X25519": {
|
||||
if (
|
||||
ArrayPrototypeFind(
|
||||
|
@ -2100,6 +2155,211 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
}
|
||||
}
|
||||
|
||||
function importKeyX448(
|
||||
format,
|
||||
keyData,
|
||||
extractable,
|
||||
keyUsages,
|
||||
) {
|
||||
switch (format) {
|
||||
case "raw": {
|
||||
// 1.
|
||||
if (keyUsages.length > 0) {
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const handle = {};
|
||||
WeakMapPrototypeSet(KEY_STORE, handle, keyData);
|
||||
|
||||
// 2-3.
|
||||
const algorithm = {
|
||||
name: "X448",
|
||||
};
|
||||
|
||||
// 4-6.
|
||||
return constructKey(
|
||||
"public",
|
||||
extractable,
|
||||
[],
|
||||
algorithm,
|
||||
handle,
|
||||
);
|
||||
}
|
||||
case "spki": {
|
||||
// 1.
|
||||
if (keyUsages.length > 0) {
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const publicKeyData = new Uint8Array(56);
|
||||
if (!op_crypto_import_spki_x448(keyData, publicKeyData)) {
|
||||
throw new DOMException("Invalid key data", "DataError");
|
||||
}
|
||||
|
||||
const handle = {};
|
||||
WeakMapPrototypeSet(KEY_STORE, handle, publicKeyData);
|
||||
|
||||
const algorithm = {
|
||||
name: "X448",
|
||||
};
|
||||
|
||||
return constructKey(
|
||||
"public",
|
||||
extractable,
|
||||
[],
|
||||
algorithm,
|
||||
handle,
|
||||
);
|
||||
}
|
||||
case "pkcs8": {
|
||||
// 1.
|
||||
if (
|
||||
ArrayPrototypeFind(
|
||||
keyUsages,
|
||||
(u) => !ArrayPrototypeIncludes(["deriveKey", "deriveBits"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const privateKeyData = new Uint8Array(32);
|
||||
if (!op_crypto_import_pkcs8_x448(keyData, privateKeyData)) {
|
||||
throw new DOMException("Invalid key data", "DataError");
|
||||
}
|
||||
|
||||
const handle = {};
|
||||
WeakMapPrototypeSet(KEY_STORE, handle, privateKeyData);
|
||||
|
||||
const algorithm = {
|
||||
name: "X448",
|
||||
};
|
||||
|
||||
return constructKey(
|
||||
"private",
|
||||
extractable,
|
||||
usageIntersection(keyUsages, recognisedUsages),
|
||||
algorithm,
|
||||
handle,
|
||||
);
|
||||
}
|
||||
case "jwk": {
|
||||
// 1.
|
||||
const jwk = keyData;
|
||||
|
||||
// 2.
|
||||
if (jwk.d !== undefined) {
|
||||
if (
|
||||
ArrayPrototypeFind(
|
||||
keyUsages,
|
||||
(u) =>
|
||||
!ArrayPrototypeIncludes(
|
||||
["deriveKey", "deriveBits"],
|
||||
u,
|
||||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
}
|
||||
|
||||
// 3.
|
||||
if (jwk.d === undefined && keyUsages.length > 0) {
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 4.
|
||||
if (jwk.kty !== "OKP") {
|
||||
throw new DOMException("Invalid key type", "DataError");
|
||||
}
|
||||
|
||||
// 5.
|
||||
if (jwk.crv !== "X448") {
|
||||
throw new DOMException("Invalid curve", "DataError");
|
||||
}
|
||||
|
||||
// 6.
|
||||
if (keyUsages.length > 0 && jwk.use !== undefined) {
|
||||
if (jwk.use !== "enc") {
|
||||
throw new DOMException("Invalid key use", "DataError");
|
||||
}
|
||||
}
|
||||
|
||||
// 7.
|
||||
if (jwk.key_ops !== undefined) {
|
||||
if (
|
||||
ArrayPrototypeFind(
|
||||
jwk.key_ops,
|
||||
(u) => !ArrayPrototypeIncludes(recognisedUsages, u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException(
|
||||
"'key_ops' property of JsonWebKey is invalid",
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
!ArrayPrototypeEvery(
|
||||
jwk.key_ops,
|
||||
(u) => ArrayPrototypeIncludes(keyUsages, u),
|
||||
)
|
||||
) {
|
||||
throw new DOMException(
|
||||
"'key_ops' property of JsonWebKey is invalid",
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// 8.
|
||||
if (jwk.ext !== undefined && jwk.ext === false && extractable) {
|
||||
throw new DOMException("Invalid key extractability", "DataError");
|
||||
}
|
||||
|
||||
// 9.
|
||||
if (jwk.d !== undefined) {
|
||||
// https://www.rfc-editor.org/rfc/rfc8037#section-2
|
||||
const privateKeyData = op_crypto_base64url_decode(jwk.d);
|
||||
|
||||
const handle = {};
|
||||
WeakMapPrototypeSet(KEY_STORE, handle, privateKeyData);
|
||||
|
||||
const algorithm = {
|
||||
name: "X448",
|
||||
};
|
||||
|
||||
return constructKey(
|
||||
"private",
|
||||
extractable,
|
||||
usageIntersection(keyUsages, ["deriveKey", "deriveBits"]),
|
||||
algorithm,
|
||||
handle,
|
||||
);
|
||||
} else {
|
||||
// https://www.rfc-editor.org/rfc/rfc8037#section-2
|
||||
const publicKeyData = op_crypto_base64url_decode(jwk.x);
|
||||
|
||||
const handle = {};
|
||||
WeakMapPrototypeSet(KEY_STORE, handle, publicKeyData);
|
||||
|
||||
const algorithm = {
|
||||
name: "X448",
|
||||
};
|
||||
|
||||
return constructKey(
|
||||
"public",
|
||||
extractable,
|
||||
[],
|
||||
algorithm,
|
||||
handle,
|
||||
);
|
||||
}
|
||||
}
|
||||
default:
|
||||
throw new DOMException("Not implemented", "NotSupportedError");
|
||||
}
|
||||
}
|
||||
|
||||
function importKeyEd25519(
|
||||
format,
|
||||
keyData,
|
||||
|
@ -3358,6 +3618,14 @@ async function importKeyInner(
|
|||
["wrapKey", "unwrapKey"],
|
||||
);
|
||||
}
|
||||
case "X448": {
|
||||
return importKeyX448(
|
||||
format,
|
||||
keyData,
|
||||
extractable,
|
||||
keyUsages,
|
||||
);
|
||||
}
|
||||
case "X25519": {
|
||||
return importKeyX25519(
|
||||
format,
|
||||
|
@ -4162,6 +4430,66 @@ function exportKeyEd25519(format, key, innerKey) {
|
|||
}
|
||||
}
|
||||
|
||||
function exportKeyX448(format, key, innerKey) {
|
||||
switch (format) {
|
||||
case "raw": {
|
||||
// 1.
|
||||
if (key[_type] !== "public") {
|
||||
throw new DOMException(
|
||||
"Key is not a public key",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
||||
// 2-3.
|
||||
return TypedArrayPrototypeGetBuffer(innerKey);
|
||||
}
|
||||
case "spki": {
|
||||
// 1.
|
||||
if (key[_type] !== "public") {
|
||||
throw new DOMException(
|
||||
"Key is not a public key",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
||||
const spkiDer = op_crypto_export_spki_x448(innerKey);
|
||||
return TypedArrayPrototypeGetBuffer(spkiDer);
|
||||
}
|
||||
case "pkcs8": {
|
||||
// 1.
|
||||
if (key[_type] !== "private") {
|
||||
throw new DOMException(
|
||||
"Key is not a private key",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
||||
const pkcs8Der = op_crypto_export_pkcs8_x448(
|
||||
new Uint8Array([0x04, 0x22, ...new SafeArrayIterator(innerKey)]),
|
||||
);
|
||||
pkcs8Der[15] = 0x20;
|
||||
return TypedArrayPrototypeGetBuffer(pkcs8Der);
|
||||
}
|
||||
case "jwk": {
|
||||
if (key[_type] === "private") {
|
||||
throw new DOMException("Not implemented", "NotSupportedError");
|
||||
}
|
||||
const x = op_crypto_base64url_encode(innerKey);
|
||||
const jwk = {
|
||||
kty: "OKP",
|
||||
crv: "X448",
|
||||
x,
|
||||
"key_ops": key.usages,
|
||||
ext: key[_extractable],
|
||||
};
|
||||
return jwk;
|
||||
}
|
||||
default:
|
||||
throw new DOMException("Not implemented", "NotSupportedError");
|
||||
}
|
||||
}
|
||||
|
||||
function exportKeyX25519(format, key, innerKey) {
|
||||
switch (format) {
|
||||
case "raw": {
|
||||
|
@ -4519,6 +4847,55 @@ async function deriveBits(normalizedAlgorithm, baseKey, length) {
|
|||
|
||||
return TypedArrayPrototypeGetBuffer(buf);
|
||||
}
|
||||
case "X448": {
|
||||
// 1.
|
||||
if (baseKey[_type] !== "private") {
|
||||
throw new DOMException("Invalid key type", "InvalidAccessError");
|
||||
}
|
||||
// 2.
|
||||
const publicKey = normalizedAlgorithm.public;
|
||||
// 3.
|
||||
if (publicKey[_type] !== "public") {
|
||||
throw new DOMException("Invalid key type", "InvalidAccessError");
|
||||
}
|
||||
// 4.
|
||||
if (publicKey[_algorithm].name !== baseKey[_algorithm].name) {
|
||||
throw new DOMException(
|
||||
"Algorithm mismatch",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
||||
// 5.
|
||||
const kHandle = baseKey[_handle];
|
||||
const k = WeakMapPrototypeGet(KEY_STORE, kHandle);
|
||||
|
||||
const uHandle = publicKey[_handle];
|
||||
const u = WeakMapPrototypeGet(KEY_STORE, uHandle);
|
||||
|
||||
const secret = new Uint8Array(56);
|
||||
const isIdentity = op_crypto_derive_bits_x448(k, u, secret);
|
||||
|
||||
// 6.
|
||||
if (isIdentity) {
|
||||
throw new DOMException("Invalid key", "OperationError");
|
||||
}
|
||||
|
||||
// 7.
|
||||
if (length === null) {
|
||||
return TypedArrayPrototypeGetBuffer(secret);
|
||||
} else if (
|
||||
TypedArrayPrototypeGetByteLength(secret) * 8 < length
|
||||
) {
|
||||
throw new DOMException("Invalid length", "OperationError");
|
||||
} else {
|
||||
return ArrayBufferPrototypeSlice(
|
||||
TypedArrayPrototypeGetBuffer(secret),
|
||||
0,
|
||||
MathCeil(length / 8),
|
||||
);
|
||||
}
|
||||
}
|
||||
case "X25519": {
|
||||
// 1.
|
||||
if (baseKey[_type] !== "private") {
|
||||
|
|
|
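The JavaScript changes above wire X448 into importKey, exportKey, and deriveBits in ext/crypto/00_crypto.js. As a rough usage sketch (not part of the patch, and assuming this build of Deno exposes the "X448" algorithm name through crypto.subtle exactly as added here), generating a key pair and round-tripping the public key through the raw and JWK formats looks like this:

// Hedged sketch: exercising the new X448 import/export paths from user code.
const { publicKey, privateKey } = await crypto.subtle.generateKey(
  { name: "X448" },
  true, // extractable
  ["deriveBits"],
) as CryptoKeyPair;

// Export the public key in the formats handled by exportKeyX448 above.
const raw = await crypto.subtle.exportKey("raw", publicKey); // 56-byte u-coordinate
const jwk = await crypto.subtle.exportKey("jwk", publicKey); // { kty: "OKP", crv: "X448", x: ... }

// Re-import the raw public key (the "raw" branch of importKeyX448).
const reimported = await crypto.subtle.importKey(
  "raw",
  raw,
  { name: "X448" },
  true,
  [], // public X448 keys carry no usages
);
console.log(reimported.type, new Uint8Array(raw).length, jwk.crv);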
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_crypto"
|
||||
version = "0.183.0"
|
||||
version = "0.185.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
@ -24,6 +24,7 @@ ctr = "0.9.1"
|
|||
curve25519-dalek = "4.1.3"
|
||||
deno_core.workspace = true
|
||||
deno_web.workspace = true
|
||||
ed448-goldilocks = { version = "0.8.3", features = ["zeroize"] }
|
||||
elliptic-curve = { version = "0.13.1", features = ["std", "pem"] }
|
||||
num-traits = "0.2.14"
|
||||
once_cell.workspace = true
|
||||
|
|
|
@ -64,6 +64,7 @@ mod import_key;
|
|||
mod key;
|
||||
mod shared;
|
||||
mod x25519;
|
||||
mod x448;
|
||||
|
||||
pub use crate::decrypt::op_crypto_decrypt;
|
||||
pub use crate::encrypt::op_crypto_encrypt;
|
||||
|
@ -98,6 +99,14 @@ deno_core::extension!(deno_crypto,
|
|||
x25519::op_crypto_derive_bits_x25519,
|
||||
x25519::op_crypto_import_spki_x25519,
|
||||
x25519::op_crypto_import_pkcs8_x25519,
|
||||
x25519::op_crypto_export_spki_x25519,
|
||||
x25519::op_crypto_export_pkcs8_x25519,
|
||||
x448::op_crypto_generate_x448_keypair,
|
||||
x448::op_crypto_derive_bits_x448,
|
||||
x448::op_crypto_import_spki_x448,
|
||||
x448::op_crypto_import_pkcs8_x448,
|
||||
x448::op_crypto_export_spki_x448,
|
||||
x448::op_crypto_export_pkcs8_x448,
|
||||
ed25519::op_crypto_generate_ed25519_keypair,
|
||||
ed25519::op_crypto_import_spki_ed25519,
|
||||
ed25519::op_crypto_import_pkcs8_ed25519,
|
||||
|
@ -106,8 +115,6 @@ deno_core::extension!(deno_crypto,
|
|||
ed25519::op_crypto_export_spki_ed25519,
|
||||
ed25519::op_crypto_export_pkcs8_ed25519,
|
||||
ed25519::op_crypto_jwk_x_ed25519,
|
||||
x25519::op_crypto_export_spki_x25519,
|
||||
x25519::op_crypto_export_pkcs8_x25519,
|
||||
],
|
||||
esm = [ "00_crypto.js" ],
|
||||
options = {
|
||||
|
|
|
@ -47,10 +47,10 @@ pub fn op_crypto_derive_bits_x25519(
|
|||
let sh_sec = x25519_dalek::x25519(k, u);
|
||||
let point = MontgomeryPoint(sh_sec);
|
||||
if point.ct_eq(&MONTGOMERY_IDENTITY).unwrap_u8() == 1 {
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
secret.copy_from_slice(&sh_sec);
|
||||
true
|
||||
false
|
||||
}
|
||||
|
||||
// id-X25519 OBJECT IDENTIFIER ::= { 1 3 101 110 }
|
||||
|
|
147
ext/crypto/x448.rs
Normal file
|
@ -0,0 +1,147 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::ToJsBuffer;
use ed448_goldilocks::curve::MontgomeryPoint;
use ed448_goldilocks::Scalar;
use elliptic_curve::pkcs8::PrivateKeyInfo;
use elliptic_curve::subtle::ConstantTimeEq;
use rand::rngs::OsRng;
use rand::RngCore;
use spki::der::asn1::BitString;
use spki::der::Decode;
use spki::der::Encode;

#[op2(fast)]
pub fn op_crypto_generate_x448_keypair(
  #[buffer] pkey: &mut [u8],
  #[buffer] pubkey: &mut [u8],
) {
  let mut rng = OsRng;
  rng.fill_bytes(pkey);

  // x448(pkey, 5)
  let point = &MontgomeryPoint::generator()
    * &Scalar::from_bytes(pkey.try_into().unwrap());
  pubkey.copy_from_slice(&point.0);
}

const MONTGOMERY_IDENTITY: MontgomeryPoint = MontgomeryPoint([0; 56]);

#[op2(fast)]
pub fn op_crypto_derive_bits_x448(
  #[buffer] k: &[u8],
  #[buffer] u: &[u8],
  #[buffer] secret: &mut [u8],
) -> bool {
  let k: [u8; 56] = k.try_into().expect("Expected byteLength 56");
  let u: [u8; 56] = u.try_into().expect("Expected byteLength 56");

  // x448(k, u)
  let point = &MontgomeryPoint(u) * &Scalar::from_bytes(k);
  if point.ct_eq(&MONTGOMERY_IDENTITY).unwrap_u8() == 1 {
    return true;
  }

  secret.copy_from_slice(&point.0);
  false
}

// id-X448 OBJECT IDENTIFIER ::= { 1 3 101 111 }
const X448_OID: const_oid::ObjectIdentifier =
  const_oid::ObjectIdentifier::new_unwrap("1.3.101.111");

#[op2]
#[serde]
pub fn op_crypto_export_spki_x448(
  #[buffer] pubkey: &[u8],
) -> Result<ToJsBuffer, AnyError> {
  let key_info = spki::SubjectPublicKeyInfo {
    algorithm: spki::AlgorithmIdentifierRef {
      oid: X448_OID,
      parameters: None,
    },
    subject_public_key: BitString::from_bytes(pubkey)?,
  };
  Ok(
    key_info
      .to_der()
      .map_err(|_| {
        custom_error("DOMExceptionOperationError", "Failed to export key")
      })?
      .into(),
  )
}

#[op2]
#[serde]
pub fn op_crypto_export_pkcs8_x448(
  #[buffer] pkey: &[u8],
) -> Result<ToJsBuffer, AnyError> {
  use rsa::pkcs1::der::Encode;

  let pk_info = rsa::pkcs8::PrivateKeyInfo {
    public_key: None,
    algorithm: rsa::pkcs8::AlgorithmIdentifierRef {
      oid: X448_OID,
      parameters: None,
    },
    private_key: pkey, // OCTET STRING
  };

  let mut buf = Vec::new();
  pk_info.encode_to_vec(&mut buf)?;
  Ok(buf.into())
}

#[op2(fast)]
pub fn op_crypto_import_spki_x448(
  #[buffer] key_data: &[u8],
  #[buffer] out: &mut [u8],
) -> bool {
  // 2-3.
  let pk_info = match spki::SubjectPublicKeyInfoRef::try_from(key_data) {
    Ok(pk_info) => pk_info,
    Err(_) => return false,
  };
  // 4.
  let alg = pk_info.algorithm.oid;
  if alg != X448_OID {
    return false;
  }
  // 5.
  if pk_info.algorithm.parameters.is_some() {
    return false;
  }
  out.copy_from_slice(pk_info.subject_public_key.raw_bytes());
  true
}

#[op2(fast)]
pub fn op_crypto_import_pkcs8_x448(
  #[buffer] key_data: &[u8],
  #[buffer] out: &mut [u8],
) -> bool {
  // 2-3.
  let pk_info = match PrivateKeyInfo::from_der(key_data) {
    Ok(pk_info) => pk_info,
    Err(_) => return false,
  };
  // 4.
  let alg = pk_info.algorithm.oid;
  if alg != X448_OID {
    return false;
  }
  // 5.
  if pk_info.algorithm.parameters.is_some() {
    return false;
  }
  // 6.
  // CurvePrivateKey ::= OCTET STRING
  if pk_info.private_key.len() != 56 {
    return false;
  }
  out.copy_from_slice(&pk_info.private_key[2..]);
  true
}
|
|
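The Rust ops above back those JS paths: key generation fills a 56-byte scalar, and op_crypto_derive_bits_x448 runs the Montgomery ladder and reports whether the result is the identity point (which the JS layer turns into an OperationError). A hedged end-to-end sketch of the Diffie-Hellman style agreement this enables, again assuming "X448" is accepted by crypto.subtle as wired up in this patch:

// Hedged sketch: both sides derive the same 56-byte shared secret via X448.
const alice = await crypto.subtle.generateKey(
  { name: "X448" },
  false,
  ["deriveBits"],
) as CryptoKeyPair;
const bob = await crypto.subtle.generateKey(
  { name: "X448" },
  false,
  ["deriveBits"],
) as CryptoKeyPair;

// Each side combines its private key with the other's public key.
const aliceSecret = await crypto.subtle.deriveBits(
  { name: "X448", public: bob.publicKey },
  alice.privateKey,
  448, // 56 bytes * 8, matching the secret buffer allocated in deriveBits above
);
const bobSecret = await crypto.subtle.deriveBits(
  { name: "X448", public: alice.publicKey },
  bob.privateKey,
  448,
);

// The secrets must match byte-for-byte.
const a = new Uint8Array(aliceSecret);
const b = new Uint8Array(bobSecret);
console.log(a.length === 56 && a.every((v, i) => v === b[i]));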
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_fetch"
|
||||
version = "0.193.0"
|
||||
version = "0.195.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_ffi"
|
||||
version = "0.156.0"
|
||||
version = "0.158.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_fs"
|
||||
version = "0.79.0"
|
||||
version = "0.81.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_http"
|
||||
version = "0.167.0"
|
||||
version = "0.169.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_io"
|
||||
version = "0.79.0"
|
||||
version = "0.81.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_kv"
|
||||
version = "0.77.0"
|
||||
version = "0.79.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_napi"
|
||||
version = "0.100.0"
|
||||
version = "0.102.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_net"
|
||||
version = "0.161.0"
|
||||
version = "0.163.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_node"
|
||||
version = "0.106.0"
|
||||
version = "0.108.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -66,6 +66,7 @@ pub trait NodePermissions {
|
|||
&mut self,
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError>;
|
||||
fn query_read_all(&mut self) -> bool;
|
||||
fn check_sys(&mut self, kind: &str, api_name: &str) -> Result<(), AnyError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_write_with_api_name(
|
||||
|
@ -103,6 +104,10 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
|
|||
deno_permissions::PermissionsContainer::check_read_path(self, path, None)
|
||||
}
|
||||
|
||||
fn query_read_all(&mut self) -> bool {
|
||||
deno_permissions::PermissionsContainer::query_read_all(self)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn check_write_with_api_name(
|
||||
&mut self,
|
||||
|
@ -124,11 +129,12 @@ pub type NodeRequireResolverRc =
|
|||
deno_fs::sync::MaybeArc<dyn NodeRequireResolver>;
|
||||
|
||||
pub trait NodeRequireResolver: std::fmt::Debug + MaybeSend + MaybeSync {
|
||||
fn ensure_read_permission(
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn ensure_read_permission<'a>(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError>;
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError>;
|
||||
}
|
||||
|
||||
pub static NODE_ENV_VAR_ALLOWLIST: Lazy<HashSet<String>> = Lazy::new(|| {
|
||||
|
|
|
@ -15,6 +15,7 @@ use deno_path_util::normalize_path;
|
|||
use node_resolver::NodeModuleKind;
|
||||
use node_resolver::NodeResolutionMode;
|
||||
use node_resolver::REQUIRE_CONDITIONS;
|
||||
use std::borrow::Cow;
|
||||
use std::cell::RefCell;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
@ -25,10 +26,11 @@ use crate::NodeRequireResolverRc;
|
|||
use crate::NodeResolverRc;
|
||||
use crate::NpmResolverRc;
|
||||
|
||||
fn ensure_read_permission<P>(
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn ensure_read_permission<'a, P>(
|
||||
state: &mut OpState,
|
||||
file_path: &Path,
|
||||
) -> Result<(), AnyError>
|
||||
file_path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError>
|
||||
where
|
||||
P: NodePermissions + 'static,
|
||||
{
|
||||
|
@ -107,7 +109,7 @@ where
|
|||
deno_path_util::normalize_path(current_dir.join(from))
|
||||
};
|
||||
|
||||
ensure_read_permission::<P>(state, &from)?;
|
||||
let from = ensure_read_permission::<P>(state, &from)?;
|
||||
|
||||
if cfg!(windows) {
|
||||
// return root node_modules when path is 'D:\\'.
|
||||
|
@ -129,7 +131,7 @@ where
|
|||
}
|
||||
|
||||
let mut paths = Vec::with_capacity(from.components().count());
|
||||
let mut current_path = from.as_path();
|
||||
let mut current_path = from.as_ref();
|
||||
let mut maybe_parent = Some(current_path);
|
||||
while let Some(parent) = maybe_parent {
|
||||
if !parent.ends_with("node_modules") {
|
||||
|
@ -267,7 +269,7 @@ where
|
|||
P: NodePermissions + 'static,
|
||||
{
|
||||
let path = PathBuf::from(path);
|
||||
ensure_read_permission::<P>(state, &path)?;
|
||||
let path = ensure_read_permission::<P>(state, &path)?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
if let Ok(metadata) = fs.stat_sync(&path) {
|
||||
if metadata.is_file {
|
||||
|
@ -290,7 +292,7 @@ where
|
|||
P: NodePermissions + 'static,
|
||||
{
|
||||
let path = PathBuf::from(request);
|
||||
ensure_read_permission::<P>(state, &path)?;
|
||||
let path = ensure_read_permission::<P>(state, &path)?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
let canonicalized_path =
|
||||
deno_core::strip_unc_prefix(fs.realpath_sync(&path)?);
|
||||
|
@ -362,7 +364,7 @@ where
|
|||
if parent_id == "<repl>" || parent_id == "internal/preload" {
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
if let Ok(cwd) = fs.cwd() {
|
||||
ensure_read_permission::<P>(state, &cwd)?;
|
||||
let cwd = ensure_read_permission::<P>(state, &cwd)?;
|
||||
return Ok(Some(cwd.to_string_lossy().into_owned()));
|
||||
}
|
||||
}
|
||||
|
@ -443,7 +445,7 @@ where
|
|||
P: NodePermissions + 'static,
|
||||
{
|
||||
let file_path = PathBuf::from(file_path);
|
||||
ensure_read_permission::<P>(state, &file_path)?;
|
||||
let file_path = ensure_read_permission::<P>(state, &file_path)?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
Ok(fs.read_text_file_lossy_sync(&file_path, None)?)
|
||||
}
|
||||
|
@ -528,7 +530,7 @@ where
|
|||
P: NodePermissions + 'static,
|
||||
{
|
||||
let filename = PathBuf::from(filename);
|
||||
ensure_read_permission::<P>(state, filename.parent().unwrap())?;
|
||||
// permissions: allow reading the closest package.json files
|
||||
let node_resolver = state.borrow::<NodeResolverRc>().clone();
|
||||
node_resolver
|
||||
.get_closest_package_json_from_path(&filename)
|
||||
|
@ -567,7 +569,7 @@ where
|
|||
P: NodePermissions + 'static,
|
||||
{
|
||||
let referrer_path = PathBuf::from(&referrer_filename);
|
||||
ensure_read_permission::<P>(state, &referrer_path)?;
|
||||
let referrer_path = ensure_read_permission::<P>(state, &referrer_path)?;
|
||||
let node_resolver = state.borrow::<NodeResolverRc>();
|
||||
let Some(pkg) =
|
||||
node_resolver.get_closest_package_json_from_path(&referrer_path)?
|
||||
|
|
|
@ -7,6 +7,7 @@ use deno_core::url::Url;
|
|||
use deno_core::OpState;
|
||||
use deno_fs::FileSystemRc;
|
||||
use node_resolver::NodeResolution;
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
|
@ -14,10 +15,11 @@ use crate::NodePermissions;
|
|||
use crate::NodeRequireResolverRc;
|
||||
use crate::NodeResolverRc;
|
||||
|
||||
fn ensure_read_permission<P>(
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn ensure_read_permission<'a, P>(
|
||||
state: &mut OpState,
|
||||
file_path: &Path,
|
||||
) -> Result<(), AnyError>
|
||||
file_path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError>
|
||||
where
|
||||
P: NodePermissions + 'static,
|
||||
{
|
||||
|
@ -47,7 +49,7 @@ where
|
|||
"Relative path entries must start with '.' or '..'",
|
||||
));
|
||||
}
|
||||
ensure_read_permission::<P>(state, &path)?;
|
||||
let path = ensure_read_permission::<P>(state, &path)?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
let canonicalized_path =
|
||||
deno_core::strip_unc_prefix(fs.realpath_sync(&path)?);
|
||||
|
@ -57,7 +59,7 @@ where
|
|||
let url_path = url
|
||||
.to_file_path()
|
||||
.map_err(|e| generic_error(format!("URL to Path-String: {:#?}", e)))?;
|
||||
ensure_read_permission::<P>(state, &url_path)?;
|
||||
let url_path = ensure_read_permission::<P>(state, &url_path)?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
if !fs.exists_sync(&url_path) {
|
||||
return Err(generic_error(format!("File not found [{:?}]", url_path)));
|
||||
|
|
|
@ -173,7 +173,7 @@ export function readSync(
|
|||
validateBuffer(buffer);
|
||||
|
||||
if (length == null) {
|
||||
length = 0;
|
||||
length = buffer.byteLength;
|
||||
}
|
||||
|
||||
if (typeof offsetOrOpt === "number") {
|
||||
|
|
|
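The readSync fix above changes the default for a missing length from 0 (which silently read nothing) to the full buffer.byteLength. A hedged sketch of the call shape this affects; the file name "data.bin" is a placeholder, not from the patch:

// Hedged sketch: readSync without an explicit length now fills the buffer.
import fs from "node:fs";

const fd = fs.openSync("data.bin", "r"); // placeholder path
const buf = new Uint8Array(1024);

// Previously this returned 0 because length defaulted to 0; with the fix it
// reads up to buf.byteLength bytes from the current file position.
const bytesRead = fs.readSync(fd, buf);
fs.closeSync(fd);

console.log(`read ${bytesRead} bytes`);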
@ -302,8 +302,8 @@ class NodeWorker extends EventEmitter {
|
|||
if (this.#status !== "TERMINATED") {
|
||||
this.#status = "TERMINATED";
|
||||
op_host_terminate_worker(this.#id);
|
||||
this.emit("exit", 0);
|
||||
}
|
||||
this.emit("exit", 0);
|
||||
return PromiseResolve(0);
|
||||
}
|
||||
|
||||
|
@ -422,7 +422,11 @@ internals.__initWorkerThreads = (
|
|||
|
||||
parentPort.once = function (this: ParentPort, name, listener) {
|
||||
// deno-lint-ignore no-explicit-any
|
||||
const _listener = (ev: any) => listener(ev.data);
|
||||
const _listener = (ev: any) => {
|
||||
const message = ev.data;
|
||||
patchMessagePortIfFound(message);
|
||||
return listener(message);
|
||||
};
|
||||
listeners.set(listener, _listener);
|
||||
this.addEventListener(name, _listener);
|
||||
return this;
|
||||
|
@ -494,7 +498,9 @@ export function receiveMessageOnPort(port: MessagePort): object | undefined {
|
|||
port[MessagePortReceiveMessageOnPortSymbol] = true;
|
||||
const data = op_message_port_recv_message_sync(port[MessagePortIdSymbol]);
|
||||
if (data === null) return undefined;
|
||||
return { message: deserializeJsMessageData(data)[0] };
|
||||
const message = deserializeJsMessageData(data)[0];
|
||||
patchMessagePortIfFound(message);
|
||||
return { message };
|
||||
}
|
||||
|
||||
class NodeMessageChannel {
|
||||
|
|
|
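The worker changes above route messages received via parentPort listeners and receiveMessageOnPort through patchMessagePortIfFound, so a MessagePort nested inside a message behaves like a Node port on arrival. A hedged sketch of the pattern this fixes; the short delay is only illustrative timing, not part of the patch:

// Hedged sketch: a port transferred inside a message is usable after receipt.
import { MessageChannel, receiveMessageOnPort } from "node:worker_threads";

const { port1, port2 } = new MessageChannel();
const nested = new MessageChannel();

// Transfer nested.port2 inside the payload.
port1.postMessage({ port: nested.port2 }, [nested.port2]);

setTimeout(() => {
  const received = receiveMessageOnPort(port2);
  if (received) {
    // Thanks to patchMessagePortIfFound, the nested port supports the Node
    // MessagePort surface (postMessage, close, ref/unref) here.
    received.message.port.postMessage("hello");
    received.message.port.close();
  }
  nested.port1.close();
  port1.close();
  port2.close();
}, 10);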
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_tls"
|
||||
version = "0.156.0"
|
||||
version = "0.158.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -31,6 +31,7 @@ import * as webidl from "ext:deno_webidl/00_webidl.js";
|
|||
import { createFilteredInspectProxy } from "ext:deno_console/01_console.js";
|
||||
|
||||
const _components = Symbol("components");
|
||||
const urlPatternSettings = { groupStringFallback: false };
|
||||
|
||||
/**
|
||||
* @typedef Components
|
||||
|
@ -349,7 +350,11 @@ class URLPattern {
|
|||
const groups = res.groups;
|
||||
for (let i = 0; i < groupList.length; ++i) {
|
||||
// TODO(lucacasonato): this is vulnerable to override mistake
|
||||
groups[groupList[i]] = match[i + 1] ?? ""; // TODO(@crowlKats): remove fallback for 2.0
|
||||
if (urlPatternSettings.groupStringFallback) {
|
||||
groups[groupList[i]] = match[i + 1] ?? "";
|
||||
} else {
|
||||
groups[groupList[i]] = match[i + 1];
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -422,4 +427,4 @@ webidl.converters.URLPatternOptions = webidl
|
|||
},
|
||||
]);
|
||||
|
||||
export { URLPattern };
|
||||
export { URLPattern, urlPatternSettings };
|
||||
|
|
|
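With groupStringFallback off (the new default added above), a URLPattern group that does not participate in the match is undefined instead of the pre-2.0 empty string. A short sketch of the observable difference; the example.com URLs are illustrative only:

// Hedged sketch: optional group that may or may not match.
const pattern = new URLPattern({ pathname: "/books/:id?" });

console.log(pattern.exec("https://example.com/books/42")?.pathname.groups.id); // "42"
// The optional :id group did not participate here:
console.log(pattern.exec("https://example.com/books")?.pathname.groups.id); // undefined (was "" before)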
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_url"
|
||||
version = "0.169.0"
|
||||
version = "0.171.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -22,6 +22,7 @@ const {
|
|||
Symbol,
|
||||
SymbolFor,
|
||||
SymbolIterator,
|
||||
PromiseResolve,
|
||||
SafeArrayIterator,
|
||||
TypeError,
|
||||
} = primordials;
|
||||
|
@ -41,7 +42,10 @@ import {
|
|||
import { isDetachedBuffer } from "./06_streams.js";
|
||||
import { DOMException } from "./01_dom_exception.js";
|
||||
|
||||
let messageEventListenerCount = 0;
|
||||
// counter of how many message ports are actively refed
|
||||
// either due to the existence of "message" event listeners or
|
||||
// explicit calls to ref/unref (in the case of node message ports)
|
||||
let refedMessagePortsCount = 0;
|
||||
|
||||
class MessageChannel {
|
||||
/** @type {MessagePort} */
|
||||
|
@ -93,6 +97,7 @@ const MessagePortReceiveMessageOnPortSymbol = Symbol(
|
|||
);
|
||||
const _enabled = Symbol("enabled");
|
||||
const _refed = Symbol("refed");
|
||||
const _messageEventListenerCount = Symbol("messageEventListenerCount");
|
||||
const nodeWorkerThreadCloseCb = Symbol("nodeWorkerThreadCloseCb");
|
||||
const nodeWorkerThreadCloseCbInvoked = Symbol("nodeWorkerThreadCloseCbInvoked");
|
||||
export const refMessagePort = Symbol("refMessagePort");
|
||||
|
@ -109,6 +114,9 @@ function createMessagePort(id) {
|
|||
port[core.hostObjectBrand] = core.hostObjectBrand;
|
||||
setEventTargetData(port);
|
||||
port[_id] = id;
|
||||
port[_enabled] = false;
|
||||
port[_messageEventListenerCount] = 0;
|
||||
port[_refed] = false;
|
||||
return port;
|
||||
}
|
||||
|
||||
|
@ -122,12 +130,18 @@ function nodeWorkerThreadMaybeInvokeCloseCb(port) {
|
|||
}
|
||||
}
|
||||
|
||||
const _isRefed = Symbol("isRefed");
|
||||
const _dataPromise = Symbol("dataPromise");
|
||||
|
||||
class MessagePort extends EventTarget {
|
||||
/** @type {number | null} */
|
||||
[_id] = null;
|
||||
/** @type {boolean} */
|
||||
[_enabled] = false;
|
||||
[_refed] = false;
|
||||
/** @type {Promise<any> | undefined} */
|
||||
[_dataPromise] = undefined;
|
||||
[_messageEventListenerCount] = 0;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
|
@ -193,24 +207,21 @@ class MessagePort extends EventTarget {
|
|||
this[_enabled] = true;
|
||||
while (true) {
|
||||
if (this[_id] === null) break;
|
||||
// Exit if no message event listeners are present in Node compat mode.
|
||||
if (
|
||||
typeof this[nodeWorkerThreadCloseCb] == "function" &&
|
||||
messageEventListenerCount === 0
|
||||
) break;
|
||||
let data;
|
||||
try {
|
||||
data = await op_message_port_recv_message(
|
||||
this[_dataPromise] = op_message_port_recv_message(
|
||||
this[_id],
|
||||
);
|
||||
if (
|
||||
typeof this[nodeWorkerThreadCloseCb] === "function" &&
|
||||
!this[_refed]
|
||||
) {
|
||||
core.unrefOpPromise(this[_dataPromise]);
|
||||
}
|
||||
data = await this[_dataPromise];
|
||||
this[_dataPromise] = undefined;
|
||||
} catch (err) {
|
||||
if (ObjectPrototypeIsPrototypeOf(InterruptedPrototype, err)) {
|
||||
// If we were interrupted, check if the interruption is coming
|
||||
// from `receiveMessageOnPort` API from Node compat, if so, continue.
|
||||
if (this[MessagePortReceiveMessageOnPortSymbol]) {
|
||||
this[MessagePortReceiveMessageOnPortSymbol] = false;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
nodeWorkerThreadMaybeInvokeCloseCb(this);
|
||||
|
@ -246,12 +257,26 @@ class MessagePort extends EventTarget {
|
|||
}
|
||||
|
||||
[refMessagePort](ref) {
|
||||
if (ref && !this[_refed]) {
|
||||
this[_refed] = true;
|
||||
messageEventListenerCount++;
|
||||
} else if (!ref && this[_refed]) {
|
||||
this[_refed] = false;
|
||||
messageEventListenerCount = 0;
|
||||
if (ref) {
|
||||
if (!this[_refed]) {
|
||||
refedMessagePortsCount++;
|
||||
if (
|
||||
this[_dataPromise]
|
||||
) {
|
||||
core.refOpPromise(this[_dataPromise]);
|
||||
}
|
||||
this[_refed] = true;
|
||||
}
|
||||
} else if (!ref) {
|
||||
if (this[_refed]) {
|
||||
refedMessagePortsCount--;
|
||||
if (
|
||||
this[_dataPromise]
|
||||
) {
|
||||
core.unrefOpPromise(this[_dataPromise]);
|
||||
}
|
||||
this[_refed] = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -266,15 +291,20 @@ class MessagePort extends EventTarget {
|
|||
|
||||
removeEventListener(...args) {
|
||||
if (args[0] == "message") {
|
||||
messageEventListenerCount--;
|
||||
if (--this[_messageEventListenerCount] === 0 && this[_refed]) {
|
||||
refedMessagePortsCount--;
|
||||
this[_refed] = false;
|
||||
}
|
||||
}
|
||||
super.removeEventListener(...new SafeArrayIterator(args));
|
||||
}
|
||||
|
||||
addEventListener(...args) {
|
||||
if (args[0] == "message") {
|
||||
messageEventListenerCount++;
|
||||
if (!this[_refed]) this[_refed] = true;
|
||||
if (++this[_messageEventListenerCount] === 1 && !this[_refed]) {
|
||||
refedMessagePortsCount++;
|
||||
this[_refed] = true;
|
||||
}
|
||||
}
|
||||
super.addEventListener(...new SafeArrayIterator(args));
|
||||
}
|
||||
|
@ -295,7 +325,17 @@ class MessagePort extends EventTarget {
|
|||
}
|
||||
|
||||
defineEventHandler(MessagePort.prototype, "message", function (self) {
|
||||
self.start();
|
||||
if (self[nodeWorkerThreadCloseCb]) {
|
||||
(async () => {
|
||||
// delay `start()` until the end of this event loop turn, to give `receiveMessageOnPort`
|
||||
// a chance to receive a message first. this is primarily to resolve an issue with
|
||||
// a pattern used in `npm:piscina` that results in an indefinite hang
|
||||
await PromiseResolve();
|
||||
self.start();
|
||||
})();
|
||||
} else {
|
||||
self.start();
|
||||
}
|
||||
});
|
||||
defineEventHandler(MessagePort.prototype, "messageerror");
|
||||
|
||||
|
@ -463,12 +503,12 @@ function structuredClone(value, options) {
|
|||
export {
|
||||
deserializeJsMessageData,
|
||||
MessageChannel,
|
||||
messageEventListenerCount,
|
||||
MessagePort,
|
||||
MessagePortIdSymbol,
|
||||
MessagePortPrototype,
|
||||
MessagePortReceiveMessageOnPortSymbol,
|
||||
nodeWorkerThreadCloseCb,
|
||||
refedMessagePortsCount,
|
||||
serializeJsMessageData,
|
||||
structuredClone,
|
||||
};
|
||||
|
|
|
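The ref bookkeeping above replaces the old global messageEventListenerCount with a per-port listener count plus a global refedMessagePortsCount, and refs/unrefs the pending op_message_port_recv_message promise accordingly. From user code (via the Node compat layer) the observable contract is roughly the following hedged sketch:

// Hedged sketch: a "message" listener refs a port and keeps the event loop
// alive; closing (or unref-ing) the ports releases it so Deno can exit.
import { MessageChannel } from "node:worker_threads";

const { port1, port2 } = new MessageChannel();

// Adding a "message" listener bumps refedMessagePortsCount, so the process
// stays alive while a message may still arrive.
port2.on("message", (msg) => {
  console.log("got", msg);
  port1.close();
  port2.close(); // dropping the last ref lets the process exit
});

port1.postMessage("hello");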
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_web"
|
||||
version = "0.200.0"
|
||||
version = "0.202.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -239,7 +239,6 @@ pub fn op_message_port_recv_message_sync(
|
|||
#[smi] rid: ResourceId,
|
||||
) -> Result<Option<JsMessageData>, AnyError> {
|
||||
let resource = state.resource_table.get::<MessagePortResource>(rid)?;
|
||||
resource.cancel.cancel();
|
||||
let mut rx = resource.port.rx.borrow_mut();
|
||||
|
||||
match rx.try_recv() {
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_webgpu"
|
||||
version = "0.136.0"
|
||||
version = "0.138.0"
|
||||
authors = ["the Deno authors"]
|
||||
edition.workspace = true
|
||||
license = "MIT"
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_webidl"
|
||||
version = "0.169.0"
|
||||
version = "0.171.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_websocket"
|
||||
version = "0.174.0"
|
||||
version = "0.176.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -143,6 +143,9 @@ function createStorage(persistent) {
|
|||
if (ReflectHas(target, key)) {
|
||||
return undefined;
|
||||
}
|
||||
if (typeof key === "symbol") {
|
||||
return undefined;
|
||||
}
|
||||
const value = target.getItem(key);
|
||||
if (value === null) {
|
||||
return undefined;
|
||||
|
|
|
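The added typeof key === "symbol" guard above keeps symbol-keyed proxy lookups (for example Symbol.iterator probes from console inspection or spreading) from being forwarded to getItem, which expects a string key. A small hedged sketch of the affected access pattern; it assumes localStorage is available in the current run:

// Hedged sketch: symbol-keyed property access on localStorage.
localStorage.setItem("greeting", "hi");

console.log(localStorage.greeting); // "hi" (string key, forwarded to getItem)

// Symbol keys now short-circuit to undefined instead of reaching getItem.
console.log(
  (localStorage as unknown as Record<PropertyKey, unknown>)[Symbol.iterator],
); // undefined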
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_webstorage"
|
||||
version = "0.164.0"
|
||||
version = "0.166.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_resolver"
|
||||
version = "0.1.0"
|
||||
version = "0.3.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
@ -22,6 +22,7 @@ deno_package_json.features = ["sync"]
|
|||
deno_path_util.workspace = true
|
||||
deno_semver.workspace = true
|
||||
node_resolver.workspace = true
|
||||
thiserror.workspace = true
|
||||
url.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
|
|
|
@ -5,8 +5,6 @@ use std::path::Path;
|
|||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::bail;
|
||||
use anyhow::Error as AnyError;
|
||||
use deno_package_json::PackageJson;
|
||||
use deno_package_json::PackageJsonDepValue;
|
||||
use deno_path_util::url_to_file_path;
|
||||
|
@ -18,6 +16,7 @@ use node_resolver::errors::PackageJsonLoadError;
|
|||
use node_resolver::errors::PackageNotFoundError;
|
||||
use node_resolver::load_pkg_json;
|
||||
use node_resolver::NpmResolver;
|
||||
use thiserror::Error;
|
||||
use url::Url;
|
||||
|
||||
use crate::fs::DenoPkgJsonFsAdapter;
|
||||
|
@ -25,6 +24,18 @@ use crate::fs::DenoResolverFs;
|
|||
|
||||
use super::local::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum ByonmResolvePkgFolderFromDenoReqError {
|
||||
#[error("Could not find \"{}\" in a node_modules folder. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?", .0)]
|
||||
MissingAlias(String),
|
||||
#[error(transparent)]
|
||||
PackageJson(#[from] PackageJsonLoadError),
|
||||
#[error("Could not find a matching package for 'npm:{}' in the node_modules directory. Ensure you have all your JSR and npm dependencies listed in your deno.json or package.json, then run `deno install`. Alternatively, turn on auto-install by specifying `\"nodeModulesDir\": \"auto\"` in your deno.json file.", .0)]
|
||||
UnmatchedReq(PackageReq),
|
||||
#[error(transparent)]
|
||||
Io(#[from] std::io::Error),
|
||||
}
|
||||
|
||||
pub struct ByonmNpmResolverCreateOptions<Fs: DenoResolverFs> {
|
||||
pub fs: Fs,
|
||||
// todo(dsherret): investigate removing this
|
||||
|
@ -100,12 +111,12 @@ impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
|
|||
&self,
|
||||
req: &PackageReq,
|
||||
referrer: &Url,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, ByonmResolvePkgFolderFromDenoReqError> {
|
||||
fn node_resolve_dir<Fs: DenoResolverFs>(
|
||||
fs: &Fs,
|
||||
alias: &str,
|
||||
start_dir: &Path,
|
||||
) -> Result<Option<PathBuf>, AnyError> {
|
||||
) -> std::io::Result<Option<PathBuf>> {
|
||||
for ancestor in start_dir.ancestors() {
|
||||
let node_modules_folder = ancestor.join("node_modules");
|
||||
let sub_dir = join_package_name(&node_modules_folder, alias);
|
||||
|
@ -131,14 +142,7 @@ impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
|
|||
return Ok(resolved);
|
||||
}
|
||||
|
||||
bail!(
|
||||
concat!(
|
||||
"Could not find \"{}\" in a node_modules folder. ",
|
||||
"Deno expects the node_modules/ directory to be up to date. ",
|
||||
"Did you forget to run `deno install`?"
|
||||
),
|
||||
alias,
|
||||
);
|
||||
Err(ByonmResolvePkgFolderFromDenoReqError::MissingAlias(alias))
|
||||
}
|
||||
None => {
|
||||
// now check if node_modules/.deno/ matches this constraint
|
||||
|
@ -146,16 +150,9 @@ impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
|
|||
return Ok(folder);
|
||||
}
|
||||
|
||||
bail!(
|
||||
concat!(
|
||||
"Could not find a matching package for 'npm:{}' in the node_modules ",
|
||||
"directory. Ensure you have all your JSR and npm dependencies listed ",
|
||||
"in your deno.json or package.json, then run `deno install`. Alternatively, ",
|
||||
r#"turn on auto-install by specifying `"nodeModulesDir": "auto"` in your "#,
|
||||
"deno.json file."
|
||||
),
|
||||
req,
|
||||
);
|
||||
Err(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(
|
||||
req.clone(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -164,7 +161,7 @@ impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
|
|||
&self,
|
||||
req: &PackageReq,
|
||||
referrer: &Url,
|
||||
) -> Result<Option<(Arc<PackageJson>, String)>, AnyError> {
|
||||
) -> Result<Option<(Arc<PackageJson>, String)>, PackageJsonLoadError> {
|
||||
fn resolve_alias_from_pkg_json(
|
||||
req: &PackageReq,
|
||||
pkg_json: &PackageJson,
|
||||
|
@ -256,7 +253,24 @@ impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
|
|||
let Ok(version) = Version::parse_from_npm(version) else {
|
||||
continue;
|
||||
};
|
||||
if req.version_req.matches(&version) {
|
||||
if let Some(tag) = req.version_req.tag() {
|
||||
let initialized_file =
|
||||
node_modules_deno_dir.join(&entry.name).join(".initialized");
|
||||
let Ok(contents) = self.fs.read_to_string_lossy(&initialized_file)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let mut tags = contents.split(',').map(str::trim);
|
||||
if tags.any(|t| t == tag) {
|
||||
if let Some((best_version_version, _)) = &best_version {
|
||||
if version > *best_version_version {
|
||||
best_version = Some((version, entry.name));
|
||||
}
|
||||
} else {
|
||||
best_version = Some((version, entry.name));
|
||||
}
|
||||
}
|
||||
} else if req.version_req.matches(&version) {
|
||||
if let Some((best_version_version, _)) = &best_version {
|
||||
if version > *best_version_version {
|
||||
best_version = Some((version, entry.name));
|
||||
|
|
|
@ -5,4 +5,5 @@ mod local;
|
|||
|
||||
pub use byonm::ByonmNpmResolver;
|
||||
pub use byonm::ByonmNpmResolverCreateOptions;
|
||||
pub use byonm::ByonmResolvePkgFolderFromDenoReqError;
|
||||
pub use local::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "node_resolver"
|
||||
version = "0.8.0"
|
||||
version = "0.10.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_runtime"
|
||||
version = "0.178.0"
|
||||
version = "0.180.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -169,8 +169,11 @@ let isClosing = false;
|
|||
let globalDispatchEvent;
|
||||
|
||||
function hasMessageEventListener() {
|
||||
// the function name is kind of a misnomer, but we want to behave
|
||||
// as if we have message event listeners if a node message port is explicitly
|
||||
// refed (and the inverse as well)
|
||||
return event.listenerCount(globalThis, "message") > 0 ||
|
||||
messagePort.messageEventListenerCount > 0;
|
||||
messagePort.refedMessagePortsCount > 0;
|
||||
}
|
||||
|
||||
async function pollForMessages() {
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_permissions"
|
||||
version = "0.29.0"
|
||||
version = "0.31.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -476,6 +476,9 @@ impl<TQuery: QueryDescriptor> UnaryPermission<TQuery> {
|
|||
if state != PermissionState::Prompt {
|
||||
return state;
|
||||
}
|
||||
if !self.prompt {
|
||||
return PermissionState::Denied;
|
||||
}
|
||||
let mut message = String::with_capacity(40);
|
||||
message.push_str(&format!("{} access", TQuery::flag_name()));
|
||||
if let Some(desc) = desc {
|
||||
|
@ -2282,6 +2285,11 @@ impl PermissionsContainer {
|
|||
self.inner.lock().read.check_all(Some(api_name))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn query_read_all(&self) -> bool {
|
||||
self.inner.lock().read.query(None) == PermissionState::Granted
|
||||
}
|
||||
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
#[inline(always)]
|
||||
pub fn check_write(
|
||||
|
@ -2611,8 +2619,13 @@ impl PermissionsContainer {
|
|||
&self,
|
||||
path: Option<&str>,
|
||||
) -> Result<PermissionState, AnyError> {
|
||||
let inner = self.inner.lock();
|
||||
let permission = &inner.read;
|
||||
if permission.is_allow_all() {
|
||||
return Ok(PermissionState::Granted);
|
||||
}
|
||||
Ok(
|
||||
self.inner.lock().read.query(
|
||||
permission.query(
|
||||
path
|
||||
.map(|path| {
|
||||
Result::<_, AnyError>::Ok(
|
||||
|
@ -2630,8 +2643,13 @@ impl PermissionsContainer {
|
|||
&self,
|
||||
path: Option<&str>,
|
||||
) -> Result<PermissionState, AnyError> {
|
||||
let inner = self.inner.lock();
|
||||
let permission = &inner.write;
|
||||
if permission.is_allow_all() {
|
||||
return Ok(PermissionState::Granted);
|
||||
}
|
||||
Ok(
|
||||
self.inner.lock().write.query(
|
||||
permission.query(
|
||||
path
|
||||
.map(|path| {
|
||||
Result::<_, AnyError>::Ok(
|
||||
|
@ -2649,8 +2667,13 @@ impl PermissionsContainer {
|
|||
&self,
|
||||
host: Option<&str>,
|
||||
) -> Result<PermissionState, AnyError> {
|
||||
let inner = self.inner.lock();
|
||||
let permission = &inner.net;
|
||||
if permission.is_allow_all() {
|
||||
return Ok(PermissionState::Granted);
|
||||
}
|
||||
Ok(
|
||||
self.inner.lock().net.query(
|
||||
permission.query(
|
||||
match host {
|
||||
None => None,
|
||||
Some(h) => Some(self.descriptor_parser.parse_net_descriptor(h)?),
|
||||
|
@ -2662,7 +2685,12 @@ impl PermissionsContainer {
|
|||
|
||||
#[inline(always)]
|
||||
pub fn query_env(&self, var: Option<&str>) -> PermissionState {
|
||||
self.inner.lock().env.query(var)
|
||||
let inner = self.inner.lock();
|
||||
let permission = &inner.env;
|
||||
if permission.is_allow_all() {
|
||||
return PermissionState::Granted;
|
||||
}
|
||||
permission.query(var)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -2670,8 +2698,13 @@ impl PermissionsContainer {
|
|||
&self,
|
||||
kind: Option<&str>,
|
||||
) -> Result<PermissionState, AnyError> {
|
||||
let inner = self.inner.lock();
|
||||
let permission = &inner.sys;
|
||||
if permission.is_allow_all() {
|
||||
return Ok(PermissionState::Granted);
|
||||
}
|
||||
Ok(
|
||||
self.inner.lock().sys.query(
|
||||
permission.query(
|
||||
kind
|
||||
.map(|kind| self.descriptor_parser.parse_sys_descriptor(kind))
|
||||
.transpose()?
|
||||
|
@ -2685,8 +2718,13 @@ impl PermissionsContainer {
|
|||
&self,
|
||||
cmd: Option<&str>,
|
||||
) -> Result<PermissionState, AnyError> {
|
||||
let inner = self.inner.lock();
|
||||
let permission = &inner.run;
|
||||
if permission.is_allow_all() {
|
||||
return Ok(PermissionState::Granted);
|
||||
}
|
||||
Ok(
|
||||
self.inner.lock().run.query(
|
||||
permission.query(
|
||||
cmd
|
||||
.map(|request| self.descriptor_parser.parse_run_query(request))
|
||||
.transpose()?
|
||||
|
@ -2700,8 +2738,13 @@ impl PermissionsContainer {
|
|||
&self,
|
||||
path: Option<&str>,
|
||||
) -> Result<PermissionState, AnyError> {
|
||||
let inner = self.inner.lock();
|
||||
let permission = &inner.ffi;
|
||||
if permission.is_allow_all() {
|
||||
return Ok(PermissionState::Granted);
|
||||
}
|
||||
Ok(
|
||||
self.inner.lock().ffi.query(
|
||||
permission.query(
|
||||
path
|
||||
.map(|path| {
|
||||
Result::<_, AnyError>::Ok(
|
||||
|
@ -3906,7 +3949,8 @@ mod tests {
|
|||
fn test_request() {
|
||||
set_prompter(Box::new(TestPrompter));
|
||||
let parser = TestPermissionDescriptorParser;
|
||||
let mut perms: Permissions = Permissions::none_without_prompt();
|
||||
let mut perms: Permissions = Permissions::none_with_prompt();
|
||||
let mut perms_no_prompt: Permissions = Permissions::none_without_prompt();
|
||||
let read_query =
|
||||
|path: &str| parser.parse_path_query(path).unwrap().into_read();
|
||||
let write_query =
|
||||
|
@ -3955,6 +3999,7 @@ mod tests {
|
|||
assert_eq!(perms.run.query(None), PermissionState::Prompt);
|
||||
prompt_value.set(false);
|
||||
assert_eq!(perms.run.request(Some(&run_query)), PermissionState::Granted);
|
||||
assert_eq!(perms_no_prompt.read.request(Some(&read_query("/foo"))), PermissionState::Denied);
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
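Two of the permission changes above are visible from scripts: query_* now short-circuits to Granted whenever the corresponding --allow-* flag grants everything (no per-path state is consulted), and request() resolves to Denied instead of prompting when prompting is disabled. A hedged sketch, assuming it is run as a file (the name sketch.ts and the paths are illustrative) with --allow-read --no-prompt:

// Hedged sketch (run with: deno run --allow-read --no-prompt sketch.ts).
const read = await Deno.permissions.query({ name: "read", path: "/tmp" });
console.log(read.state); // "granted": allow-all read short-circuits the query

const env = await Deno.permissions.request({ name: "env", variable: "HOME" });
console.log(env.state); // "denied": --no-prompt turns requests into denials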
@ -97,6 +97,9 @@ impl deno_node::NodePermissions for Permissions {
|
|||
) -> Result<PathBuf, deno_core::error::AnyError> {
|
||||
unreachable!("snapshotting!")
|
||||
}
|
||||
fn query_read_all(&mut self) -> bool {
|
||||
unreachable!("snapshotting!")
|
||||
}
|
||||
fn check_write_with_api_name(
|
||||
&mut self,
|
||||
_p: &str,
|
||||
|
|
|
@ -628,3 +628,37 @@ async fn jupyter_store_history_false() -> Result<()> {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn jupyter_http_server() -> Result<()> {
|
||||
let (_ctx, client, _process) = setup().await;
|
||||
client
|
||||
.send(
|
||||
Shell,
|
||||
"execute_request",
|
||||
json!({
|
||||
"silent": false,
|
||||
"store_history": false,
|
||||
"code": r#"Deno.serve({ port: 10234 }, (req) => Response.json({ hello: "world" }))"#,
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let reply = client.recv(Shell).await?;
|
||||
assert_eq!(reply.header.msg_type, "execute_reply");
|
||||
assert_json_subset(
|
||||
reply.content,
|
||||
json!({
|
||||
"status": "ok",
|
||||
"execution_count": 0,
|
||||
}),
|
||||
);
|
||||
|
||||
for _ in 0..3 {
|
||||
let resp = reqwest::get("http://localhost:10234").await.unwrap();
|
||||
let text: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(text, json!({ "hello": "world" }));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -8504,6 +8504,74 @@ fn lsp_completions_auto_import_and_quick_fix_with_import_map() {
|
|||
client.shutdown();
|
||||
}
|
||||
|
||||
// Regression test for https://github.com/denoland/deno/issues/25775.
|
||||
#[test]
|
||||
fn lsp_quick_fix_missing_import_exclude_bare_node_builtins() {
|
||||
let context = TestContextBuilder::new()
|
||||
.use_http_server()
|
||||
.use_temp_cwd()
|
||||
.add_npm_env_vars()
|
||||
.build();
|
||||
let temp_dir = context.temp_dir();
|
||||
temp_dir.write(
|
||||
"package.json",
|
||||
json!({
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
},
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
context.run_npm("install");
|
||||
let mut client = context.new_lsp_command().build();
|
||||
client.initialize_default();
|
||||
let diagnostics = client.did_open(json!({
|
||||
"textDocument": {
|
||||
"uri": temp_dir.url().join("file.ts").unwrap(),
|
||||
"languageId": "typescript",
|
||||
"version": 1,
|
||||
// Include node:buffer import to ensure @types/node is in the dep graph.
|
||||
"text": "import \"node:buffer\";\nassert();\n",
|
||||
},
|
||||
}));
|
||||
let diagnostic = diagnostics
|
||||
.all()
|
||||
.into_iter()
|
||||
.find(|d| d.message == "Cannot find name 'assert'.")
|
||||
.unwrap();
|
||||
let res = client.write_request(
|
||||
"textDocument/codeAction",
|
||||
json!({
|
||||
"textDocument": {
|
||||
"uri": temp_dir.url().join("file.ts").unwrap(),
|
||||
},
|
||||
"range": {
|
||||
"start": { "line": 1, "character": 0 },
|
||||
"end": { "line": 1, "character": 6 },
|
||||
},
|
||||
"context": {
|
||||
"diagnostics": [&diagnostic],
|
||||
"only": ["quickfix"],
|
||||
},
|
||||
}),
|
||||
);
|
||||
let code_actions =
|
||||
serde_json::from_value::<Vec<lsp::CodeAction>>(res).unwrap();
|
||||
let titles = code_actions
|
||||
.iter()
|
||||
.map(|a| a.title.clone())
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
json!(titles),
|
||||
json!([
|
||||
"Add import from \"node:assert\"",
|
||||
"Add import from \"node:console\"",
|
||||
"Add missing function declaration 'assert'",
|
||||
]),
|
||||
);
|
||||
client.shutdown();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lsp_completions_snippet() {
|
||||
let context = TestContextBuilder::new().use_temp_cwd().build();
|
||||
|
|
|
@ -1136,3 +1136,22 @@ fn eval_file_promise_error() {
|
|||
assert_contains!(out, "Uncaught undefined");
|
||||
assert!(err.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn repl_json_imports() {
|
||||
let context = TestContextBuilder::default().use_temp_cwd().build();
|
||||
let temp_dir = context.temp_dir();
|
||||
temp_dir.write("./data.json", r#"{"hello": "world"}"#);
|
||||
context
|
||||
.new_command()
|
||||
.env("NO_COLOR", "1")
|
||||
.args_vec(["repl", "-A"])
|
||||
.with_pty(|mut console| {
|
||||
console.write_line_raw(
|
||||
"import data from './data.json' with { type: 'json' };",
|
||||
);
|
||||
console.expect("undefined");
|
||||
console.write_line_raw("data");
|
||||
console.expect(r#"{ hello: "world" }"#);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -926,9 +926,7 @@ fn lock_redirects() {
|
|||
);
|
||||
}
|
||||
|
||||
// TODO(2.0): this should be rewritten to a spec test and first run `deno install`
|
||||
#[test]
|
||||
#[ignore]
|
||||
fn lock_deno_json_package_json_deps() {
|
||||
let context = TestContextBuilder::new()
|
||||
.use_temp_cwd()
|
||||
|
@ -942,6 +940,7 @@ fn lock_deno_json_package_json_deps() {
|
|||
|
||||
// add a jsr and npm dependency
|
||||
deno_json.write_json(&json!({
|
||||
"nodeModulesDir": "auto",
|
||||
"imports": {
|
||||
"esm-basic": "npm:@denotest/esm-basic",
|
||||
"module_graph": "jsr:@denotest/module-graph@1.4",
|
||||
|
@ -984,6 +983,7 @@ fn lock_deno_json_package_json_deps() {
|
|||
// now remove the npm dependency from the deno.json and move
|
||||
// it to a package.json that uses an alias
|
||||
deno_json.write_json(&json!({
|
||||
"nodeModulesDir": "auto",
|
||||
"imports": {
|
||||
"module_graph": "jsr:@denotest/module-graph@1.4",
|
||||
}
|
||||
|
@ -1060,7 +1060,9 @@ fn lock_deno_json_package_json_deps() {
|
|||
}));
|
||||
|
||||
// now remove the deps from the deno.json
|
||||
deno_json.write("{}");
|
||||
deno_json.write_json(&json!({
|
||||
"nodeModulesDir": "auto"
|
||||
}));
|
||||
main_ts.write("");
|
||||
context
|
||||
.new_command()
|
||||
|
@ -3512,6 +3514,22 @@ itest!(no_prompt_flag {
|
|||
output_str: Some(""),
|
||||
});
|
||||
|
||||
#[test]
|
||||
fn permission_request_with_no_prompt() {
|
||||
TestContext::default()
|
||||
.new_command()
|
||||
.env("NO_COLOR", "1")
|
||||
.args_vec([
|
||||
"run",
|
||||
"--quiet",
|
||||
"--no-prompt",
|
||||
"run/permission_request_no_prompt.ts",
|
||||
])
|
||||
.with_pty(|mut console| {
|
||||
console.expect("PermissionStatus { state: \"denied\", onchange: null }");
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deno_no_prompt_environment_variable() {
|
||||
let output = util::deno_cmd()
|
||||
|
|
|
@ -18,19 +18,6 @@
|
|||
// http_server: true,
|
||||
// });
|
||||
|
||||
// TODO(2.0): decide what to do with this test
|
||||
// should not auto-install the packages in the package.json
|
||||
// when using nodeModulesDir: false
|
||||
// itest!(task_package_json_node_modules_dir_false {
|
||||
// args: "task echo",
|
||||
// cwd: Some("task/package_json_node_modules_dir_false/"),
|
||||
// output: "task/package_json_node_modules_dir_false/bin.out",
|
||||
// copy_temp_dir: Some("task/package_json_node_modules_dir_false/"),
|
||||
// envs: env_vars_for_npm_tests(),
|
||||
// exit_code: 0,
|
||||
// http_server: true,
|
||||
// });
|
||||
|
||||
// TODO(2.0): not entirely clear what's wrong with this test but it hangs for more than 60s
|
||||
// itest!(task_npx_on_own {
|
||||
// args: "task on-own",
|
||||
|
|
|
@ -13,6 +13,7 @@ repository.workspace = true
|
|||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
libuv-sys-lite = "=1.48.2"
|
||||
napi-sys = { version = "=2.2.2", default-features = false, features = ["napi7"] }
|
||||
|
||||
[dev-dependencies]
|
||||
|
|
|
@ -31,6 +31,7 @@ pub mod strings;
|
|||
pub mod symbol;
|
||||
pub mod tsfn;
|
||||
pub mod typedarray;
|
||||
pub mod uv;
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! cstr {
|
||||
|
@ -138,6 +139,7 @@ unsafe extern "C" fn napi_register_module_v1(
|
|||
#[cfg(windows)]
|
||||
{
|
||||
napi_sys::setup();
|
||||
libuv_sys_lite::setup();
|
||||
}
|
||||
|
||||
// We create a fresh exports object and leave the passed
|
||||
|
@ -169,6 +171,7 @@ unsafe extern "C" fn napi_register_module_v1(
|
|||
symbol::init(env, exports);
|
||||
make_callback::init(env, exports);
|
||||
object::init(env, exports);
|
||||
uv::init(env, exports);
|
||||
|
||||
init_cleanup_hook(env, exports);
|
||||
|
||||
|
|
206
tests/napi/src/uv.rs
Normal file
|
@ -0,0 +1,206 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::assert_napi_ok;
|
||||
use crate::napi_get_callback_info;
|
||||
use crate::napi_new_property;
|
||||
use libuv_sys_lite::uv_async_init;
|
||||
use libuv_sys_lite::uv_async_t;
|
||||
use libuv_sys_lite::uv_close;
|
||||
use libuv_sys_lite::uv_handle_t;
|
||||
use libuv_sys_lite::uv_mutex_destroy;
|
||||
use libuv_sys_lite::uv_mutex_lock;
|
||||
use libuv_sys_lite::uv_mutex_t;
|
||||
use libuv_sys_lite::uv_mutex_unlock;
|
||||
use napi_sys::*;
|
||||
use std::mem::MaybeUninit;
|
||||
use std::ptr;
|
||||
use std::ptr::addr_of_mut;
|
||||
use std::ptr::null_mut;
|
||||
use std::time::Duration;
|
||||
|
||||
struct KeepAlive {
|
||||
tsfn: napi_threadsafe_function,
|
||||
}
|
||||
|
||||
impl KeepAlive {
|
||||
fn new(env: napi_env) -> Self {
|
||||
let mut name = null_mut();
|
||||
assert_napi_ok!(napi_create_string_utf8(
|
||||
env,
|
||||
c"test_uv_async".as_ptr(),
|
||||
13,
|
||||
&mut name
|
||||
));
|
||||
|
||||
unsafe extern "C" fn dummy(
|
||||
_env: napi_env,
|
||||
_cb: napi_callback_info,
|
||||
) -> napi_value {
|
||||
ptr::null_mut()
|
||||
}
|
||||
|
||||
let mut func = null_mut();
|
||||
assert_napi_ok!(napi_create_function(
|
||||
env,
|
||||
c"dummy".as_ptr(),
|
||||
usize::MAX,
|
||||
Some(dummy),
|
||||
null_mut(),
|
||||
&mut func,
|
||||
));
|
||||
|
||||
let mut tsfn = null_mut();
|
||||
assert_napi_ok!(napi_create_threadsafe_function(
|
||||
env,
|
||||
func,
|
||||
null_mut(),
|
||||
name,
|
||||
0,
|
||||
1,
|
||||
null_mut(),
|
||||
None,
|
||||
null_mut(),
|
||||
None,
|
||||
&mut tsfn,
|
||||
));
|
||||
assert_napi_ok!(napi_ref_threadsafe_function(env, tsfn));
|
||||
Self { tsfn }
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for KeepAlive {
|
||||
fn drop(&mut self) {
|
||||
assert_napi_ok!(napi_release_threadsafe_function(
|
||||
self.tsfn,
|
||||
ThreadsafeFunctionReleaseMode::release,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
struct Async {
|
||||
mutex: *mut uv_mutex_t,
|
||||
env: napi_env,
|
||||
value: u32,
|
||||
callback: napi_ref,
|
||||
_keep_alive: KeepAlive,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
struct UvAsyncPtr(*mut uv_async_t);
|
||||
|
||||
unsafe impl Send for UvAsyncPtr {}
|
||||
|
||||
fn new_raw<T>(t: T) -> *mut T {
|
||||
Box::into_raw(Box::new(t))
|
||||
}
|
||||
|
||||
unsafe extern "C" fn close_cb(handle: *mut uv_handle_t) {
|
||||
let handle = handle.cast::<uv_async_t>();
|
||||
let async_ = (*handle).data as *mut Async;
|
||||
let env = (*async_).env;
|
||||
assert_napi_ok!(napi_delete_reference(env, (*async_).callback));
|
||||
|
||||
uv_mutex_destroy((*async_).mutex);
|
||||
let _ = Box::from_raw((*async_).mutex);
|
||||
let _ = Box::from_raw(async_);
|
||||
let _ = Box::from_raw(handle);
|
||||
}
|
||||
|
||||
unsafe extern "C" fn callback(handle: *mut uv_async_t) {
|
||||
eprintln!("callback");
|
||||
let async_ = (*handle).data as *mut Async;
|
||||
uv_mutex_lock((*async_).mutex);
|
||||
let env = (*async_).env;
|
||||
let mut js_cb = null_mut();
|
||||
assert_napi_ok!(napi_get_reference_value(
|
||||
env,
|
||||
(*async_).callback,
|
||||
&mut js_cb
|
||||
));
|
||||
let mut global: napi_value = ptr::null_mut();
|
||||
assert_napi_ok!(napi_get_global(env, &mut global));
|
||||
|
||||
let mut result: napi_value = ptr::null_mut();
|
||||
let value = (*async_).value;
|
||||
eprintln!("value is {value}");
|
||||
let mut value_js = ptr::null_mut();
|
||||
assert_napi_ok!(napi_create_uint32(env, value, &mut value_js));
|
||||
let args = &[value_js];
|
||||
assert_napi_ok!(napi_call_function(
|
||||
env,
|
||||
global,
|
||||
js_cb,
|
||||
1,
|
||||
args.as_ptr(),
|
||||
&mut result,
|
||||
));
|
||||
uv_mutex_unlock((*async_).mutex);
|
||||
if value == 5 {
|
||||
uv_close(handle.cast(), Some(close_cb));
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn uv_async_send(ptr: UvAsyncPtr) {
|
||||
assert_napi_ok!(libuv_sys_lite::uv_async_send(ptr.0));
|
||||
}
|
||||
|
||||
fn make_uv_mutex() -> *mut uv_mutex_t {
|
||||
let mutex = new_raw(MaybeUninit::<uv_mutex_t>::uninit());
|
||||
assert_napi_ok!(libuv_sys_lite::uv_mutex_init(mutex.cast()));
|
||||
mutex.cast()
|
||||
}
|
||||
|
||||
#[allow(unused_unsafe)]
|
||||
extern "C" fn test_uv_async(
|
||||
env: napi_env,
|
||||
info: napi_callback_info,
|
||||
) -> napi_value {
|
||||
let (args, argc, _) = napi_get_callback_info!(env, info, 1);
|
||||
assert_eq!(argc, 1);
|
||||
|
||||
let mut loop_ = null_mut();
|
||||
assert_napi_ok!(napi_get_uv_event_loop(env, &mut loop_));
|
||||
let uv_async = new_raw(MaybeUninit::<uv_async_t>::uninit());
|
||||
let uv_async = uv_async.cast::<uv_async_t>();
|
||||
let mut js_cb = null_mut();
|
||||
assert_napi_ok!(napi_create_reference(env, args[0], 1, &mut js_cb));
|
||||
// let mut tsfn = null_mut();
|
||||
|
||||
let data = new_raw(Async {
|
||||
env,
|
||||
callback: js_cb,
|
||||
mutex: make_uv_mutex(),
|
||||
value: 0,
|
||||
_keep_alive: KeepAlive::new(env),
|
||||
});
|
||||
unsafe {
|
||||
addr_of_mut!((*uv_async).data).write(data.cast());
|
||||
assert_napi_ok!(uv_async_init(loop_.cast(), uv_async, Some(callback)));
|
||||
let uv_async = UvAsyncPtr(uv_async);
|
||||
std::thread::spawn({
|
||||
move || {
|
||||
let data = (*uv_async.0).data as *mut Async;
|
||||
for _ in 0..5 {
|
||||
uv_mutex_lock((*data).mutex);
|
||||
(*data).value += 1;
|
||||
uv_mutex_unlock((*data).mutex);
|
||||
std::thread::sleep(Duration::from_millis(10));
|
||||
uv_async_send(uv_async);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
ptr::null_mut()
|
||||
}
|
||||
|
||||
pub fn init(env: napi_env, exports: napi_value) {
|
||||
let properties = &[napi_new_property!(env, "test_uv_async", test_uv_async)];
|
||||
|
||||
assert_napi_ok!(napi_define_properties(
|
||||
env,
|
||||
exports,
|
||||
properties.len(),
|
||||
properties.as_ptr()
|
||||
));
|
||||
}
|
22
tests/napi/uv_test.js
Normal file
|
@ -0,0 +1,22 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

import { assertEquals, loadTestLibrary } from "./common.js";

const uv = loadTestLibrary();

Deno.test({
  name: "napi uv async",
  ignore: true,
  fn: async () => {
    let called = false;
    await new Promise((resolve) => {
      uv.test_uv_async((value) => {
        called = true;
        if (value === 5) {
          resolve();
        }
      });
    });
    assertEquals(called, true);
  },
});
|
|
@ -0,0 +1,44 @@
|
|||
{
|
||||
"tempDir": true,
|
||||
|
||||
"tests": {
|
||||
"tag_with_byonm": {
|
||||
"steps": [
|
||||
{
|
||||
"args": "install",
|
||||
"output": "[WILDCARD]"
|
||||
},
|
||||
{
|
||||
"args": "run -A main.ts",
|
||||
"output": ""
|
||||
}
|
||||
]
|
||||
},
|
||||
"no_tag_then_tag": {
|
||||
"steps": [
|
||||
{
|
||||
"args": "run -A replace-version-req.ts 1.0.0",
|
||||
"output": ""
|
||||
},
|
||||
{
|
||||
"args": "install",
|
||||
"output": "[WILDCARD]"
|
||||
},
|
||||
{
|
||||
"args": "run -A replace-version-req.ts latest",
|
||||
"output": ""
|
||||
},
|
||||
{
|
||||
"args": "run -A main.ts",
|
||||
"output": "node_modules_out_of_date.out",
|
||||
"exitCode": 1
|
||||
},
|
||||
{
|
||||
"args": "install",
|
||||
"output": "[WILDCARD]"
|
||||
},
|
||||
{ "args": "run -A main.ts", "output": "" }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
{
|
||||
"imports": {
|
||||
"@denotest/esm-basic": "npm:@denotest/esm-basic@latest"
|
||||
}
|
||||
}
|
1
tests/specs/install/byonm_run_tag_after_install/main.ts
Normal file
|
@ -0,0 +1 @@
|
|||
import { add } from "@denotest/esm-basic";
|
|
@ -0,0 +1,2 @@
|
|||
error: Could not find a matching package for 'npm:@denotest/esm-basic@latest' in the node_modules directory. Ensure you have all your JSR and npm dependencies listed in your deno.json or package.json, then run `deno install`. Alternatively, turn on auto-install by specifying `"nodeModulesDir": "auto"` in your deno.json file.
|
||||
at [WILDCARD]main.ts:1:21
|
|
@ -0,0 +1 @@
|
|||
{}
|
|
@ -0,0 +1,7 @@
|
|||
const newReq = Deno.args[0]?.trim();
|
||||
if (!newReq) {
|
||||
throw new Error("Missing required argument");
|
||||
}
|
||||
const config = JSON.parse(Deno.readTextFileSync("deno.json"));
|
||||
config.imports["@denotest/esm-basic"] = `npm:@denotest/esm-basic@${newReq}`;
|
||||
Deno.writeTextFileSync("deno.json", JSON.stringify(config));
|
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"tempDir": true,
|
||||
"steps": [
|
||||
{
|
||||
"args": "install",
|
||||
"output": "install.out"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,3 @@
|
|||
Download http://localhost:4260/@denotest/esm-basic
|
||||
Download http://localhost:4260/@denotest/esm-basic/1.0.0.tgz
|
||||
Initialize @denotest/esm-basic@1.0.0
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"dependencies": {
|
||||
"@denotest/esm-basic": "latest"
|
||||
},
|
||||
"workspaces": ["package1"]
|
||||
}
|
Some files were not shown because too many files have changed in this diff