
Merge branch 'denoland:main' into cjs_suggestions_for_mjs

This commit is contained in:
Mohammad Sulaiman 2024-11-08 19:12:06 +02:00 committed by GitHub
commit 0b6c8bdbe6
1744 changed files with 10422 additions and 3036 deletions

View file

@ -65,7 +65,11 @@
"tests/wpt/runner/expectation.json",
"tests/wpt/runner/manifest.json",
"tests/wpt/suite",
"third_party"
"third_party",
"tests/specs/run/shebang_with_json_imports_tsc",
"tests/specs/run/shebang_with_json_imports_swc",
"tests/specs/run/ext_flag_takes_precedence_over_extension",
"tests/specs/run/error_syntax_empty_trailing_line/error_syntax_empty_trailing_line.mjs"
],
"plugins": [
"https://plugins.dprint.dev/typescript-0.93.2.wasm",

View file

@ -10,7 +10,7 @@ concurrency:
jobs:
build:
name: cargo publish
runs-on: ubuntu-20.04-xl
runs-on: ubuntu-24.04-xl
timeout-minutes: 90
env:

View file

@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 23;
const cacheVersion = 24;
const ubuntuX86Runner = "ubuntu-24.04";
const ubuntuX86XlRunner = "ubuntu-24.04-xl";
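
The comment above says tools/release/01_bump_crate_versions.ts rewrites this constant via regex. A minimal Rust sketch of that kind of rewrite, under the assumption that a plain find-and-replace on `const cacheVersion = N;` is all the script needs (the function is illustrative, not the script's actual code):

use regex::Regex;

// Rewrite `const cacheVersion = N;` to the given next version.
// Illustrative only; the real logic lives in the TypeScript release script.
fn bump_cache_version(ci_source: &str, next: u32) -> String {
    let re = Regex::new(r"const cacheVersion = \d+;").unwrap();
    re.replace(ci_source, format!("const cacheVersion = {next};"))
        .into_owned()
}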

View file

@ -361,8 +361,8 @@ jobs:
path: |-
~/.cargo/registry/index
~/.cargo/registry/cache
key: '23-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '23-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
key: '24-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '24-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)'
- name: Restore cache build output (PR)
uses: actions/cache/restore@v4
@ -375,7 +375,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '23-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
restore-keys: '24-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache
@ -685,7 +685,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.sha256sum
!./target/*/*.tar.gz
key: '23-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
key: '24-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:
name: publish canary
runs-on: ubuntu-24.04

View file

@ -7,7 +7,7 @@ on:
jobs:
update-dl-version:
name: update dl.deno.land version
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
if: github.repository == 'denoland/deno'
steps:
- name: Authenticate with Google Cloud

View file

@ -16,7 +16,7 @@ on:
jobs:
build:
name: start release
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 30
env:

View file

@ -16,7 +16,7 @@ on:
jobs:
build:
name: version bump
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 90
env:

View file

@ -20,7 +20,7 @@ jobs:
fail-fast: false
matrix:
deno-version: [v1.x, canary]
os: [ubuntu-22.04-xl]
os: [ubuntu-24.04-xl]
steps:
- name: Clone repository

Cargo.lock generated
View file

@ -765,6 +765,8 @@ dependencies = [
"fastwebsockets",
"file_test_runner",
"flaky_test",
"hickory-client",
"hickory-server",
"http 1.1.0",
"http-body-util",
"hyper 1.4.1",
@ -778,8 +780,6 @@ dependencies = [
"serde",
"test_server",
"tokio",
"trust-dns-client",
"trust-dns-server",
"url",
"uuid",
"zeromq",
@ -1154,7 +1154,7 @@ dependencies = [
[[package]]
name = "deno"
version = "2.0.4"
version = "2.0.5"
dependencies = [
"anstream",
"async-trait",
@ -1323,7 +1323,7 @@ dependencies = [
[[package]]
name = "deno_bench_util"
version = "0.169.0"
version = "0.170.0"
dependencies = [
"bencher",
"deno_core",
@ -1332,7 +1332,7 @@ dependencies = [
[[package]]
name = "deno_broadcast_channel"
version = "0.169.0"
version = "0.170.0"
dependencies = [
"async-trait",
"deno_core",
@ -1343,7 +1343,7 @@ dependencies = [
[[package]]
name = "deno_cache"
version = "0.107.0"
version = "0.108.0"
dependencies = [
"async-trait",
"deno_core",
@ -1376,7 +1376,7 @@ dependencies = [
[[package]]
name = "deno_canvas"
version = "0.44.0"
version = "0.45.0"
dependencies = [
"deno_core",
"deno_webgpu",
@ -1411,7 +1411,7 @@ dependencies = [
[[package]]
name = "deno_console"
version = "0.175.0"
version = "0.176.0"
dependencies = [
"deno_core",
]
@ -1456,7 +1456,7 @@ checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"
[[package]]
name = "deno_cron"
version = "0.55.0"
version = "0.56.0"
dependencies = [
"anyhow",
"async-trait",
@ -1469,7 +1469,7 @@ dependencies = [
[[package]]
name = "deno_crypto"
version = "0.189.0"
version = "0.190.0"
dependencies = [
"aes",
"aes-gcm",
@ -1531,7 +1531,7 @@ dependencies = [
[[package]]
name = "deno_fetch"
version = "0.199.0"
version = "0.200.0"
dependencies = [
"base64 0.21.7",
"bytes",
@ -1564,7 +1564,7 @@ dependencies = [
[[package]]
name = "deno_ffi"
version = "0.162.0"
version = "0.163.0"
dependencies = [
"deno_core",
"deno_permissions",
@ -1584,7 +1584,7 @@ dependencies = [
[[package]]
name = "deno_fs"
version = "0.85.0"
version = "0.86.0"
dependencies = [
"async-trait",
"base32",
@ -1635,7 +1635,7 @@ dependencies = [
[[package]]
name = "deno_http"
version = "0.173.0"
version = "0.174.0"
dependencies = [
"async-compression",
"async-trait",
@ -1674,7 +1674,7 @@ dependencies = [
[[package]]
name = "deno_io"
version = "0.85.0"
version = "0.86.0"
dependencies = [
"async-trait",
"deno_core",
@ -1695,7 +1695,7 @@ dependencies = [
[[package]]
name = "deno_kv"
version = "0.83.0"
version = "0.84.0"
dependencies = [
"anyhow",
"async-trait",
@ -1767,7 +1767,7 @@ dependencies = [
[[package]]
name = "deno_napi"
version = "0.106.0"
version = "0.107.0"
dependencies = [
"deno_core",
"deno_permissions",
@ -1795,24 +1795,24 @@ dependencies = [
[[package]]
name = "deno_net"
version = "0.167.0"
version = "0.168.0"
dependencies = [
"deno_core",
"deno_permissions",
"deno_tls",
"hickory-proto",
"hickory-resolver",
"pin-project",
"rustls-tokio-stream",
"serde",
"socket2",
"thiserror",
"tokio",
"trust-dns-proto",
"trust-dns-resolver",
]
[[package]]
name = "deno_node"
version = "0.112.0"
version = "0.113.0"
dependencies = [
"aead-gcm-stream",
"aes",
@ -1961,7 +1961,7 @@ dependencies = [
[[package]]
name = "deno_permissions"
version = "0.35.0"
version = "0.36.0"
dependencies = [
"deno_core",
"deno_path_util",
@ -1979,7 +1979,7 @@ dependencies = [
[[package]]
name = "deno_resolver"
version = "0.7.0"
version = "0.8.0"
dependencies = [
"anyhow",
"base32",
@ -1995,7 +1995,7 @@ dependencies = [
[[package]]
name = "deno_runtime"
version = "0.184.0"
version = "0.185.0"
dependencies = [
"color-print",
"deno_ast",
@ -2113,7 +2113,7 @@ dependencies = [
[[package]]
name = "deno_tls"
version = "0.162.0"
version = "0.163.0"
dependencies = [
"deno_core",
"deno_native_certs",
@ -2162,7 +2162,7 @@ dependencies = [
[[package]]
name = "deno_url"
version = "0.175.0"
version = "0.176.0"
dependencies = [
"deno_bench_util",
"deno_console",
@ -2174,7 +2174,7 @@ dependencies = [
[[package]]
name = "deno_web"
version = "0.206.0"
version = "0.207.0"
dependencies = [
"async-trait",
"base64-simd 0.8.0",
@ -2196,7 +2196,7 @@ dependencies = [
[[package]]
name = "deno_webgpu"
version = "0.142.0"
version = "0.143.0"
dependencies = [
"deno_core",
"raw-window-handle",
@ -2209,7 +2209,7 @@ dependencies = [
[[package]]
name = "deno_webidl"
version = "0.175.0"
version = "0.176.0"
dependencies = [
"deno_bench_util",
"deno_core",
@ -2217,7 +2217,7 @@ dependencies = [
[[package]]
name = "deno_websocket"
version = "0.180.0"
version = "0.181.0"
dependencies = [
"bytes",
"deno_core",
@ -2239,7 +2239,7 @@ dependencies = [
[[package]]
name = "deno_webstorage"
version = "0.170.0"
version = "0.171.0"
dependencies = [
"deno_core",
"deno_web",
@ -2639,15 +2639,6 @@ dependencies = [
"text_lines",
]
[[package]]
name = "drain"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d105028bd2b5dfcb33318fd79a445001ead36004dd8dffef1bdd7e493d8bc1e"
dependencies = [
"tokio",
]
[[package]]
name = "dsa"
version = "0.6.3"
@ -3545,6 +3536,92 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df"
[[package]]
name = "hickory-client"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bab9683b08d8f8957a857b0236455d80e1886eaa8c6178af556aa7871fb61b55"
dependencies = [
"cfg-if",
"data-encoding",
"futures-channel",
"futures-util",
"hickory-proto",
"once_cell",
"radix_trie",
"rand",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "hickory-proto"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07698b8420e2f0d6447a436ba999ec85d8fbf2a398bbd737b82cac4a2e96e512"
dependencies = [
"async-trait",
"cfg-if",
"data-encoding",
"enum-as-inner",
"futures-channel",
"futures-io",
"futures-util",
"idna 0.4.0",
"ipnet",
"once_cell",
"rand",
"serde",
"thiserror",
"tinyvec",
"tokio",
"tracing",
"url",
]
[[package]]
name = "hickory-resolver"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28757f23aa75c98f254cf0405e6d8c25b831b32921b050a66692427679b1f243"
dependencies = [
"cfg-if",
"futures-util",
"hickory-proto",
"ipconfig",
"lru-cache",
"once_cell",
"parking_lot",
"rand",
"resolv-conf",
"serde",
"smallvec",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "hickory-server"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be0e43c556b9b3fdb6c7c71a9a32153a2275d02419e3de809e520bfcfe40c37"
dependencies = [
"async-trait",
"bytes",
"cfg-if",
"enum-as-inner",
"futures-util",
"hickory-proto",
"serde",
"thiserror",
"time",
"tokio",
"tokio-util",
"tracing",
]
[[package]]
name = "hkdf"
version = "0.12.4"
@ -4195,9 +4272,9 @@ dependencies = [
[[package]]
name = "libsui"
version = "0.4.0"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "205eca4e7beaad637dcd38fe41292065894ee7f498077cf3c135d5f7252b9f27"
checksum = "89795977654ad6250d6c0915411b622bac22f9efb4f852af94b2e00964cab832"
dependencies = [
"editpe",
"libc",
@ -4484,7 +4561,7 @@ dependencies = [
[[package]]
name = "napi_sym"
version = "0.105.0"
version = "0.106.0"
dependencies = [
"quote",
"serde",
@ -4539,7 +4616,7 @@ dependencies = [
[[package]]
name = "node_resolver"
version = "0.14.0"
version = "0.15.0"
dependencies = [
"anyhow",
"async-trait",
@ -6147,15 +6224,6 @@ dependencies = [
"syn 2.0.72",
]
[[package]]
name = "serde_spanned"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0"
dependencies = [
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
@ -7122,6 +7190,7 @@ dependencies = [
"console_static_text",
"deno_unsync",
"denokv_proto",
"faster-hex",
"fastwebsockets",
"flate2",
"futures",
@ -7369,40 +7438,6 @@ dependencies = [
"serde",
]
[[package]]
name = "toml"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.19.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"winnow 0.5.40",
]
[[package]]
name = "tower"
version = "0.4.13"
@ -7492,95 +7527,6 @@ dependencies = [
"stable_deref_trait",
]
[[package]]
name = "trust-dns-client"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14135e72c7e6d4c9b6902d4437881a8598f0145dbb2e3f86f92dbad845b61e63"
dependencies = [
"cfg-if",
"data-encoding",
"futures-channel",
"futures-util",
"once_cell",
"radix_trie",
"rand",
"thiserror",
"tokio",
"tracing",
"trust-dns-proto",
]
[[package]]
name = "trust-dns-proto"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374"
dependencies = [
"async-trait",
"cfg-if",
"data-encoding",
"enum-as-inner",
"futures-channel",
"futures-io",
"futures-util",
"idna 0.4.0",
"ipnet",
"once_cell",
"rand",
"serde",
"smallvec",
"thiserror",
"tinyvec",
"tokio",
"tracing",
"url",
]
[[package]]
name = "trust-dns-resolver"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6"
dependencies = [
"cfg-if",
"futures-util",
"ipconfig",
"lru-cache",
"once_cell",
"parking_lot",
"rand",
"resolv-conf",
"serde",
"smallvec",
"thiserror",
"tokio",
"tracing",
"trust-dns-proto",
]
[[package]]
name = "trust-dns-server"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c540f73c2b2ec2f6c54eabd0900e7aafb747a820224b742f556e8faabb461bc7"
dependencies = [
"async-trait",
"bytes",
"cfg-if",
"drain",
"enum-as-inner",
"futures-executor",
"futures-util",
"serde",
"thiserror",
"time",
"tokio",
"toml 0.7.8",
"tracing",
"trust-dns-proto",
]
[[package]]
name = "try-lock"
version = "0.2.5"
@ -8330,15 +8276,6 @@ version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
[[package]]
name = "winnow"
version = "0.5.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876"
dependencies = [
"memchr",
]
[[package]]
name = "winnow"
version = "0.6.15"
@ -8374,7 +8311,7 @@ version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b68db261ef59e9e52806f688020631e987592bd83619edccda9c47d42cde4f6c"
dependencies = [
"toml 0.5.11",
"toml",
]
[[package]]
@ -8451,7 +8388,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a6a39b6b5ba0d02c910d05d7fbc366a4befb8901ea107dcde9c1c97acb8a366"
dependencies = [
"rowan",
"winnow 0.6.15",
"winnow",
]
[[package]]

View file

@ -48,16 +48,16 @@ repository = "https://github.com/denoland/deno"
deno_ast = { version = "=0.43.3", features = ["transpiling"] }
deno_core = { version = "0.318.0" }
deno_bench_util = { version = "0.169.0", path = "./bench_util" }
deno_bench_util = { version = "0.170.0", path = "./bench_util" }
deno_lockfile = "=0.23.1"
deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
deno_npm = "=0.25.4"
deno_path_util = "=0.2.1"
deno_permissions = { version = "0.35.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.184.0", path = "./runtime" }
deno_permissions = { version = "0.36.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.185.0", path = "./runtime" }
deno_semver = "=0.5.16"
deno_terminal = "0.2.0"
napi_sym = { version = "0.105.0", path = "./ext/napi/sym" }
napi_sym = { version = "0.106.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.1"
@ -66,32 +66,32 @@ denokv_remote = "0.8.1"
denokv_sqlite = { default-features = false, version = "0.8.2" }
# exts
deno_broadcast_channel = { version = "0.169.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.107.0", path = "./ext/cache" }
deno_canvas = { version = "0.44.0", path = "./ext/canvas" }
deno_console = { version = "0.175.0", path = "./ext/console" }
deno_cron = { version = "0.55.0", path = "./ext/cron" }
deno_crypto = { version = "0.189.0", path = "./ext/crypto" }
deno_fetch = { version = "0.199.0", path = "./ext/fetch" }
deno_ffi = { version = "0.162.0", path = "./ext/ffi" }
deno_fs = { version = "0.85.0", path = "./ext/fs" }
deno_http = { version = "0.173.0", path = "./ext/http" }
deno_io = { version = "0.85.0", path = "./ext/io" }
deno_kv = { version = "0.83.0", path = "./ext/kv" }
deno_napi = { version = "0.106.0", path = "./ext/napi" }
deno_net = { version = "0.167.0", path = "./ext/net" }
deno_node = { version = "0.112.0", path = "./ext/node" }
deno_tls = { version = "0.162.0", path = "./ext/tls" }
deno_url = { version = "0.175.0", path = "./ext/url" }
deno_web = { version = "0.206.0", path = "./ext/web" }
deno_webgpu = { version = "0.142.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.175.0", path = "./ext/webidl" }
deno_websocket = { version = "0.180.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.170.0", path = "./ext/webstorage" }
deno_broadcast_channel = { version = "0.170.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.108.0", path = "./ext/cache" }
deno_canvas = { version = "0.45.0", path = "./ext/canvas" }
deno_console = { version = "0.176.0", path = "./ext/console" }
deno_cron = { version = "0.56.0", path = "./ext/cron" }
deno_crypto = { version = "0.190.0", path = "./ext/crypto" }
deno_fetch = { version = "0.200.0", path = "./ext/fetch" }
deno_ffi = { version = "0.163.0", path = "./ext/ffi" }
deno_fs = { version = "0.86.0", path = "./ext/fs" }
deno_http = { version = "0.174.0", path = "./ext/http" }
deno_io = { version = "0.86.0", path = "./ext/io" }
deno_kv = { version = "0.84.0", path = "./ext/kv" }
deno_napi = { version = "0.107.0", path = "./ext/napi" }
deno_net = { version = "0.168.0", path = "./ext/net" }
deno_node = { version = "0.113.0", path = "./ext/node" }
deno_tls = { version = "0.163.0", path = "./ext/tls" }
deno_url = { version = "0.176.0", path = "./ext/url" }
deno_web = { version = "0.207.0", path = "./ext/web" }
deno_webgpu = { version = "0.143.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.176.0", path = "./ext/webidl" }
deno_websocket = { version = "0.181.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.171.0", path = "./ext/webstorage" }
# resolvers
deno_resolver = { version = "0.7.0", path = "./resolvers/deno" }
node_resolver = { version = "0.14.0", path = "./resolvers/node" }
deno_resolver = { version = "0.8.0", path = "./resolvers/deno" }
node_resolver = { version = "0.15.0", path = "./resolvers/node" }
aes = "=0.8.3"
anyhow = "1.0.57"

View file

@ -6,6 +6,42 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install
### 2.0.5 / 2024.11.05
- fix(add): better error message when adding package that only has pre-release
versions (#26724)
- fix(add): only add npm deps to package.json if it's at least as close as
deno.json (#26683)
- fix(cli): set `npm_config_user_agent` when running npm packages or tasks
(#26639)
- fix(coverage): exclude comment lines from coverage reports (#25939)
- fix(ext/node): add `findSourceMap` to the default export of `node:module`
(#26720)
- fix(ext/node): convert errors from `fs.readFile/fs.readFileSync` to node
format (#26632)
- fix(ext/node): resolve exports even if parent module filename isn't present
(#26553)
- fix(ext/node): return `this` from `http.Server.ref/unref()` (#26647)
- fix(fmt): do not panic for jsx ignore container followed by jsx text (#26723)
- fix(fmt): fix several HTML and components issues (#26654)
- fix(fmt): ignore file directive for YAML files (#26717)
- fix(install): handle invalid function error, and fallback to junctions
regardless of the error (#26730)
- fix(lsp): include unstable features from editor settings (#26655)
- fix(lsp): scope attribution for lazily loaded assets (#26699)
- fix(node): Implement `os.userInfo` properly, add missing `toPrimitive`
(#24702)
- fix(serve): support serve hmr (#26078)
- fix(types): missing `import` permission on `PermissionOptionsObject` (#26627)
- fix(workspace): support wildcard packages (#26568)
- fix: clamp smi in fast calls by default (#26506)
- fix: improved support for cjs and cts modules (#26558)
- fix: op_run_microtasks crash (#26718)
- fix: panic_hook hangs without procfs (#26732)
- fix: remove permission check in op_require_node_module_paths (#26645)
- fix: surface package.json location on dep parse failure (#26665)
- perf(lsp): don't walk coverage directory (#26715)
### 2.0.4 / 2024.10.29
- Revert "fix(ext/node): fix dns.lookup result ordering (#26264)" (#26621)

View file

@ -2,7 +2,7 @@
[package]
name = "deno_bench_util"
version = "0.169.0"
version = "0.170.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno"
version = "2.0.4"
version = "2.0.5"
authors.workspace = true
default-run = "deno"
edition.workspace = true
@ -84,7 +84,7 @@ deno_runtime = { workspace = true, features = ["include_js_files_for_snapshottin
deno_semver.workspace = true
deno_task_shell = "=0.18.1"
deno_terminal.workspace = true
libsui = "0.4.0"
libsui = "0.5.0"
node_resolver.workspace = true
anstream = "0.6.14"

View file

@ -88,6 +88,10 @@ fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
}
}
fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
"TypeError"
}
pub fn get_error_class_name(e: &AnyError) -> &'static str {
deno_runtime::errors::get_error_class_name(e)
.or_else(|| {
@ -106,5 +110,9 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str {
e.downcast_ref::<ResolutionError>()
.map(get_resolution_error_class)
})
.or_else(|| {
e.downcast_ref::<std::num::TryFromIntError>()
.map(get_try_from_int_error_class)
})
.unwrap_or("Error")
}
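
The hunk above extends the error-class chain with a mapping for std::num::TryFromIntError. A self-contained sketch of the same downcast pattern, written against anyhow::Error on the assumption that deno_core's AnyError is the usual anyhow re-export (the chain is trimmed here to the new arm plus the fallback):

// Sketch: probe the boxed error for a TryFromIntError and classify it as a
// JS TypeError, falling back to plain "Error" otherwise.
fn get_error_class_name(e: &anyhow::Error) -> &'static str {
    e.downcast_ref::<std::num::TryFromIntError>()
        .map(|_| "TypeError")
        .unwrap_or("Error")
}

fn main() {
    let err = anyhow::Error::new(u8::try_from(300u16).unwrap_err());
    assert_eq!(get_error_class_name(&err), "TypeError");
}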

View file

@ -12,7 +12,9 @@ use super::urls::url_to_uri;
use crate::args::jsr_url;
use crate::lsp::search::PackageSearchApi;
use crate::tools::lint::CliLinter;
use crate::util::path::relative_specifier;
use deno_config::workspace::MappedResolution;
use deno_graph::source::ResolutionMode;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_ast::SourceRange;
@ -228,6 +230,7 @@ pub struct TsResponseImportMapper<'a> {
documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver,
tsc_specifier_map: &'a tsc::TscSpecifierMap,
file_referrer: ModuleSpecifier,
}
@ -236,12 +239,14 @@ impl<'a> TsResponseImportMapper<'a> {
documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver,
tsc_specifier_map: &'a tsc::TscSpecifierMap,
file_referrer: &ModuleSpecifier,
) -> Self {
Self {
documents,
maybe_import_map,
resolver,
tsc_specifier_map,
file_referrer: file_referrer.clone(),
}
}
@ -387,6 +392,11 @@ impl<'a> TsResponseImportMapper<'a> {
}
}
}
} else if let Some(dep_name) = self
.resolver
.file_url_to_package_json_dep(specifier, Some(&self.file_referrer))
{
return Some(dep_name);
}
// check if the import map has this specifier
@ -457,19 +467,36 @@ impl<'a> TsResponseImportMapper<'a> {
specifier: &str,
referrer: &ModuleSpecifier,
) -> Option<String> {
if let Ok(specifier) = referrer.join(specifier) {
if let Some(specifier) = self.check_specifier(&specifier, referrer) {
return Some(specifier);
}
}
let specifier = specifier.strip_suffix(".js").unwrap_or(specifier);
for ext in SUPPORTED_EXTENSIONS {
let specifier_with_ext = format!("{specifier}{ext}");
if self
.documents
.contains_import(&specifier_with_ext, referrer)
let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
SUPPORTED_EXTENSIONS
.iter()
.map(|ext| Cow::Owned(format!("{specifier_stem}{ext}"))),
);
for specifier in specifiers {
if let Some(specifier) = self
.resolver
.as_graph_resolver(Some(&self.file_referrer))
.resolve(
&specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
ResolutionMode::Types,
)
.ok()
.and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok())
.filter(|s| self.documents.exists(s, Some(&self.file_referrer)))
{
return Some(specifier_with_ext);
if let Some(specifier) = self
.check_specifier(&specifier, referrer)
.or_else(|| relative_specifier(referrer, &specifier))
.filter(|s| !s.contains("/node_modules/"))
{
return Some(specifier);
}
}
}
None
@ -559,8 +586,9 @@ fn try_reverse_map_package_json_exports(
pub fn fix_ts_import_changes(
referrer: &ModuleSpecifier,
changes: &[tsc::FileTextChanges],
import_mapper: &TsResponseImportMapper,
language_server: &language_server::Inner,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
let mut r = Vec::new();
for change in changes {
let mut text_changes = Vec::new();
@ -605,7 +633,7 @@ pub fn fix_ts_import_changes(
fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier,
action: &'a tsc::CodeFixAction,
import_mapper: &TsResponseImportMapper,
language_server: &language_server::Inner,
) -> Option<Cow<'a, tsc::CodeFixAction>> {
if !matches!(
action.fix_name.as_str(),
@ -621,6 +649,7 @@ fn fix_ts_import_action<'a>(
let Some(specifier) = specifier else {
return Some(Cow::Borrowed(action));
};
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer)
{
@ -728,7 +757,7 @@ pub fn ts_changes_to_edit(
}))
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CodeActionData {
pub specifier: ModuleSpecifier,
@ -998,11 +1027,8 @@ impl CodeActionCollection {
"The action returned from TypeScript is unsupported.",
));
}
let Some(action) = fix_ts_import_action(
specifier,
action,
&language_server.get_ts_response_import_mapper(specifier),
) else {
let Some(action) = fix_ts_import_action(specifier, action, language_server)
else {
return Ok(());
};
let edit = ts_changes_to_edit(&action.changes, language_server)?;
@ -1051,10 +1077,12 @@ impl CodeActionCollection {
specifier: &ModuleSpecifier,
diagnostic: &lsp::Diagnostic,
) {
let data = Some(json!({
"specifier": specifier,
"fixId": action.fix_id,
}));
let data = action.fix_id.as_ref().map(|fix_id| {
json!(CodeActionData {
specifier: specifier.clone(),
fix_id: fix_id.clone(),
})
});
let title = if let Some(description) = &action.fix_all_description {
description.clone()
} else {

View file

@ -1059,34 +1059,6 @@ impl Documents {
self.cache.is_valid_file_referrer(specifier)
}
/// Return `true` if the provided specifier can be resolved to a document,
/// otherwise `false`.
pub fn contains_import(
&self,
specifier: &str,
referrer: &ModuleSpecifier,
) -> bool {
let file_referrer = self.get_file_referrer(referrer);
let maybe_specifier = self
.resolver
.as_graph_resolver(file_referrer.as_deref())
.resolve(
specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
ResolutionMode::Types,
)
.ok();
if let Some(import_specifier) = maybe_specifier {
self.exists(&import_specifier, file_referrer.as_deref())
} else {
false
}
}
pub fn resolve_document_specifier(
&self,
specifier: &ModuleSpecifier,

View file

@ -1837,7 +1837,7 @@ impl Inner {
fix_ts_import_changes(
&code_action_data.specifier,
&combined_code_actions.changes,
&self.get_ts_response_import_mapper(&code_action_data.specifier),
self,
)
.map_err(|err| {
error!("Unable to remap changes: {:#}", err);
@ -1890,7 +1890,7 @@ impl Inner {
refactor_edit_info.edits = fix_ts_import_changes(
&action_data.specifier,
&refactor_edit_info.edits,
&self.get_ts_response_import_mapper(&action_data.specifier),
self,
)
.map_err(|err| {
error!("Unable to remap changes: {:#}", err);
@ -1921,7 +1921,8 @@ impl Inner {
// todo(dsherret): this should probably just take the resolver itself
// as the import map is an implementation detail
.and_then(|d| d.resolver.maybe_import_map()),
self.resolver.as_ref(),
&self.resolver,
&self.ts_server.specifier_map,
file_referrer,
)
}
@ -2284,7 +2285,11 @@ impl Inner {
.into(),
scope.cloned(),
)
.await;
.await
.unwrap_or_else(|err| {
error!("Unable to get completion info from TypeScript: {:#}", err);
None
});
if let Some(completions) = maybe_completion_info {
response = Some(

View file

@ -74,6 +74,7 @@ struct LspScopeResolver {
pkg_json_resolver: Option<Arc<PackageJsonResolver>>,
redirect_resolver: Option<Arc<RedirectResolver>>,
graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>,
config_data: Option<Arc<ConfigData>>,
}
@ -88,6 +89,7 @@ impl Default for LspScopeResolver {
pkg_json_resolver: None,
redirect_resolver: None,
graph_imports: Default::default(),
package_json_deps_by_resolution: Default::default(),
config_data: None,
}
}
@ -165,6 +167,33 @@ impl LspScopeResolver {
)
})
.unwrap_or_default();
let package_json_deps_by_resolution = (|| {
let node_resolver = node_resolver.as_ref()?;
let package_json = config_data?.maybe_pkg_json()?;
let referrer = package_json.specifier();
let dependencies = package_json.dependencies.as_ref()?;
let result = dependencies
.iter()
.flat_map(|(name, _)| {
let req_ref =
NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()?;
let specifier = into_specifier_and_media_type(Some(
node_resolver
.resolve_req_reference(
&req_ref,
&referrer,
NodeResolutionMode::Types,
)
.ok()?,
))
.0;
Some((specifier, name.clone()))
})
.collect();
Some(result)
})();
let package_json_deps_by_resolution =
Arc::new(package_json_deps_by_resolution.unwrap_or_default());
Self {
cjs_tracker: lsp_cjs_tracker,
graph_resolver,
@ -174,6 +203,7 @@ impl LspScopeResolver {
pkg_json_resolver: Some(pkg_json_resolver),
redirect_resolver,
graph_imports,
package_json_deps_by_resolution,
config_data: config_data.cloned(),
}
}
@ -216,6 +246,9 @@ impl LspScopeResolver {
redirect_resolver: self.redirect_resolver.clone(),
pkg_json_resolver: Some(pkg_json_resolver),
graph_imports: self.graph_imports.clone(),
package_json_deps_by_resolution: self
.package_json_deps_by_resolution
.clone(),
config_data: self.config_data.clone(),
})
}
@ -407,6 +440,18 @@ impl LspResolver {
)))
}
pub fn file_url_to_package_json_dep(
&self,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<String> {
let resolver = self.get_scope_resolver(file_referrer);
resolver
.package_json_deps_by_resolution
.get(specifier)
.cloned()
}
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
// consider any /node_modules/ directory as being in the node_modules
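
The resolver change above precomputes `package_json_deps_by_resolution`: for each dependency listed in package.json, resolve `npm:<name>` to its types entry point and index the resulting URL back to the bare name. `file_url_to_package_json_dep` is then a plain reverse lookup, sketched here with simplified types (`DepIndex` is a hypothetical stand-in for the scope resolver's map):

use std::collections::HashMap;
use url::Url;

// Hypothetical stand-in for `package_json_deps_by_resolution`:
// resolved module URL -> dependency name from package.json.
struct DepIndex(HashMap<Url, String>);

impl DepIndex {
    fn file_url_to_package_json_dep(&self, specifier: &Url) -> Option<String> {
        self.0.get(specifier).cloned()
    }
}

fn main() {
    let resolved = Url::parse("file:///proj/node_modules/chalk/index.d.ts").unwrap();
    let index = DepIndex(HashMap::from([(resolved.clone(), "chalk".to_string())]));
    assert_eq!(index.file_url_to_package_json_dep(&resolved), Some("chalk".into()));
}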

View file

@ -236,7 +236,7 @@ pub struct TsServer {
performance: Arc<Performance>,
sender: mpsc::UnboundedSender<Request>,
receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>,
specifier_map: Arc<TscSpecifierMap>,
pub specifier_map: Arc<TscSpecifierMap>,
inspector_server: Mutex<Option<Arc<InspectorServer>>>,
pending_change: Mutex<Option<PendingChange>>,
}
@ -882,20 +882,22 @@ impl TsServer {
options: GetCompletionsAtPositionOptions,
format_code_settings: FormatCodeSettings,
scope: Option<ModuleSpecifier>,
) -> Option<CompletionInfo> {
) -> Result<Option<CompletionInfo>, AnyError> {
let req = TscRequest::GetCompletionsAtPosition(Box::new((
self.specifier_map.denormalize(&specifier),
position,
options,
format_code_settings,
)));
match self.request(snapshot, req, scope).await {
Ok(maybe_info) => maybe_info,
Err(err) => {
log::error!("Unable to get completion info from TypeScript: {:#}", err);
None
}
}
self
.request::<Option<CompletionInfo>>(snapshot, req, scope)
.await
.map(|mut info| {
if let Some(info) = &mut info {
info.normalize(&self.specifier_map);
}
info
})
}
pub async fn get_completion_details(
@ -3642,6 +3644,12 @@ pub struct CompletionInfo {
}
impl CompletionInfo {
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
for entry in &mut self.entries {
entry.normalize(specifier_map);
}
}
pub fn as_completion_response(
&self,
line_index: Arc<LineIndex>,
@ -3703,11 +3711,17 @@ pub struct CompletionItemData {
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct CompletionEntryDataImport {
struct CompletionEntryDataAutoImport {
module_specifier: String,
file_name: String,
}
#[derive(Debug)]
pub struct CompletionNormalizedAutoImportData {
raw: CompletionEntryDataAutoImport,
normalized: ModuleSpecifier,
}
#[derive(Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CompletionEntry {
@ -3740,9 +3754,28 @@ pub struct CompletionEntry {
is_import_statement_completion: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
data: Option<Value>,
/// This is not from tsc, we add it for convenience during normalization.
/// Represents `self.data.file_name`, but normalized.
#[serde(skip)]
auto_import_data: Option<CompletionNormalizedAutoImportData>,
}
impl CompletionEntry {
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
let Some(data) = &self.data else {
return;
};
let Ok(raw) =
serde_json::from_value::<CompletionEntryDataAutoImport>(data.clone())
else {
return;
};
if let Ok(normalized) = specifier_map.normalize(&raw.file_name) {
self.auto_import_data =
Some(CompletionNormalizedAutoImportData { raw, normalized });
}
}
fn get_commit_characters(
&self,
info: &CompletionInfo,
@ -3891,25 +3924,24 @@ impl CompletionEntry {
if let Some(source) = &self.source {
let mut display_source = source.clone();
if let Some(data) = &self.data {
if let Ok(import_data) =
serde_json::from_value::<CompletionEntryDataImport>(data.clone())
if let Some(import_data) = &self.auto_import_data {
if let Some(new_module_specifier) = language_server
.get_ts_response_import_mapper(specifier)
.check_specifier(&import_data.normalized, specifier)
.or_else(|| relative_specifier(specifier, &import_data.normalized))
{
if let Ok(import_specifier) = resolve_url(&import_data.file_name) {
if let Some(new_module_specifier) = language_server
.get_ts_response_import_mapper(specifier)
.check_specifier(&import_specifier, specifier)
.or_else(|| relative_specifier(specifier, &import_specifier))
{
display_source.clone_from(&new_module_specifier);
if new_module_specifier != import_data.module_specifier {
specifier_rewrite =
Some((import_data.module_specifier, new_module_specifier));
}
} else if source.starts_with(jsr_url().as_str()) {
return None;
}
if new_module_specifier.contains("/node_modules/") {
return None;
}
display_source.clone_from(&new_module_specifier);
if new_module_specifier != import_data.raw.module_specifier {
specifier_rewrite = Some((
import_data.raw.module_specifier.clone(),
new_module_specifier,
));
}
} else if source.starts_with(jsr_url().as_str()) {
return None;
}
}
// We want relative or bare (import-mapped or otherwise) specifiers to
@ -4212,6 +4244,13 @@ impl TscSpecifierMap {
return specifier.to_string();
}
let mut specifier = original.to_string();
if specifier.contains("/node_modules/.deno/")
&& !specifier.contains("/node_modules/@types/node/")
{
// The ts server doesn't give completions from files in
// `node_modules/.deno/`. We work around it like this.
specifier = specifier.replace("/node_modules/", "/$node_modules/");
}
let media_type = MediaType::from_specifier(original);
// If the URL-inferred media type doesn't correspond to tsc's path-inferred
// media type, force it to be the same by appending an extension.
@ -4329,7 +4368,7 @@ fn op_is_cancelled(state: &mut OpState) -> bool {
fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool {
let state = state.borrow::<State>();
let mark = state.performance.mark("tsc.op.op_is_node_file");
let r = match ModuleSpecifier::parse(&path) {
let r = match state.specifier_map.normalize(path) {
Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier),
Err(_) => false,
};
@ -4609,7 +4648,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
for doc in &docs {
let specifier = doc.specifier();
let is_open = doc.is_open();
if is_open || specifier.scheme() == "file" {
if is_open
|| (specifier.scheme() == "file"
&& !state.state_snapshot.resolver.in_node_modules(specifier))
{
let script_names = doc
.scope()
.and_then(|s| result.by_scope.get_mut(s))
@ -6035,6 +6077,7 @@ mod tests {
Some(temp_dir.url()),
)
.await
.unwrap()
.unwrap();
assert_eq!(info.entries.len(), 22);
let details = ts_server
@ -6194,6 +6237,7 @@ mod tests {
Some(temp_dir.url()),
)
.await
.unwrap()
.unwrap();
let entry = info
.entries
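
One tsc.rs hunk above is easy to miss: `denormalize` now masks `/node_modules/` as `/$node_modules/` for paths under `node_modules/.deno/` (except @types/node) so the ts server will still offer completions from them, and `normalize` undoes it on strings coming back. A standalone round-trip sketch of that masking, with the bodies reduced to the string handling (the real methods also deal with media types and custom schemes):

// Mask the segment before handing the path to tsc, which skips completions
// for files under `node_modules/.deno/`.
fn denormalize(specifier: &str) -> String {
    if specifier.contains("/node_modules/.deno/")
        && !specifier.contains("/node_modules/@types/node/")
    {
        specifier.replace("/node_modules/", "/$node_modules/")
    } else {
        specifier.to_string()
    }
}

// Restore the real path on strings coming back from tsc.
fn normalize(specifier: &str) -> String {
    specifier.replace("/$node_modules/", "/node_modules/")
}

fn main() {
    let orig = "file:///p/node_modules/.deno/pkg@1.0.0/node_modules/pkg/mod.ts";
    let masked = denormalize(orig);
    assert!(masked.contains("/$node_modules/"));
    assert_eq!(normalize(&masked), orig);
}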

View file

@ -56,7 +56,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer);
pub fn op_pledge_test_permissions(
state: &mut OpState,
#[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> {
) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
let token = Uuid::new_v4();
let parent_permissions = state.borrow_mut::<PermissionsContainer>();
let worker_permissions = parent_permissions.create_child_permissions(args)?;
@ -147,7 +147,7 @@ fn op_dispatch_bench_event(state: &mut OpState, #[serde] event: BenchEvent) {
#[op2(fast)]
#[number]
fn op_bench_now(state: &mut OpState) -> Result<u64, AnyError> {
fn op_bench_now(state: &mut OpState) -> Result<u64, std::num::TryFromIntError> {
let ns = state.borrow::<time::Instant>().elapsed().as_nanos();
let ns_u64 = u64::try_from(ns)?;
Ok(ns_u64)

View file

@ -46,7 +46,7 @@ pub fn op_jupyter_input(
state: &mut OpState,
#[string] prompt: String,
is_password: bool,
) -> Result<Option<String>, AnyError> {
) -> Option<String> {
let (last_execution_request, stdin_connection_proxy) = {
(
state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(),
@ -58,11 +58,11 @@ pub fn op_jupyter_input(
if let Some(last_request) = maybe_last_request {
let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content
else {
return Ok(None);
return None;
};
if !msg.allow_stdin {
return Ok(None);
return None;
}
let content = InputRequest {
@ -73,7 +73,7 @@ pub fn op_jupyter_input(
let msg = JupyterMessage::new(content, Some(&last_request));
let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else {
return Ok(None);
return None;
};
// Need to spawn a separate thread here, because `blocking_recv()` can't
@ -82,17 +82,25 @@ pub fn op_jupyter_input(
stdin_connection_proxy.lock().rx.blocking_recv()
});
let Ok(Some(response)) = join_handle.join() else {
return Ok(None);
return None;
};
let JupyterMessageContent::InputReply(msg) = response.content else {
return Ok(None);
return None;
};
return Ok(Some(msg.value));
return Some(msg.value);
}
Ok(None)
None
}
#[derive(Debug, thiserror::Error)]
pub enum JupyterBroadcastError {
#[error(transparent)]
SerdeJson(serde_json::Error),
#[error(transparent)]
ZeroMq(AnyError),
}
#[op2(async)]
@ -102,7 +110,7 @@ pub async fn op_jupyter_broadcast(
#[serde] content: serde_json::Value,
#[serde] metadata: serde_json::Value,
#[serde] buffers: Vec<deno_core::JsBuffer>,
) -> Result<(), AnyError> {
) -> Result<(), JupyterBroadcastError> {
let (iopub_connection, last_execution_request) = {
let s = state.borrow();
@ -125,36 +133,35 @@ pub async fn op_jupyter_broadcast(
content,
err
);
err
JupyterBroadcastError::SerdeJson(err)
})?;
let jupyter_message = JupyterMessage::new(content, Some(&last_request))
.with_metadata(metadata)
.with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect());
iopub_connection.lock().send(jupyter_message).await?;
iopub_connection
.lock()
.send(jupyter_message)
.await
.map_err(JupyterBroadcastError::ZeroMq)?;
}
Ok(())
}
#[op2(fast)]
pub fn op_print(
state: &mut OpState,
#[string] msg: &str,
is_err: bool,
) -> Result<(), AnyError> {
pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) {
let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>();
if is_err {
if let Err(err) = sender.send(StreamContent::stderr(msg)) {
log::error!("Failed to send stderr message: {}", err);
}
return Ok(());
return;
}
if let Err(err) = sender.send(StreamContent::stdout(msg)) {
log::error!("Failed to send stdout message: {}", err);
}
Ok(())
}
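
The Jupyter ops above follow the typed-error migration that runs through this whole change: ops stop returning the catch-all AnyError and instead declare a thiserror enum per failure domain (JupyterBroadcastError here, the cipher errors further down). A minimal sketch of the pattern with a hypothetical helper; note the real JupyterBroadcastError maps its variants manually rather than deriving From:

// Each failure mode becomes a named variant, so the error class can be
// derived from the type instead of probing a boxed AnyError.
#[derive(Debug, thiserror::Error)]
enum BroadcastError {
    #[error(transparent)]
    SerdeJson(#[from] serde_json::Error),
    #[error("zeromq send failed: {0}")]
    ZeroMq(String),
}

// Hypothetical helper: `?` now carries the typed variant to the caller.
fn parse_content(raw: &str) -> Result<serde_json::Value, BroadcastError> {
    Ok(serde_json::from_str(raw)?)
}

fn main() {
    assert!(parse_content("{ not json").is_err());
    assert!(parse_content(r#"{"ok":true}"#).is_ok());
}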

View file

@ -51,7 +51,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer);
pub fn op_pledge_test_permissions(
state: &mut OpState,
#[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> {
) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
let token = Uuid::new_v4();
let parent_permissions = state.borrow_mut::<PermissionsContainer>();
let worker_permissions = parent_permissions.create_child_permissions(args)?;
@ -150,7 +150,7 @@ fn op_register_test_step(
#[smi] parent_id: usize,
#[smi] root_id: usize,
#[string] root_name: String,
) -> Result<usize, AnyError> {
) -> usize {
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
let origin = state.borrow::<ModuleSpecifier>().to_string();
let description = TestStepDescription {
@ -169,7 +169,7 @@ fn op_register_test_step(
};
let sender = state.borrow_mut::<TestEventSender>();
sender.send(TestEvent::StepRegister(description)).ok();
Ok(id)
id
}
#[op2(fast)]

View file

@ -1032,7 +1032,7 @@ fn get_resolved_markup_fmt_config(
max_attrs_per_line: None,
prefer_attrs_single_line: false,
html_normal_self_closing: None,
html_void_self_closing: Some(true),
html_void_self_closing: None,
component_self_closing: None,
svg_self_closing: None,
mathml_self_closing: None,

View file

@ -2,7 +2,7 @@
[package]
name = "deno_broadcast_channel"
version = "0.169.0"
version = "0.170.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_cache"
version = "0.107.0"
version = "0.108.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

ext/cache/lib.rs vendored
View file

@ -33,7 +33,9 @@ pub enum CacheError {
}
#[derive(Clone)]
pub struct CreateCache<C: Cache + 'static>(pub Arc<dyn Fn() -> C>);
pub struct CreateCache<C: Cache + 'static>(
pub Arc<dyn Fn() -> Result<C, CacheError>>,
);
deno_core::extension!(deno_cache,
deps = [ deno_webidl, deno_web, deno_url, deno_fetch ],
@ -231,7 +233,7 @@ where
if let Some(cache) = state.try_borrow::<CA>() {
Ok(cache.clone())
} else if let Some(create_cache) = state.try_borrow::<CreateCache<CA>>() {
let cache = create_cache.0();
let cache = create_cache.0()?;
state.put(cache);
Ok(state.borrow::<CA>().clone())
} else {
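
The lib.rs change above makes the cache factory fallible: `CreateCache` now holds `Fn() -> Result<C, CacheError>`, and the lazy-init site propagates the error with `?` instead of the constructor panicking. A simplified sketch of that shape, with stand-in types for the real cache and error:

use std::sync::Arc;

#[derive(Debug)]
struct CacheError; // stand-in for ext/cache's real error enum

#[derive(Clone)]
struct SqliteCache; // stand-in for SqliteBackedCache

// The factory itself may fail (e.g. the sqlite schema can't be created).
struct CreateCache<C>(Arc<dyn Fn() -> Result<C, CacheError>>);

fn get_or_init(create: &CreateCache<SqliteCache>) -> Result<SqliteCache, CacheError> {
    // Mirrors the diff: `let cache = create_cache.0()?;`
    (create.0)()
}

fn main() {
    let create = CreateCache(Arc::new(|| Ok(SqliteCache)));
    assert!(get_or_init(&create).is_ok());
}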

ext/cache/sqlite.rs vendored
View file

@ -42,7 +42,7 @@ pub struct SqliteBackedCache {
}
impl SqliteBackedCache {
pub fn new(cache_storage_dir: PathBuf) -> Self {
pub fn new(cache_storage_dir: PathBuf) -> Result<Self, CacheError> {
{
std::fs::create_dir_all(&cache_storage_dir)
.expect("failed to create cache dir");
@ -57,18 +57,14 @@ impl SqliteBackedCache {
PRAGMA synchronous=NORMAL;
PRAGMA optimize;
";
connection
.execute_batch(initial_pragmas)
.expect("failed to execute pragmas");
connection
.execute(
"CREATE TABLE IF NOT EXISTS cache_storage (
connection.execute_batch(initial_pragmas)?;
connection.execute(
"CREATE TABLE IF NOT EXISTS cache_storage (
id INTEGER PRIMARY KEY,
cache_name TEXT NOT NULL UNIQUE
)",
(),
)
.expect("failed to create cache_storage table");
(),
)?;
connection
.execute(
"CREATE TABLE IF NOT EXISTS request_response_list (
@ -86,12 +82,11 @@ impl SqliteBackedCache {
UNIQUE (cache_id, request_url)
)",
(),
)
.expect("failed to create request_response_list table");
SqliteBackedCache {
)?;
Ok(SqliteBackedCache {
connection: Arc::new(Mutex::new(connection)),
cache_storage_dir,
}
})
}
}
}

View file

@ -2,7 +2,7 @@
[package]
name = "deno_canvas"
version = "0.44.0"
version = "0.45.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_console"
version = "0.175.0"
version = "0.176.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_cron"
version = "0.55.0"
version = "0.56.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_crypto"
version = "0.189.0"
version = "0.190.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -269,12 +269,6 @@ class Request {
/** @type {AbortSignal} */
get [_signal]() {
const signal = this[_signalCache];
// This signal has not been created yet, and the request is still in progress
if (signal === undefined) {
const signal = newSignal();
this[_signalCache] = signal;
return signal;
}
// This signal has not been created yet, but the request has already completed
if (signal === false) {
const signal = newSignal();
@ -282,6 +276,18 @@ class Request {
signal[signalAbort](signalAbortError);
return signal;
}
// This signal has not been created yet, and the request is still in progress
if (signal === undefined) {
const signal = newSignal();
this[_signalCache] = signal;
this[_request].onCancel?.(() => {
signal[signalAbort](signalAbortError);
});
return signal;
}
return signal;
}
get [_mimeType]() {
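
The reorder above makes the signal cache's three states explicit: `false` means the request already completed (return an immediately-aborted signal), `undefined` means the request is still live and nobody asked yet (create a signal and subscribe it to the new `onCancel` hook), anything else is the cached signal. The same state machine, sketched as a Rust enum purely for illustration:

// Tri-state mirror of `this[_signalCache]` (JS `false` / `undefined` / signal).
enum SignalCache {
    Completed,     // request finished before the signal was ever created
    NotYetCreated, // request still in flight, no signal yet
    Created(&'static str), // stand-in for the cached AbortSignal
}

fn get_signal(cache: &mut SignalCache) -> &'static str {
    let signal = match cache {
        // already completed: hand back a signal that aborts immediately
        SignalCache::Completed => "aborted-signal",
        // in flight: create a live signal and register the onCancel callback
        SignalCache::NotYetCreated => "live-signal",
        SignalCache::Created(s) => return *s,
    };
    *cache = SignalCache::Created(signal);
    signal
}

fn main() {
    let mut cache = SignalCache::NotYetCreated;
    assert_eq!(get_signal(&mut cache), "live-signal");
    assert_eq!(get_signal(&mut cache), "live-signal"); // now cached
}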

View file

@ -2,7 +2,7 @@
[package]
name = "deno_fetch"
version = "0.199.0"
version = "0.200.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_ffi"
version = "0.162.0"
version = "0.163.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_fs"
version = "0.85.0"
version = "0.86.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -14,6 +14,7 @@ import {
op_http_get_request_headers,
op_http_get_request_method_and_url,
op_http_read_request_body,
op_http_request_on_cancel,
op_http_serve,
op_http_serve_on,
op_http_set_promise_complete,
@ -373,6 +374,18 @@ class InnerRequest {
get external() {
return this.#external;
}
onCancel(callback) {
if (this.#external === null) {
callback();
return;
}
PromisePrototypeThen(
op_http_request_on_cancel(this.#external),
callback,
);
}
}
class CallbackContext {

View file

@ -2,7 +2,7 @@
[package]
name = "deno_http"
version = "0.173.0"
version = "0.174.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -700,6 +700,27 @@ fn set_response(
http.complete();
}
#[op2(fast)]
pub fn op_http_get_request_cancelled(external: *const c_void) -> bool {
let http =
// SAFETY: op is called with external.
unsafe { clone_external!(external, "op_http_get_request_cancelled") };
http.cancelled()
}
#[op2(async)]
pub async fn op_http_request_on_cancel(external: *const c_void) {
let http =
// SAFETY: op is called with external.
unsafe { clone_external!(external, "op_http_request_on_cancel") };
let (tx, rx) = tokio::sync::oneshot::channel();
http.on_cancel(tx);
drop(http);
rx.await.ok();
}
/// Returned promise resolves when body streaming finishes.
/// Call [`op_http_close_after_finish`] when done with the external.
#[op2(async)]

View file

@ -112,7 +112,9 @@ deno_core::extension!(
http_next::op_http_close_after_finish,
http_next::op_http_get_request_header,
http_next::op_http_get_request_headers,
http_next::op_http_request_on_cancel,
http_next::op_http_get_request_method_and_url<HTTP>,
http_next::op_http_get_request_cancelled,
http_next::op_http_read_request_body,
http_next::op_http_serve_on<HTTP>,
http_next::op_http_serve<HTTP>,

View file

@ -27,6 +27,7 @@ use std::rc::Rc;
use std::task::Context;
use std::task::Poll;
use std::task::Waker;
use tokio::sync::oneshot;
pub type Request = hyper::Request<Incoming>;
pub type Response = hyper::Response<HttpRecordResponse>;
@ -211,6 +212,7 @@ pub struct UpgradeUnavailableError;
struct HttpRecordInner {
server_state: SignallingRc<HttpServerState>,
closed_channel: Option<oneshot::Sender<()>>,
request_info: HttpConnectionProperties,
request_parts: http::request::Parts,
request_body: Option<RequestBodyState>,
@ -276,6 +278,7 @@ impl HttpRecord {
response_body_finished: false,
response_body_waker: None,
trailers: None,
closed_channel: None,
been_dropped: false,
finished: false,
needs_close_after_finish: false,
@ -312,6 +315,10 @@ impl HttpRecord {
RefMut::map(self.self_mut(), |inner| &mut inner.needs_close_after_finish)
}
pub fn on_cancel(&self, sender: oneshot::Sender<()>) {
self.self_mut().closed_channel = Some(sender);
}
fn recycle(self: Rc<Self>) {
assert!(
Rc::strong_count(&self) == 1,
@ -390,6 +397,9 @@ impl HttpRecord {
inner.been_dropped = true;
// The request body might include actual resources.
inner.request_body.take();
if let Some(closed_channel) = inner.closed_channel.take() {
let _ = closed_channel.send(());
}
}
/// Complete this record, potentially expunging it if it is fully complete (ie: cancelled as well).
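
The service.rs hunks above wire request cancellation through a tokio oneshot channel: `on_cancel` stashes the sender on the record, and dropping the request fires it, which resolves the future that `op_http_request_on_cancel` awaits from JS. A self-contained sketch of that wiring (the `Record` type is a stand-in for HttpRecordInner):

use tokio::sync::oneshot;

// Stand-in for HttpRecordInner's new `closed_channel` slot.
struct Record {
    closed_channel: Option<oneshot::Sender<()>>,
}

impl Record {
    fn on_cancel(&mut self, tx: oneshot::Sender<()>) {
        self.closed_channel = Some(tx);
    }
    // Mirrors the drop path in the diff: fire the channel if anyone listens.
    fn drop_request(&mut self) {
        if let Some(tx) = self.closed_channel.take() {
            let _ = tx.send(());
        }
    }
}

#[tokio::main]
async fn main() {
    let (tx, rx) = oneshot::channel();
    let mut record = Record { closed_channel: None };
    record.on_cancel(tx);
    record.drop_request();
    rx.await.ok(); // resolves once the request was dropped/cancelled
}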

View file

@ -2,7 +2,7 @@
[package]
name = "deno_io"
version = "0.85.0"
version = "0.86.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_kv"
version = "0.83.0"
version = "0.84.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_napi"
version = "0.106.0"
version = "0.107.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "napi_sym"
version = "0.105.0"
version = "0.106.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_net"
version = "0.167.0"
version = "0.168.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -17,11 +17,11 @@ path = "lib.rs"
deno_core.workspace = true
deno_permissions.workspace = true
deno_tls.workspace = true
hickory-proto = "0.24"
hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
pin-project.workspace = true
rustls-tokio-stream.workspace = true
serde.workspace = true
socket2.workspace = true
thiserror.workspace = true
tokio.workspace = true
trust-dns-proto = "0.23"
trust-dns-resolver = { version = "0.23", features = ["tokio-runtime", "serde-config"] }

View file

@ -18,6 +18,16 @@ use deno_core::OpState;
use deno_core::RcRef;
use deno_core::Resource;
use deno_core::ResourceId;
use hickory_proto::rr::rdata::caa::Value;
use hickory_proto::rr::record_data::RData;
use hickory_proto::rr::record_type::RecordType;
use hickory_resolver::config::NameServerConfigGroup;
use hickory_resolver::config::ResolverConfig;
use hickory_resolver::config::ResolverOpts;
use hickory_resolver::error::ResolveError;
use hickory_resolver::error::ResolveErrorKind;
use hickory_resolver::system_conf;
use hickory_resolver::AsyncResolver;
use serde::Deserialize;
use serde::Serialize;
use socket2::Domain;
@ -33,16 +43,6 @@ use std::rc::Rc;
use std::str::FromStr;
use tokio::net::TcpStream;
use tokio::net::UdpSocket;
use trust_dns_proto::rr::rdata::caa::Value;
use trust_dns_proto::rr::record_data::RData;
use trust_dns_proto::rr::record_type::RecordType;
use trust_dns_resolver::config::NameServerConfigGroup;
use trust_dns_resolver::config::ResolverConfig;
use trust_dns_resolver::config::ResolverOpts;
use trust_dns_resolver::error::ResolveError;
use trust_dns_resolver::error::ResolveErrorKind;
use trust_dns_resolver::system_conf;
use trust_dns_resolver::AsyncResolver;
#[derive(Serialize, Clone, Debug)]
#[serde(rename_all = "camelCase")]
@ -828,6 +828,21 @@ mod tests {
use deno_core::JsRuntime;
use deno_core::RuntimeOptions;
use deno_permissions::PermissionCheckError;
use hickory_proto::rr::rdata::a::A;
use hickory_proto::rr::rdata::aaaa::AAAA;
use hickory_proto::rr::rdata::caa::KeyValue;
use hickory_proto::rr::rdata::caa::CAA;
use hickory_proto::rr::rdata::mx::MX;
use hickory_proto::rr::rdata::name::ANAME;
use hickory_proto::rr::rdata::name::CNAME;
use hickory_proto::rr::rdata::name::NS;
use hickory_proto::rr::rdata::name::PTR;
use hickory_proto::rr::rdata::naptr::NAPTR;
use hickory_proto::rr::rdata::srv::SRV;
use hickory_proto::rr::rdata::txt::TXT;
use hickory_proto::rr::rdata::SOA;
use hickory_proto::rr::record_data::RData;
use hickory_proto::rr::Name;
use socket2::SockRef;
use std::net::Ipv4Addr;
use std::net::Ipv6Addr;
@ -836,21 +851,6 @@ mod tests {
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use trust_dns_proto::rr::rdata::a::A;
use trust_dns_proto::rr::rdata::aaaa::AAAA;
use trust_dns_proto::rr::rdata::caa::KeyValue;
use trust_dns_proto::rr::rdata::caa::CAA;
use trust_dns_proto::rr::rdata::mx::MX;
use trust_dns_proto::rr::rdata::name::ANAME;
use trust_dns_proto::rr::rdata::name::CNAME;
use trust_dns_proto::rr::rdata::name::NS;
use trust_dns_proto::rr::rdata::name::PTR;
use trust_dns_proto::rr::rdata::naptr::NAPTR;
use trust_dns_proto::rr::rdata::srv::SRV;
use trust_dns_proto::rr::rdata::txt::TXT;
use trust_dns_proto::rr::rdata::SOA;
use trust_dns_proto::rr::record_data::RData;
use trust_dns_proto::rr::Name;
#[test]
fn rdata_to_return_record_a() {
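
This file's substantive change is only the crate rename: trust-dns was continued as hickory-dns, with the module paths otherwise unchanged (mirrored by the hickory-client/hickory-server swaps in Cargo.lock above). A minimal usage sketch against hickory-resolver 0.24; the lookup target is illustrative:

use hickory_resolver::config::{ResolverConfig, ResolverOpts};
use hickory_resolver::TokioAsyncResolver;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Same API shape as trust-dns-resolver 0.23, under the new crate name.
    let resolver =
        TokioAsyncResolver::tokio(ResolverConfig::default(), ResolverOpts::default());
    let response = resolver.lookup_ip("example.com.").await?;
    for ip in response.iter() {
        println!("{ip}");
    }
    Ok(())
}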

View file

@ -2,7 +2,7 @@
[package]
name = "deno_node"
version = "0.112.0"
version = "0.113.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -47,6 +47,11 @@ pub trait NodePermissions {
url: &Url,
api_name: &str,
) -> Result<(), PermissionCheckError>;
fn check_net(
&mut self,
host: (&str, Option<u16>),
api_name: &str,
) -> Result<(), PermissionCheckError>;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
#[inline(always)]
fn check_read(
@ -90,6 +95,14 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
deno_permissions::PermissionsContainer::check_net_url(self, url, api_name)
}
fn check_net(
&mut self,
host: (&str, Option<u16>),
api_name: &str,
) -> Result<(), PermissionCheckError> {
deno_permissions::PermissionsContainer::check_net(self, &host, api_name)
}
#[inline(always)]
fn check_read_with_api_name(
&mut self,
@ -398,6 +411,15 @@ deno_core::extension!(deno_node,
ops::process::op_node_process_kill,
ops::process::op_process_abort,
ops::tls::op_get_root_certificates,
ops::inspector::op_inspector_open<P>,
ops::inspector::op_inspector_close,
ops::inspector::op_inspector_url,
ops::inspector::op_inspector_wait,
ops::inspector::op_inspector_connect<P>,
ops::inspector::op_inspector_dispatch,
ops::inspector::op_inspector_disconnect,
ops::inspector::op_inspector_emit_protocol_event,
ops::inspector::op_inspector_enabled,
],
esm_entry_point = "ext:deno_node/02_init.js",
esm = [
@ -606,8 +628,8 @@ deno_core::extension!(deno_node,
"node:http" = "http.ts",
"node:http2" = "http2.ts",
"node:https" = "https.ts",
"node:inspector" = "inspector.ts",
"node:inspector/promises" = "inspector.ts",
"node:inspector" = "inspector.js",
"node:inspector/promises" = "inspector/promises.js",
"node:module" = "01_require.js",
"node:net" = "net.ts",
"node:os" = "os.ts",

View file

@ -4,9 +4,6 @@ use aes::cipher::block_padding::Pkcs7;
use aes::cipher::BlockDecryptMut;
use aes::cipher::BlockEncryptMut;
use aes::cipher::KeyIvInit;
use deno_core::error::range_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::Resource;
use digest::generic_array::GenericArray;
use digest::KeyInit;
@ -50,8 +47,22 @@ pub struct DecipherContext {
decipher: Rc<RefCell<Decipher>>,
}
#[derive(Debug, thiserror::Error)]
pub enum CipherContextError {
#[error("Cipher context is already in use")]
ContextInUse,
#[error("{0}")]
Resource(deno_core::error::AnyError),
#[error(transparent)]
Cipher(#[from] CipherError),
}
impl CipherContext {
pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result<Self, AnyError> {
pub fn new(
algorithm: &str,
key: &[u8],
iv: &[u8],
) -> Result<Self, CipherContextError> {
Ok(Self {
cipher: Rc::new(RefCell::new(Cipher::new(algorithm, key, iv)?)),
})
@ -74,16 +85,31 @@ impl CipherContext {
auto_pad: bool,
input: &[u8],
output: &mut [u8],
) -> Result<Tag, AnyError> {
) -> Result<Tag, CipherContextError> {
Rc::try_unwrap(self.cipher)
.map_err(|_| type_error("Cipher context is already in use"))?
.map_err(|_| CipherContextError::ContextInUse)?
.into_inner()
.r#final(auto_pad, input, output)
.map_err(Into::into)
}
}
#[derive(Debug, thiserror::Error)]
pub enum DecipherContextError {
#[error("Decipher context is already in use")]
ContextInUse,
#[error("{0}")]
Resource(deno_core::error::AnyError),
#[error(transparent)]
Decipher(#[from] DecipherError),
}
impl DecipherContext {
pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result<Self, AnyError> {
pub fn new(
algorithm: &str,
key: &[u8],
iv: &[u8],
) -> Result<Self, DecipherContextError> {
Ok(Self {
decipher: Rc::new(RefCell::new(Decipher::new(algorithm, key, iv)?)),
})
@ -103,11 +129,12 @@ impl DecipherContext {
input: &[u8],
output: &mut [u8],
auth_tag: &[u8],
) -> Result<(), AnyError> {
) -> Result<(), DecipherContextError> {
Rc::try_unwrap(self.decipher)
.map_err(|_| type_error("Decipher context is already in use"))?
.map_err(|_| DecipherContextError::ContextInUse)?
.into_inner()
.r#final(auto_pad, input, output, auth_tag)
.map_err(Into::into)
}
}
@ -123,12 +150,26 @@ impl Resource for DecipherContext {
}
}
#[derive(Debug, thiserror::Error)]
pub enum CipherError {
#[error("IV length must be 12 bytes")]
InvalidIvLength,
#[error("Invalid key length")]
InvalidKeyLength,
#[error("Invalid initialization vector")]
InvalidInitializationVector,
#[error("Cannot pad the input data")]
CannotPadInputData,
#[error("Unknown cipher {0}")]
UnknownCipher(String),
}
impl Cipher {
fn new(
algorithm_name: &str,
key: &[u8],
iv: &[u8],
) -> Result<Self, AnyError> {
) -> Result<Self, CipherError> {
use Cipher::*;
Ok(match algorithm_name {
"aes-128-cbc" => {
@ -139,7 +180,7 @@ impl Cipher {
"aes-256-ecb" => Aes256Ecb(Box::new(ecb::Encryptor::new(key.into()))),
"aes-128-gcm" => {
if iv.len() != 12 {
return Err(type_error("IV length must be 12 bytes"));
return Err(CipherError::InvalidIvLength);
}
let cipher =
@ -149,7 +190,7 @@ impl Cipher {
}
"aes-256-gcm" => {
if iv.len() != 12 {
return Err(type_error("IV length must be 12 bytes"));
return Err(CipherError::InvalidIvLength);
}
let cipher =
@ -159,15 +200,15 @@ impl Cipher {
}
"aes256" | "aes-256-cbc" => {
if key.len() != 32 {
return Err(range_error("Invalid key length"));
return Err(CipherError::InvalidKeyLength);
}
if iv.len() != 16 {
return Err(type_error("Invalid initialization vector"));
return Err(CipherError::InvalidInitializationVector);
}
Aes256Cbc(Box::new(cbc::Encryptor::new(key.into(), iv.into())))
}
_ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))),
_ => return Err(CipherError::UnknownCipher(algorithm_name.to_string())),
})
}
@ -235,14 +276,14 @@ impl Cipher {
auto_pad: bool,
input: &[u8],
output: &mut [u8],
) -> Result<Tag, AnyError> {
) -> Result<Tag, CipherError> {
assert!(input.len() < 16);
use Cipher::*;
match (self, auto_pad) {
(Aes128Cbc(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot pad the input data"))?;
.map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes128Cbc(mut encryptor), false) => {
@ -255,7 +296,7 @@ impl Cipher {
(Aes128Ecb(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot pad the input data"))?;
.map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes128Ecb(mut encryptor), false) => {
@ -268,7 +309,7 @@ impl Cipher {
(Aes192Ecb(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot pad the input data"))?;
.map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes192Ecb(mut encryptor), false) => {
@ -281,7 +322,7 @@ impl Cipher {
(Aes256Ecb(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot pad the input data"))?;
.map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes256Ecb(mut encryptor), false) => {
@ -296,7 +337,7 @@ impl Cipher {
(Aes256Cbc(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot pad the input data"))?;
.map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes256Cbc(mut encryptor), false) => {
@ -319,12 +360,32 @@ impl Cipher {
}
}
#[derive(Debug, thiserror::Error)]
pub enum DecipherError {
#[error("IV length must be 12 bytes")]
InvalidIvLength,
#[error("Invalid key length")]
InvalidKeyLength,
#[error("Invalid initialization vector")]
InvalidInitializationVector,
#[error("Cannot unpad the input data")]
CannotUnpadInputData,
#[error("Failed to authenticate data")]
DataAuthenticationFailed,
#[error("setAutoPadding(false) not supported for Aes128Gcm yet")]
SetAutoPaddingFalseAes128GcmUnsupported,
#[error("setAutoPadding(false) not supported for Aes256Gcm yet")]
SetAutoPaddingFalseAes256GcmUnsupported,
#[error("Unknown cipher {0}")]
UnknownCipher(String),
}
impl Decipher {
fn new(
algorithm_name: &str,
key: &[u8],
iv: &[u8],
) -> Result<Self, AnyError> {
) -> Result<Self, DecipherError> {
use Decipher::*;
Ok(match algorithm_name {
"aes-128-cbc" => {
@ -335,7 +396,7 @@ impl Decipher {
"aes-256-ecb" => Aes256Ecb(Box::new(ecb::Decryptor::new(key.into()))),
"aes-128-gcm" => {
if iv.len() != 12 {
return Err(type_error("IV length must be 12 bytes"));
return Err(DecipherError::InvalidIvLength);
}
let decipher =
@ -345,7 +406,7 @@ impl Decipher {
}
"aes-256-gcm" => {
if iv.len() != 12 {
return Err(type_error("IV length must be 12 bytes"));
return Err(DecipherError::InvalidIvLength);
}
let decipher =
@ -355,15 +416,17 @@ impl Decipher {
}
"aes256" | "aes-256-cbc" => {
if key.len() != 32 {
return Err(range_error("Invalid key length"));
return Err(DecipherError::InvalidKeyLength);
}
if iv.len() != 16 {
return Err(type_error("Invalid initialization vector"));
return Err(DecipherError::InvalidInitializationVector);
}
Aes256Cbc(Box::new(cbc::Decryptor::new(key.into(), iv.into())))
}
_ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))),
_ => {
return Err(DecipherError::UnknownCipher(algorithm_name.to_string()))
}
})
}
@ -432,14 +495,14 @@ impl Decipher {
input: &[u8],
output: &mut [u8],
auth_tag: &[u8],
) -> Result<(), AnyError> {
) -> Result<(), DecipherError> {
use Decipher::*;
match (self, auto_pad) {
(Aes128Cbc(decryptor), true) => {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot unpad the input data"))?;
.map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes128Cbc(mut decryptor), false) => {
@ -453,7 +516,7 @@ impl Decipher {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot unpad the input data"))?;
.map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes128Ecb(mut decryptor), false) => {
@ -467,7 +530,7 @@ impl Decipher {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot unpad the input data"))?;
.map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes192Ecb(mut decryptor), false) => {
@ -481,7 +544,7 @@ impl Decipher {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot unpad the input data"))?;
.map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes256Ecb(mut decryptor), false) => {
@ -496,28 +559,28 @@ impl Decipher {
if tag.as_slice() == auth_tag {
Ok(())
} else {
Err(type_error("Failed to authenticate data"))
Err(DecipherError::DataAuthenticationFailed)
}
}
(Aes128Gcm(_), false) => Err(type_error(
"setAutoPadding(false) not supported for Aes256Gcm yet",
)),
(Aes128Gcm(_), false) => {
Err(DecipherError::SetAutoPaddingFalseAes128GcmUnsupported)
}
(Aes256Gcm(decipher), true) => {
let tag = decipher.finish();
if tag.as_slice() == auth_tag {
Ok(())
} else {
Err(type_error("Failed to authenticate data"))
Err(DecipherError::DataAuthenticationFailed)
}
}
(Aes256Gcm(_), false) => Err(type_error(
"setAutoPadding(false) not supported for Aes256Gcm yet",
)),
(Aes256Gcm(_), false) => {
Err(DecipherError::SetAutoPaddingFalseAes256GcmUnsupported)
}
(Aes256Cbc(decryptor), true) => {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
.map_err(|_| type_error("Cannot unpad the input data"))?;
.map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes256Cbc(mut decryptor), false) => {

View file

@ -1,6 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::GarbageCollected;
use digest::Digest;
use digest::DynDigest;
@ -19,7 +17,7 @@ impl Hasher {
pub fn new(
algorithm: &str,
output_length: Option<usize>,
) -> Result<Self, AnyError> {
) -> Result<Self, HashError> {
let hash = Hash::new(algorithm, output_length)?;
Ok(Self {
@ -44,7 +42,7 @@ impl Hasher {
pub fn clone_inner(
&self,
output_length: Option<usize>,
) -> Result<Option<Self>, AnyError> {
) -> Result<Option<Self>, HashError> {
let hash = self.hash.borrow();
let Some(hash) = hash.as_ref() else {
return Ok(None);
@ -184,11 +182,19 @@ pub enum Hash {
use Hash::*;
#[derive(Debug, thiserror::Error)]
pub enum HashError {
#[error("Output length mismatch for non-extendable algorithm")]
OutputLengthMismatch,
#[error("Digest method not supported: {0}")]
DigestMethodUnsupported(String),
}
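
How the two variants fire, assuming "sha256" is routed through the fixed-size arm below (illustrative assertions, not tests from this change):

fn hash_error_examples() {
  // sha256 produces 32 bytes, so an explicit 32 is accepted...
  assert!(Hash::new("sha256", Some(32)).is_ok());
  // ...but any other requested length is a mismatch for a non-XOF digest.
  assert!(matches!(
    Hash::new("sha256", Some(16)),
    Err(HashError::OutputLengthMismatch)
  ));
  // Unrecognized names map to DigestMethodUnsupported.
  assert!(matches!(
    Hash::new("no-such-digest", None),
    Err(HashError::DigestMethodUnsupported(_))
  ));
}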
impl Hash {
pub fn new(
algorithm_name: &str,
output_length: Option<usize>,
) -> Result<Self, AnyError> {
) -> Result<Self, HashError> {
match algorithm_name {
"shake128" => return Ok(Shake128(Default::default(), output_length)),
"shake256" => return Ok(Shake256(Default::default(), output_length)),
@ -201,17 +207,13 @@ impl Hash {
let digest: D = Digest::new();
if let Some(length) = output_length {
if length != digest.output_size() {
return Err(generic_error(
"Output length mismatch for non-extendable algorithm",
));
return Err(HashError::OutputLengthMismatch);
}
}
FixedSize(Box::new(digest))
},
_ => {
return Err(generic_error(format!(
"Digest method not supported: {algorithm_name}"
)))
return Err(HashError::DigestMethodUnsupported(algorithm_name.to_string()))
}
);
@ -243,14 +245,12 @@ impl Hash {
pub fn clone_hash(
&self,
output_length: Option<usize>,
) -> Result<Self, AnyError> {
) -> Result<Self, HashError> {
let hash = match self {
FixedSize(context) => {
if let Some(length) = output_length {
if length != context.output_size() {
return Err(generic_error(
"Output length mismatch for non-extendable algorithm",
));
return Err(HashError::OutputLengthMismatch);
}
}
FixedSize(context.box_clone())

File diff suppressed because it is too large

View file

@ -1,7 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::unsync::spawn_blocking;
use deno_core::JsBuffer;
@ -34,14 +33,14 @@ use rsa::Pkcs1v15Encrypt;
use rsa::RsaPrivateKey;
use rsa::RsaPublicKey;
mod cipher;
pub mod cipher;
mod dh;
mod digest;
pub mod digest;
pub mod keys;
mod md5_sha1;
mod pkcs3;
mod primes;
mod sign;
pub mod sign;
pub mod x509;
use self::digest::match_fixed_digest_with_eager_block_buffer;
@ -58,38 +57,31 @@ pub fn op_node_check_prime(
pub fn op_node_check_prime_bytes(
#[anybuffer] bytes: &[u8],
#[number] checks: usize,
) -> Result<bool, AnyError> {
) -> bool {
let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes);
Ok(primes::is_probably_prime(&candidate, checks))
primes::is_probably_prime(&candidate, checks)
}
#[op2(async)]
pub async fn op_node_check_prime_async(
#[bigint] num: i64,
#[number] checks: usize,
) -> Result<bool, AnyError> {
) -> Result<bool, tokio::task::JoinError> {
// TODO(@littledivy): use rayon for CPU-bound tasks
Ok(
spawn_blocking(move || {
primes::is_probably_prime(&BigInt::from(num), checks)
})
.await?,
)
spawn_blocking(move || primes::is_probably_prime(&BigInt::from(num), checks))
.await
}
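
With the closure itself infallible, the only failure left is the join, so the op's Result narrows to JoinError. The pattern in isolation, assuming deno_core's unsync::spawn_blocking keeps tokio's bounds:

async fn off_main_thread<T, F>(work: F) -> Result<T, tokio::task::JoinError>
where
  F: FnOnce() -> T + Send + 'static,
  T: Send + 'static,
{
  // CPU-bound work leaves the event loop; a panic or cancellation in the
  // worker surfaces as the JoinError.
  spawn_blocking(work).await
}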
#[op2(async)]
pub fn op_node_check_prime_bytes_async(
#[anybuffer] bytes: &[u8],
#[number] checks: usize,
) -> Result<impl Future<Output = Result<bool, AnyError>>, AnyError> {
) -> impl Future<Output = Result<bool, tokio::task::JoinError>> {
let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes);
// TODO(@littledivy): use rayon for CPU-bound tasks
Ok(async move {
Ok(
spawn_blocking(move || primes::is_probably_prime(&candidate, checks))
.await?,
)
})
async move {
spawn_blocking(move || primes::is_probably_prime(&candidate, checks)).await
}
}
#[op2]
@ -97,7 +89,7 @@ pub fn op_node_check_prime_bytes_async(
pub fn op_node_create_hash(
#[string] algorithm: &str,
output_length: Option<u32>,
) -> Result<digest::Hasher, AnyError> {
) -> Result<digest::Hasher, digest::HashError> {
digest::Hasher::new(algorithm, output_length.map(|l| l as usize))
}
@ -145,17 +137,31 @@ pub fn op_node_hash_digest_hex(
pub fn op_node_hash_clone(
#[cppgc] hasher: &digest::Hasher,
output_length: Option<u32>,
) -> Result<Option<digest::Hasher>, AnyError> {
) -> Result<Option<digest::Hasher>, digest::HashError> {
hasher.clone_inner(output_length.map(|l| l as usize))
}
#[derive(Debug, thiserror::Error)]
pub enum PrivateEncryptDecryptError {
#[error(transparent)]
Pkcs8(#[from] pkcs8::Error),
#[error(transparent)]
Spki(#[from] spki::Error),
#[error(transparent)]
Utf8(#[from] std::str::Utf8Error),
#[error(transparent)]
Rsa(#[from] rsa::Error),
#[error("Unknown padding")]
UnknownPadding,
}
#[op2]
#[serde]
pub fn op_node_private_encrypt(
#[serde] key: StringOrBuffer,
#[serde] msg: StringOrBuffer,
#[smi] padding: u32,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?;
let mut rng = rand::thread_rng();
@ -172,7 +178,7 @@ pub fn op_node_private_encrypt(
.encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
.into(),
),
_ => Err(type_error("Unknown padding")),
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
}
}
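
The padding discriminants follow Node's crypto constants: 1 selects PKCS#1 v1.5 and 4 selects OAEP with SHA-1. A hedged sketch of the same dispatch against the rsa crate, outside the op machinery:

fn encrypt_with_padding(
  key: &rsa::RsaPublicKey,
  msg: &[u8],
  padding: u32,
) -> Result<Vec<u8>, PrivateEncryptDecryptError> {
  let mut rng = rand::thread_rng();
  match padding {
    1 => Ok(key.encrypt(&mut rng, rsa::Pkcs1v15Encrypt, msg)?),
    4 => Ok(key.encrypt(&mut rng, rsa::Oaep::new::<sha1::Sha1>(), msg)?),
    _ => Err(PrivateEncryptDecryptError::UnknownPadding),
  }
}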
@ -182,13 +188,13 @@ pub fn op_node_private_decrypt(
#[serde] key: StringOrBuffer,
#[serde] msg: StringOrBuffer,
#[smi] padding: u32,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?;
match padding {
1 => Ok(key.decrypt(Pkcs1v15Encrypt, &msg)?.into()),
4 => Ok(key.decrypt(Oaep::new::<sha1::Sha1>(), &msg)?.into()),
_ => Err(type_error("Unknown padding")),
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
}
}
@ -198,7 +204,7 @@ pub fn op_node_public_encrypt(
#[serde] key: StringOrBuffer,
#[serde] msg: StringOrBuffer,
#[smi] padding: u32,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
let key = RsaPublicKey::from_public_key_pem((&key).try_into()?)?;
let mut rng = rand::thread_rng();
@ -209,7 +215,7 @@ pub fn op_node_public_encrypt(
.encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
.into(),
),
_ => Err(type_error("Unknown padding")),
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
}
}
@ -220,7 +226,7 @@ pub fn op_node_create_cipheriv(
#[string] algorithm: &str,
#[buffer] key: &[u8],
#[buffer] iv: &[u8],
) -> Result<u32, AnyError> {
) -> Result<u32, cipher::CipherContextError> {
let context = cipher::CipherContext::new(algorithm, key, iv)?;
Ok(state.resource_table.add(context))
}
@ -262,11 +268,14 @@ pub fn op_node_cipheriv_final(
auto_pad: bool,
#[buffer] input: &[u8],
#[anybuffer] output: &mut [u8],
) -> Result<Option<Vec<u8>>, AnyError> {
let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
) -> Result<Option<Vec<u8>>, cipher::CipherContextError> {
let context = state
.resource_table
.take::<cipher::CipherContext>(rid)
.map_err(cipher::CipherContextError::Resource)?;
let context = Rc::try_unwrap(context)
.map_err(|_| type_error("Cipher context is already in use"))?;
context.r#final(auto_pad, input, output)
.map_err(|_| cipher::CipherContextError::ContextInUse)?;
context.r#final(auto_pad, input, output).map_err(Into::into)
}
#[op2]
@ -274,10 +283,13 @@ pub fn op_node_cipheriv_final(
pub fn op_node_cipheriv_take(
state: &mut OpState,
#[smi] rid: u32,
) -> Result<Option<Vec<u8>>, AnyError> {
let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
) -> Result<Option<Vec<u8>>, cipher::CipherContextError> {
let context = state
.resource_table
.take::<cipher::CipherContext>(rid)
.map_err(cipher::CipherContextError::Resource)?;
let context = Rc::try_unwrap(context)
.map_err(|_| type_error("Cipher context is already in use"))?;
.map_err(|_| cipher::CipherContextError::ContextInUse)?;
Ok(context.take_tag())
}
@ -288,7 +300,7 @@ pub fn op_node_create_decipheriv(
#[string] algorithm: &str,
#[buffer] key: &[u8],
#[buffer] iv: &[u8],
) -> Result<u32, AnyError> {
) -> Result<u32, cipher::DecipherContextError> {
let context = cipher::DecipherContext::new(algorithm, key, iv)?;
Ok(state.resource_table.add(context))
}
@ -326,10 +338,13 @@ pub fn op_node_decipheriv_decrypt(
pub fn op_node_decipheriv_take(
state: &mut OpState,
#[smi] rid: u32,
) -> Result<(), AnyError> {
let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
) -> Result<(), cipher::DecipherContextError> {
let context = state
.resource_table
.take::<cipher::DecipherContext>(rid)
.map_err(cipher::DecipherContextError::Resource)?;
Rc::try_unwrap(context)
.map_err(|_| type_error("Cipher context is already in use"))?;
.map_err(|_| cipher::DecipherContextError::ContextInUse)?;
Ok(())
}
@ -341,11 +356,16 @@ pub fn op_node_decipheriv_final(
#[buffer] input: &[u8],
#[anybuffer] output: &mut [u8],
#[buffer] auth_tag: &[u8],
) -> Result<(), AnyError> {
let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
) -> Result<(), cipher::DecipherContextError> {
let context = state
.resource_table
.take::<cipher::DecipherContext>(rid)
.map_err(cipher::DecipherContextError::Resource)?;
let context = Rc::try_unwrap(context)
.map_err(|_| type_error("Cipher context is already in use"))?;
context.r#final(auto_pad, input, output, auth_tag)
.map_err(|_| cipher::DecipherContextError::ContextInUse)?;
context
.r#final(auto_pad, input, output, auth_tag)
.map_err(Into::into)
}
#[op2]
@ -356,7 +376,7 @@ pub fn op_node_sign(
#[string] digest_type: &str,
#[smi] pss_salt_length: Option<u32>,
#[smi] dsa_signature_encoding: u32,
) -> Result<Box<[u8]>, AnyError> {
) -> Result<Box<[u8]>, sign::KeyObjectHandlePrehashedSignAndVerifyError> {
handle.sign_prehashed(
digest_type,
digest,
@ -373,7 +393,7 @@ pub fn op_node_verify(
#[buffer] signature: &[u8],
#[smi] pss_salt_length: Option<u32>,
#[smi] dsa_signature_encoding: u32,
) -> Result<bool, AnyError> {
) -> Result<bool, sign::KeyObjectHandlePrehashedSignAndVerifyError> {
handle.verify_prehashed(
digest_type,
digest,
@ -383,13 +403,21 @@ pub fn op_node_verify(
)
}
#[derive(Debug, thiserror::Error)]
pub enum Pbkdf2Error {
#[error("unsupported digest: {0}")]
UnsupportedDigest(String),
#[error(transparent)]
Join(#[from] tokio::task::JoinError),
}
fn pbkdf2_sync(
password: &[u8],
salt: &[u8],
iterations: u32,
algorithm_name: &str,
derived_key: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), Pbkdf2Error> {
match_fixed_digest_with_eager_block_buffer!(
algorithm_name,
fn <D>() {
@ -397,10 +425,7 @@ fn pbkdf2_sync(
Ok(())
},
_ => {
Err(type_error(format!(
"unsupported digest: {}",
algorithm_name
)))
Err(Pbkdf2Error::UnsupportedDigest(algorithm_name.to_string()))
}
)
}
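
Once the macro pins a concrete digest, the arm reduces to a single pbkdf2_hmac call; for SHA-256, roughly:

fn pbkdf2_sha256(
  password: &[u8],
  salt: &[u8],
  iterations: u32,
  derived_key: &mut [u8],
) {
  // Expansion of the fixed-digest arm for one algorithm.
  pbkdf2::pbkdf2_hmac::<sha2::Sha256>(password, salt, iterations, derived_key);
}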
@ -424,7 +449,7 @@ pub async fn op_node_pbkdf2_async(
#[smi] iterations: u32,
#[string] digest: String,
#[number] keylen: usize,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, Pbkdf2Error> {
spawn_blocking(move || {
let mut derived_key = vec![0; keylen];
pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key)
@ -450,15 +475,27 @@ pub async fn op_node_fill_random_async(#[smi] len: i32) -> ToJsBuffer {
.unwrap()
}
#[derive(Debug, thiserror::Error)]
pub enum HkdfError {
#[error("expected secret key")]
ExpectedSecretKey,
#[error("HKDF-Expand failed")]
HkdfExpandFailed,
#[error("Unsupported digest: {0}")]
UnsupportedDigest(String),
#[error(transparent)]
Join(#[from] tokio::task::JoinError),
}
fn hkdf_sync(
digest_algorithm: &str,
handle: &KeyObjectHandle,
salt: &[u8],
info: &[u8],
okm: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), HkdfError> {
let Some(ikm) = handle.as_secret_key() else {
return Err(type_error("expected secret key"));
return Err(HkdfError::ExpectedSecretKey);
};
match_fixed_digest_with_eager_block_buffer!(
@ -466,10 +503,10 @@ fn hkdf_sync(
fn <D>() {
let hk = Hkdf::<D>::new(Some(salt), ikm);
hk.expand(info, okm)
.map_err(|_| type_error("HKDF-Expand failed"))
.map_err(|_| HkdfError::HkdfExpandFailed)
},
_ => {
Err(type_error(format!("Unsupported digest: {}", digest_algorithm)))
Err(HkdfError::UnsupportedDigest(digest_algorithm.to_string()))
}
)
}
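
The same extract-then-expand flow for one concrete digest, as a sketch:

fn hkdf_sha256(
  ikm: &[u8],
  salt: &[u8],
  info: &[u8],
  okm: &mut [u8],
) -> Result<(), HkdfError> {
  let hk = Hkdf::<sha2::Sha256>::new(Some(salt), ikm);
  // Expand fails only if okm asks for more than 255 * hash_len bytes.
  hk.expand(info, okm).map_err(|_| HkdfError::HkdfExpandFailed)
}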
@ -481,7 +518,7 @@ pub fn op_node_hkdf(
#[buffer] salt: &[u8],
#[buffer] info: &[u8],
#[buffer] okm: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), HkdfError> {
hkdf_sync(digest_algorithm, handle, salt, info, okm)
}
@ -493,7 +530,7 @@ pub async fn op_node_hkdf_async(
#[buffer] salt: JsBuffer,
#[buffer] info: JsBuffer,
#[number] okm_len: usize,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, HkdfError> {
let handle = handle.clone();
spawn_blocking(move || {
let mut okm = vec![0u8; okm_len];
@ -509,27 +546,24 @@ pub fn op_node_dh_compute_secret(
#[buffer] prime: JsBuffer,
#[buffer] private_key: JsBuffer,
#[buffer] their_public_key: JsBuffer,
) -> Result<ToJsBuffer, AnyError> {
) -> ToJsBuffer {
let pubkey: BigUint = BigUint::from_bytes_be(their_public_key.as_ref());
let privkey: BigUint = BigUint::from_bytes_be(private_key.as_ref());
let primei: BigUint = BigUint::from_bytes_be(prime.as_ref());
let shared_secret: BigUint = pubkey.modpow(&privkey, &primei);
Ok(shared_secret.to_bytes_be().into())
shared_secret.to_bytes_be().into()
}
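
The op is plain finite-field Diffie-Hellman: each party raises the peer's public value to its own private exponent mod p, and (g^a)^b = (g^b)^a makes the results agree. A toy check with textbook numbers (p = 23, g = 5; values invented for illustration):

fn dh_toy_check() {
  use num_bigint::BigUint;
  let shared = |pubkey: u32, privkey: u32, prime: u32| {
    BigUint::from(pubkey).modpow(&BigUint::from(privkey), &BigUint::from(prime))
  };
  // a = 6 publishes A = 5^6 mod 23 = 8; b = 15 publishes B = 19.
  // Each side derives the same secret, 2.
  assert_eq!(shared(19, 6, 23), BigUint::from(2u32));
  assert_eq!(shared(8, 15, 23), BigUint::from(2u32));
}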
#[op2(fast)]
#[number]
pub fn op_node_random_int(
#[number] min: i64,
#[number] max: i64,
) -> Result<i64, AnyError> {
pub fn op_node_random_int(#[number] min: i64, #[number] max: i64) -> i64 {
let mut rng = rand::thread_rng();
// Uniform distribution is required to avoid Modulo Bias
// https://en.wikipedia.org/wiki/Fisher–Yates_shuffle#Modulo_bias
let dist = Uniform::from(min..max);
Ok(dist.sample(&mut rng))
dist.sample(&mut rng)
}
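
The Uniform distribution matters because the naive remainder trick is biased: when the span does not evenly divide the generator's range, low values land slightly more often. For contrast, the shape of the bug being avoided:

fn naive_random_int(min: i64, max: i64) -> i64 {
  use rand::Rng;
  // Biased whenever (max - min) does not divide 2^64: the leftover tail
  // of u64 values wraps onto the low end of the range.
  let span = (max - min) as u64;
  min + (rand::thread_rng().gen::<u64>() % span) as i64
}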
#[allow(clippy::too_many_arguments)]
@ -542,7 +576,7 @@ fn scrypt(
parallelization: u32,
_maxmem: u32,
output_buffer: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), deno_core::error::AnyError> {
// Construct Params
let params = scrypt::Params::new(
cost as u8,
@ -573,7 +607,7 @@ pub fn op_node_scrypt_sync(
#[smi] parallelization: u32,
#[smi] maxmem: u32,
#[anybuffer] output_buffer: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), deno_core::error::AnyError> {
scrypt(
password,
salt,
@ -586,6 +620,14 @@ pub fn op_node_scrypt_sync(
)
}
#[derive(Debug, thiserror::Error)]
pub enum ScryptAsyncError {
#[error(transparent)]
Join(#[from] tokio::task::JoinError),
#[error(transparent)]
Other(deno_core::error::AnyError),
}
#[op2(async)]
#[serde]
pub async fn op_node_scrypt_async(
@ -596,10 +638,11 @@ pub async fn op_node_scrypt_async(
#[smi] block_size: u32,
#[smi] parallelization: u32,
#[smi] maxmem: u32,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, ScryptAsyncError> {
spawn_blocking(move || {
let mut output_buffer = vec![0u8; keylen as usize];
let res = scrypt(
scrypt(
password,
salt,
keylen,
@ -608,25 +651,30 @@ pub async fn op_node_scrypt_async(
parallelization,
maxmem,
&mut output_buffer,
);
if res.is_ok() {
Ok(output_buffer.into())
} else {
// TODO(lev): rethrow the error?
Err(generic_error("scrypt failure"))
}
)
.map(|_| output_buffer.into())
.map_err(ScryptAsyncError::Other)
})
.await?
}
#[derive(Debug, thiserror::Error)]
pub enum EcdhEncodePubKey {
#[error("Invalid public key")]
InvalidPublicKey,
#[error("Unsupported curve")]
UnsupportedCurve,
#[error(transparent)]
Sec1(#[from] sec1::Error),
}
#[op2]
#[buffer]
pub fn op_node_ecdh_encode_pubkey(
#[string] curve: &str,
#[buffer] pubkey: &[u8],
compress: bool,
) -> Result<Vec<u8>, AnyError> {
) -> Result<Vec<u8>, EcdhEncodePubKey> {
use elliptic_curve::sec1::FromEncodedPoint;
match curve {
@ -639,7 +687,7 @@ pub fn op_node_ecdh_encode_pubkey(
);
// CtOption does not expose its variants.
if pubkey.is_none().into() {
return Err(type_error("Invalid public key"));
return Err(EcdhEncodePubKey::InvalidPublicKey);
}
let pubkey = pubkey.unwrap();
@ -652,7 +700,7 @@ pub fn op_node_ecdh_encode_pubkey(
);
// CtOption does not expose its variants.
if pubkey.is_none().into() {
return Err(type_error("Invalid public key"));
return Err(EcdhEncodePubKey::InvalidPublicKey);
}
let pubkey = pubkey.unwrap();
@ -665,7 +713,7 @@ pub fn op_node_ecdh_encode_pubkey(
);
// CtOption does not expose its variants.
if pubkey.is_none().into() {
return Err(type_error("Invalid public key"));
return Err(EcdhEncodePubKey::InvalidPublicKey);
}
let pubkey = pubkey.unwrap();
@ -678,14 +726,14 @@ pub fn op_node_ecdh_encode_pubkey(
);
// CtOption does not expose its variants.
if pubkey.is_none().into() {
return Err(type_error("Invalid public key"));
return Err(EcdhEncodePubKey::InvalidPublicKey);
}
let pubkey = pubkey.unwrap();
Ok(pubkey.to_encoded_point(compress).as_ref().to_vec())
}
&_ => Err(type_error("Unsupported curve")),
&_ => Err(EcdhEncodePubKey::UnsupportedCurve),
}
}
@ -695,7 +743,7 @@ pub fn op_node_ecdh_generate_keys(
#[buffer] pubbuf: &mut [u8],
#[buffer] privbuf: &mut [u8],
#[string] format: &str,
) -> Result<(), AnyError> {
) -> Result<(), deno_core::error::AnyError> {
let mut rng = rand::thread_rng();
let compress = format == "compressed";
match curve {
@ -742,7 +790,7 @@ pub fn op_node_ecdh_compute_secret(
#[buffer] this_priv: Option<JsBuffer>,
#[buffer] their_pub: &mut [u8],
#[buffer] secret: &mut [u8],
) -> Result<(), AnyError> {
) {
match curve {
"secp256k1" => {
let their_public_key =
@ -760,8 +808,6 @@ pub fn op_node_ecdh_compute_secret(
their_public_key.as_affine(),
);
secret.copy_from_slice(shared_secret.raw_secret_bytes());
Ok(())
}
"prime256v1" | "secp256r1" => {
let their_public_key =
@ -776,8 +822,6 @@ pub fn op_node_ecdh_compute_secret(
their_public_key.as_affine(),
);
secret.copy_from_slice(shared_secret.raw_secret_bytes());
Ok(())
}
"secp384r1" => {
let their_public_key =
@ -792,8 +836,6 @@ pub fn op_node_ecdh_compute_secret(
their_public_key.as_affine(),
);
secret.copy_from_slice(shared_secret.raw_secret_bytes());
Ok(())
}
"secp224r1" => {
let their_public_key =
@ -808,8 +850,6 @@ pub fn op_node_ecdh_compute_secret(
their_public_key.as_affine(),
);
secret.copy_from_slice(shared_secret.raw_secret_bytes());
Ok(())
}
&_ => todo!(),
}
@ -820,7 +860,7 @@ pub fn op_node_ecdh_compute_public_key(
#[string] curve: &str,
#[buffer] privkey: &[u8],
#[buffer] pubkey: &mut [u8],
) -> Result<(), AnyError> {
) {
match curve {
"secp256k1" => {
let this_private_key =
@ -828,8 +868,6 @@ pub fn op_node_ecdh_compute_public_key(
.expect("bad private key");
let public_key = this_private_key.public_key();
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
Ok(())
}
"prime256v1" | "secp256r1" => {
let this_private_key =
@ -837,7 +875,6 @@ pub fn op_node_ecdh_compute_public_key(
.expect("bad private key");
let public_key = this_private_key.public_key();
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
Ok(())
}
"secp384r1" => {
let this_private_key =
@ -845,7 +882,6 @@ pub fn op_node_ecdh_compute_public_key(
.expect("bad private key");
let public_key = this_private_key.public_key();
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
Ok(())
}
"secp224r1" => {
let this_private_key =
@ -853,7 +889,6 @@ pub fn op_node_ecdh_compute_public_key(
.expect("bad private key");
let public_key = this_private_key.public_key();
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
Ok(())
}
&_ => todo!(),
}
@ -874,8 +909,20 @@ pub fn op_node_gen_prime(#[number] size: usize) -> ToJsBuffer {
#[serde]
pub async fn op_node_gen_prime_async(
#[number] size: usize,
) -> Result<ToJsBuffer, AnyError> {
Ok(spawn_blocking(move || gen_prime(size)).await?)
) -> Result<ToJsBuffer, tokio::task::JoinError> {
spawn_blocking(move || gen_prime(size)).await
}
#[derive(Debug, thiserror::Error)]
pub enum DiffieHellmanError {
#[error("Expected private key")]
ExpectedPrivateKey,
#[error("Expected public key")]
ExpectedPublicKey,
#[error("DH parameters mismatch")]
DhParametersMismatch,
#[error("Unsupported key type for diffie hellman, or key type mismatch")]
UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch,
}
#[op2]
@ -883,117 +930,134 @@ pub async fn op_node_gen_prime_async(
pub fn op_node_diffie_hellman(
#[cppgc] private: &KeyObjectHandle,
#[cppgc] public: &KeyObjectHandle,
) -> Result<Box<[u8]>, AnyError> {
) -> Result<Box<[u8]>, DiffieHellmanError> {
let private = private
.as_private_key()
.ok_or_else(|| type_error("Expected private key"))?;
.ok_or(DiffieHellmanError::ExpectedPrivateKey)?;
let public = public
.as_public_key()
.ok_or_else(|| type_error("Expected public key"))?;
.ok_or(DiffieHellmanError::ExpectedPublicKey)?;
let res = match (private, &*public) {
(
AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)),
AsymmetricPublicKey::Ec(EcPublicKey::P224(public)),
) => p224::ecdh::diffie_hellman(
private.to_nonzero_scalar(),
public.as_affine(),
)
.raw_secret_bytes()
.to_vec()
.into_boxed_slice(),
(
AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)),
AsymmetricPublicKey::Ec(EcPublicKey::P256(public)),
) => p256::ecdh::diffie_hellman(
private.to_nonzero_scalar(),
public.as_affine(),
)
.raw_secret_bytes()
.to_vec()
.into_boxed_slice(),
(
AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)),
AsymmetricPublicKey::Ec(EcPublicKey::P384(public)),
) => p384::ecdh::diffie_hellman(
private.to_nonzero_scalar(),
public.as_affine(),
)
.raw_secret_bytes()
.to_vec()
.into_boxed_slice(),
(
AsymmetricPrivateKey::X25519(private),
AsymmetricPublicKey::X25519(public),
) => private
.diffie_hellman(public)
.to_bytes()
.into_iter()
.collect(),
(AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => {
if private.params.prime != public.params.prime
|| private.params.base != public.params.base
{
return Err(type_error("DH parameters mismatch"));
let res =
match (private, &*public) {
(
AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)),
AsymmetricPublicKey::Ec(EcPublicKey::P224(public)),
) => p224::ecdh::diffie_hellman(
private.to_nonzero_scalar(),
public.as_affine(),
)
.raw_secret_bytes()
.to_vec()
.into_boxed_slice(),
(
AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)),
AsymmetricPublicKey::Ec(EcPublicKey::P256(public)),
) => p256::ecdh::diffie_hellman(
private.to_nonzero_scalar(),
public.as_affine(),
)
.raw_secret_bytes()
.to_vec()
.into_boxed_slice(),
(
AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)),
AsymmetricPublicKey::Ec(EcPublicKey::P384(public)),
) => p384::ecdh::diffie_hellman(
private.to_nonzero_scalar(),
public.as_affine(),
)
.raw_secret_bytes()
.to_vec()
.into_boxed_slice(),
(
AsymmetricPrivateKey::X25519(private),
AsymmetricPublicKey::X25519(public),
) => private
.diffie_hellman(public)
.to_bytes()
.into_iter()
.collect(),
(AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => {
if private.params.prime != public.params.prime
|| private.params.base != public.params.base
{
return Err(DiffieHellmanError::DhParametersMismatch);
}
// OSIP - Octet-String-to-Integer primitive
let public_key = public.key.clone().into_vec();
let pubkey = BigUint::from_bytes_be(&public_key);
// Exponentiation (z = y^x mod p)
let prime = BigUint::from_bytes_be(private.params.prime.as_bytes());
let private_key = private.key.clone().into_vec();
let private_key = BigUint::from_bytes_be(&private_key);
let shared_secret = pubkey.modpow(&private_key, &prime);
shared_secret.to_bytes_be().into()
}
// OSIP - Octet-String-to-Integer primitive
let public_key = public.key.clone().into_vec();
let pubkey = BigUint::from_bytes_be(&public_key);
// Exponentiation (z = y^x mod p)
let prime = BigUint::from_bytes_be(private.params.prime.as_bytes());
let private_key = private.key.clone().into_vec();
let private_key = BigUint::from_bytes_be(&private_key);
let shared_secret = pubkey.modpow(&private_key, &prime);
shared_secret.to_bytes_be().into()
}
_ => {
return Err(type_error(
"Unsupported key type for diffie hellman, or key type mismatch",
))
}
};
_ => return Err(
DiffieHellmanError::UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch,
),
};
Ok(res)
}
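
For the X25519 arm specifically, the dalek call boils down to one scalar multiplication; a hedged sketch with x25519_dalek types:

fn x25519_shared(
  private: &x25519_dalek::StaticSecret,
  public: &x25519_dalek::PublicKey,
) -> [u8; 32] {
  // Montgomery-curve scalar multiplication; these bytes feed the boxed
  // slice returned above.
  private.diffie_hellman(public).to_bytes()
}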
#[derive(Debug, thiserror::Error)]
pub enum SignEd25519Error {
#[error("Expected private key")]
ExpectedPrivateKey,
#[error("Expected Ed25519 private key")]
ExpectedEd25519PrivateKey,
#[error("Invalid Ed25519 private key")]
InvalidEd25519PrivateKey,
}
#[op2(fast)]
pub fn op_node_sign_ed25519(
#[cppgc] key: &KeyObjectHandle,
#[buffer] data: &[u8],
#[buffer] signature: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), SignEd25519Error> {
let private = key
.as_private_key()
.ok_or_else(|| type_error("Expected private key"))?;
.ok_or(SignEd25519Error::ExpectedPrivateKey)?;
let ed25519 = match private {
AsymmetricPrivateKey::Ed25519(private) => private,
_ => return Err(type_error("Expected Ed25519 private key")),
_ => return Err(SignEd25519Error::ExpectedEd25519PrivateKey),
};
let pair = Ed25519KeyPair::from_seed_unchecked(ed25519.as_bytes().as_slice())
.map_err(|_| type_error("Invalid Ed25519 private key"))?;
.map_err(|_| SignEd25519Error::InvalidEd25519PrivateKey)?;
signature.copy_from_slice(pair.sign(data).as_ref());
Ok(())
}
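
A round-trip of the same ring primitives, pairing this op's signing path with the verification below (a sketch; unwraps stand in for real error handling):

fn ed25519_roundtrip(seed: &[u8; 32], data: &[u8]) -> bool {
  use ring::signature::{self, Ed25519KeyPair, KeyPair};
  let pair = Ed25519KeyPair::from_seed_unchecked(seed).unwrap();
  let sig = pair.sign(data);
  signature::UnparsedPublicKey::new(
    &signature::ED25519,
    pair.public_key().as_ref(),
  )
  .verify(data, sig.as_ref())
  .is_ok()
}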
#[derive(Debug, thiserror::Error)]
pub enum VerifyEd25519Error {
#[error("Expected public key")]
ExpectedPublicKey,
#[error("Expected Ed25519 public key")]
ExpectedEd25519PublicKey,
}
#[op2(fast)]
pub fn op_node_verify_ed25519(
#[cppgc] key: &KeyObjectHandle,
#[buffer] data: &[u8],
#[buffer] signature: &[u8],
) -> Result<bool, AnyError> {
) -> Result<bool, VerifyEd25519Error> {
let public = key
.as_public_key()
.ok_or_else(|| type_error("Expected public key"))?;
.ok_or(VerifyEd25519Error::ExpectedPublicKey)?;
let ed25519 = match &*public {
AsymmetricPublicKey::Ed25519(public) => public,
_ => return Err(type_error("Expected Ed25519 public key")),
_ => return Err(VerifyEd25519Error::ExpectedEd25519PublicKey),
};
let verified = ring::signature::UnparsedPublicKey::new(

View file

@ -1,7 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use rand::rngs::OsRng;
use rsa::signature::hazmat::PrehashSigner as _;
use rsa::signature::hazmat::PrehashVerifier as _;
@ -26,7 +23,7 @@ use elliptic_curve::FieldBytesSize;
fn dsa_signature<C: elliptic_curve::PrimeCurve>(
encoding: u32,
signature: ecdsa::Signature<C>,
) -> Result<Box<[u8]>, AnyError>
) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError>
where
MaxSize<C>: ArrayLength<u8>,
<FieldBytesSize<C> as Add>::Output: Add<MaxOverhead> + ArrayLength<u8>,
@ -36,10 +33,54 @@ where
0 => Ok(signature.to_der().to_bytes().to_vec().into_boxed_slice()),
// IEEE P1363
1 => Ok(signature.to_bytes().to_vec().into_boxed_slice()),
_ => Err(type_error("invalid DSA signature encoding")),
_ => Err(
KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignatureEncoding,
),
}
}
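
The two encodings carry the same (r, s) pair and differ only in framing: DER wraps them in an ASN.1 SEQUENCE of INTEGERs (variable length), while IEEE P1363 is the fixed-width r || s concatenation. Mirroring the calls above for P-256:

fn encode_p256_signature(
  sig: &ecdsa::Signature<p256::NistP256>,
  encoding: u32,
) -> Option<Box<[u8]>> {
  match encoding {
    0 => Some(sig.to_der().to_bytes().to_vec().into_boxed_slice()), // DER
    1 => Some(sig.to_bytes().to_vec().into_boxed_slice()), // raw r || s
    _ => None,
  }
}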
#[derive(Debug, thiserror::Error)]
pub enum KeyObjectHandlePrehashedSignAndVerifyError {
#[error("invalid DSA signature encoding")]
InvalidDsaSignatureEncoding,
#[error("key is not a private key")]
KeyIsNotPrivate,
#[error("digest not allowed for RSA signature: {0}")]
DigestNotAllowedForRsaSignature(String),
#[error("failed to sign digest with RSA")]
FailedToSignDigestWithRsa,
#[error("digest not allowed for RSA-PSS signature: {0}")]
DigestNotAllowedForRsaPssSignature(String),
#[error("failed to sign digest with RSA-PSS")]
FailedToSignDigestWithRsaPss,
#[error("failed to sign digest with DSA")]
FailedToSignDigestWithDsa,
#[error("rsa-pss with different mf1 hash algorithm and hash algorithm is not supported")]
RsaPssHashAlgorithmUnsupported,
#[error(
"private key does not allow {actual} to be used, expected {expected}"
)]
PrivateKeyDisallowsUsage { actual: String, expected: String },
#[error("failed to sign digest")]
FailedToSignDigest,
#[error("x25519 key cannot be used for signing")]
X25519KeyCannotBeUsedForSigning,
#[error("Ed25519 key cannot be used for prehashed signing")]
Ed25519KeyCannotBeUsedForPrehashedSigning,
#[error("DH key cannot be used for signing")]
DhKeyCannotBeUsedForSigning,
#[error("key is not a public or private key")]
KeyIsNotPublicOrPrivate,
#[error("Invalid DSA signature")]
InvalidDsaSignature,
#[error("x25519 key cannot be used for verification")]
X25519KeyCannotBeUsedForVerification,
#[error("Ed25519 key cannot be used for prehashed verification")]
Ed25519KeyCannotBeUsedForPrehashedVerification,
#[error("DH key cannot be used for verification")]
DhKeyCannotBeUsedForVerification,
}
impl KeyObjectHandle {
pub fn sign_prehashed(
&self,
@ -47,10 +88,10 @@ impl KeyObjectHandle {
digest: &[u8],
pss_salt_length: Option<u32>,
dsa_signature_encoding: u32,
) -> Result<Box<[u8]>, AnyError> {
) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError> {
let private_key = self
.as_private_key()
.ok_or_else(|| type_error("key is not a private key"))?;
.ok_or(KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPrivate)?;
match private_key {
AsymmetricPrivateKey::Rsa(key) => {
@ -63,17 +104,14 @@ impl KeyObjectHandle {
rsa::pkcs1v15::Pkcs1v15Sign::new::<D>()
},
_ => {
return Err(type_error(format!(
"digest not allowed for RSA signature: {}",
digest_type
)))
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
}
)
};
let signature = signer
.sign(Some(&mut OsRng), key, digest)
.map_err(|_| generic_error("failed to sign digest with RSA"))?;
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsa)?;
Ok(signature.into())
}
AsymmetricPrivateKey::RsaPss(key) => {
@ -81,9 +119,7 @@ impl KeyObjectHandle {
let mut salt_length = None;
if let Some(details) = &key.details {
if details.hash_algorithm != details.mf1_hash_algorithm {
return Err(type_error(
"rsa-pss with different mf1 hash algorithm and hash algorithm is not supported",
));
return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported);
}
hash_algorithm = Some(details.hash_algorithm);
salt_length = Some(details.salt_length as usize);
@ -96,10 +132,10 @@ impl KeyObjectHandle {
fn <D>(algorithm: Option<RsaPssHashAlgorithm>) {
if let Some(hash_algorithm) = hash_algorithm.take() {
if Some(hash_algorithm) != algorithm {
return Err(type_error(format!(
"private key does not allow {} to be used, expected {}",
digest_type, hash_algorithm.as_str()
)));
return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage {
actual: digest_type.to_string(),
expected: hash_algorithm.as_str().to_string(),
});
}
}
if let Some(salt_length) = salt_length {
@ -109,15 +145,12 @@ impl KeyObjectHandle {
}
},
_ => {
return Err(type_error(format!(
"digest not allowed for RSA-PSS signature: {}",
digest_type
)))
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string()));
}
);
let signature = pss
.sign(Some(&mut OsRng), &key.key, digest)
.map_err(|_| generic_error("failed to sign digest with RSA-PSS"))?;
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsaPss)?;
Ok(signature.into())
}
AsymmetricPrivateKey::Dsa(key) => {
@ -127,15 +160,12 @@ impl KeyObjectHandle {
key.sign_prehashed_rfc6979::<D>(digest)
},
_ => {
return Err(type_error(format!(
"digest not allowed for RSA signature: {}",
digest_type
)))
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
}
);
let signature =
res.map_err(|_| generic_error("failed to sign digest with DSA"))?;
res.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithDsa)?;
Ok(signature.into())
}
AsymmetricPrivateKey::Ec(key) => match key {
@ -143,7 +173,7 @@ impl KeyObjectHandle {
let signing_key = p224::ecdsa::SigningKey::from(key);
let signature: p224::ecdsa::Signature = signing_key
.sign_prehash(digest)
.map_err(|_| type_error("failed to sign digest"))?;
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
dsa_signature(dsa_signature_encoding, signature)
}
@ -151,7 +181,7 @@ impl KeyObjectHandle {
let signing_key = p256::ecdsa::SigningKey::from(key);
let signature: p256::ecdsa::Signature = signing_key
.sign_prehash(digest)
.map_err(|_| type_error("failed to sign digest"))?;
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
dsa_signature(dsa_signature_encoding, signature)
}
@ -159,19 +189,17 @@ impl KeyObjectHandle {
let signing_key = p384::ecdsa::SigningKey::from(key);
let signature: p384::ecdsa::Signature = signing_key
.sign_prehash(digest)
.map_err(|_| type_error("failed to sign digest"))?;
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
dsa_signature(dsa_signature_encoding, signature)
}
},
AsymmetricPrivateKey::X25519(_) => {
Err(type_error("x25519 key cannot be used for signing"))
Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForSigning)
}
AsymmetricPrivateKey::Ed25519(_) => Err(type_error(
"Ed25519 key cannot be used for prehashed signing",
)),
AsymmetricPrivateKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedSigning),
AsymmetricPrivateKey::Dh(_) => {
Err(type_error("DH key cannot be used for signing"))
Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForSigning)
}
}
}
@ -183,10 +211,10 @@ impl KeyObjectHandle {
signature: &[u8],
pss_salt_length: Option<u32>,
dsa_signature_encoding: u32,
) -> Result<bool, AnyError> {
let public_key = self
.as_public_key()
.ok_or_else(|| type_error("key is not a public or private key"))?;
) -> Result<bool, KeyObjectHandlePrehashedSignAndVerifyError> {
let public_key = self.as_public_key().ok_or(
KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPublicOrPrivate,
)?;
match &*public_key {
AsymmetricPublicKey::Rsa(key) => {
@ -199,10 +227,7 @@ impl KeyObjectHandle {
rsa::pkcs1v15::Pkcs1v15Sign::new::<D>()
},
_ => {
return Err(type_error(format!(
"digest not allowed for RSA signature: {}",
digest_type
)))
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
}
)
};
@ -214,9 +239,7 @@ impl KeyObjectHandle {
let mut salt_length = None;
if let Some(details) = &key.details {
if details.hash_algorithm != details.mf1_hash_algorithm {
return Err(type_error(
"rsa-pss with different mf1 hash algorithm and hash algorithm is not supported",
));
return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported);
}
hash_algorithm = Some(details.hash_algorithm);
salt_length = Some(details.salt_length as usize);
@ -229,10 +252,10 @@ impl KeyObjectHandle {
fn <D>(algorithm: Option<RsaPssHashAlgorithm>) {
if let Some(hash_algorithm) = hash_algorithm.take() {
if Some(hash_algorithm) != algorithm {
return Err(type_error(format!(
"private key does not allow {} to be used, expected {}",
digest_type, hash_algorithm.as_str()
)));
return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage {
actual: digest_type.to_string(),
expected: hash_algorithm.as_str().to_string(),
});
}
}
if let Some(salt_length) = salt_length {
@ -242,17 +265,14 @@ impl KeyObjectHandle {
}
},
_ => {
return Err(type_error(format!(
"digest not allowed for RSA-PSS signature: {}",
digest_type
)))
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string()));
}
);
Ok(pss.verify(&key.key, digest, signature).is_ok())
}
AsymmetricPublicKey::Dsa(key) => {
let signature = dsa::Signature::from_der(signature)
.map_err(|_| type_error("Invalid DSA signature"))?;
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignature)?;
Ok(key.verify_prehash(digest, &signature).is_ok())
}
AsymmetricPublicKey::Ec(key) => match key {
@ -294,13 +314,11 @@ impl KeyObjectHandle {
}
},
AsymmetricPublicKey::X25519(_) => {
Err(type_error("x25519 key cannot be used for verification"))
Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForVerification)
}
AsymmetricPublicKey::Ed25519(_) => Err(type_error(
"Ed25519 key cannot be used for prehashed verification",
)),
AsymmetricPublicKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedVerification),
AsymmetricPublicKey::Dh(_) => {
Err(type_error("DH key cannot be used for verification"))
Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForVerification)
}
}
}

View file

@ -1,11 +1,11 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::AnyError;
use deno_core::op2;
use x509_parser::der_parser::asn1_rs::Any;
use x509_parser::der_parser::asn1_rs::Tag;
use x509_parser::der_parser::oid::Oid;
pub use x509_parser::error::X509Error;
use x509_parser::extensions;
use x509_parser::pem;
use x509_parser::prelude::*;
@ -65,7 +65,7 @@ impl<'a> Deref for CertificateView<'a> {
#[cppgc]
pub fn op_node_x509_parse(
#[buffer] buf: &[u8],
) -> Result<Certificate, AnyError> {
) -> Result<Certificate, X509Error> {
let source = match pem::parse_x509_pem(buf) {
Ok((_, pem)) => CertificateSources::Pem(pem),
Err(_) => CertificateSources::Der(buf.to_vec().into_boxed_slice()),
@ -81,7 +81,7 @@ pub fn op_node_x509_parse(
X509Certificate::from_der(buf).map(|(_, cert)| cert)?
}
};
Ok::<_, AnyError>(CertificateView { cert })
Ok::<_, X509Error>(CertificateView { cert })
},
)?;
@ -89,23 +89,23 @@ pub fn op_node_x509_parse(
}
#[op2(fast)]
pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> Result<bool, AnyError> {
pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> bool {
let cert = cert.inner.get().deref();
Ok(cert.is_ca())
cert.is_ca()
}
#[op2(fast)]
pub fn op_node_x509_check_email(
#[cppgc] cert: &Certificate,
#[string] email: &str,
) -> Result<bool, AnyError> {
) -> bool {
let cert = cert.inner.get().deref();
let subject = cert.subject();
if subject
.iter_email()
.any(|e| e.as_str().unwrap_or("") == email)
{
return Ok(true);
return true;
}
let subject_alt = cert
@ -121,62 +121,60 @@ pub fn op_node_x509_check_email(
for name in &subject_alt.general_names {
if let extensions::GeneralName::RFC822Name(n) = name {
if *n == email {
return Ok(true);
return true;
}
}
}
}
Ok(false)
false
}
#[op2]
#[string]
pub fn op_node_x509_fingerprint(
#[cppgc] cert: &Certificate,
) -> Result<Option<String>, AnyError> {
Ok(cert.fingerprint::<sha1::Sha1>())
pub fn op_node_x509_fingerprint(#[cppgc] cert: &Certificate) -> Option<String> {
cert.fingerprint::<sha1::Sha1>()
}
#[op2]
#[string]
pub fn op_node_x509_fingerprint256(
#[cppgc] cert: &Certificate,
) -> Result<Option<String>, AnyError> {
Ok(cert.fingerprint::<sha2::Sha256>())
) -> Option<String> {
cert.fingerprint::<sha2::Sha256>()
}
#[op2]
#[string]
pub fn op_node_x509_fingerprint512(
#[cppgc] cert: &Certificate,
) -> Result<Option<String>, AnyError> {
Ok(cert.fingerprint::<sha2::Sha512>())
) -> Option<String> {
cert.fingerprint::<sha2::Sha512>()
}
#[op2]
#[string]
pub fn op_node_x509_get_issuer(
#[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
) -> Result<String, X509Error> {
let cert = cert.inner.get().deref();
Ok(x509name_to_string(cert.issuer(), oid_registry())?)
x509name_to_string(cert.issuer(), oid_registry())
}
#[op2]
#[string]
pub fn op_node_x509_get_subject(
#[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
) -> Result<String, X509Error> {
let cert = cert.inner.get().deref();
Ok(x509name_to_string(cert.subject(), oid_registry())?)
x509name_to_string(cert.subject(), oid_registry())
}
#[op2]
#[cppgc]
pub fn op_node_x509_public_key(
#[cppgc] cert: &Certificate,
) -> Result<KeyObjectHandle, AnyError> {
) -> Result<KeyObjectHandle, super::keys::X509PublicKeyError> {
let cert = cert.inner.get().deref();
let public_key = &cert.tbs_certificate.subject_pki;
@ -245,37 +243,29 @@ fn x509name_to_string(
#[op2]
#[string]
pub fn op_node_x509_get_valid_from(
#[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
pub fn op_node_x509_get_valid_from(#[cppgc] cert: &Certificate) -> String {
let cert = cert.inner.get().deref();
Ok(cert.validity().not_before.to_string())
cert.validity().not_before.to_string()
}
#[op2]
#[string]
pub fn op_node_x509_get_valid_to(
#[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
pub fn op_node_x509_get_valid_to(#[cppgc] cert: &Certificate) -> String {
let cert = cert.inner.get().deref();
Ok(cert.validity().not_after.to_string())
cert.validity().not_after.to_string()
}
#[op2]
#[string]
pub fn op_node_x509_get_serial_number(
#[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
pub fn op_node_x509_get_serial_number(#[cppgc] cert: &Certificate) -> String {
let cert = cert.inner.get().deref();
let mut s = cert.serial.to_str_radix(16);
s.make_ascii_uppercase();
Ok(s)
s
}
#[op2(fast)]
pub fn op_node_x509_key_usage(
#[cppgc] cert: &Certificate,
) -> Result<u16, AnyError> {
pub fn op_node_x509_key_usage(#[cppgc] cert: &Certificate) -> u16 {
let cert = cert.inner.get().deref();
let key_usage = cert
.extensions()
@ -286,5 +276,5 @@ pub fn op_node_x509_key_usage(
_ => None,
});
Ok(key_usage.map(|k| k.flags).unwrap_or(0))
key_usage.map(|k| k.flags).unwrap_or(0)
}

161
ext/node/ops/inspector.rs Normal file
View file

@ -0,0 +1,161 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::NodePermissions;
use deno_core::anyhow::Error;
use deno_core::error::generic_error;
use deno_core::futures::channel::mpsc;
use deno_core::op2;
use deno_core::v8;
use deno_core::GarbageCollected;
use deno_core::InspectorSessionKind;
use deno_core::InspectorSessionOptions;
use deno_core::JsRuntimeInspector;
use deno_core::OpState;
use std::cell::RefCell;
use std::rc::Rc;
#[op2(fast)]
pub fn op_inspector_enabled() -> bool {
// TODO: hook up to InspectorServer
false
}
#[op2]
pub fn op_inspector_open<P>(
_state: &mut OpState,
_port: Option<u16>,
#[string] _host: Option<String>,
) -> Result<(), Error>
where
P: NodePermissions + 'static,
{
// TODO: hook up to InspectorServer
/*
let server = state.borrow_mut::<InspectorServer>();
if let Some(host) = host {
server.set_host(host);
}
if let Some(port) = port {
server.set_port(port);
}
state
.borrow_mut::<P>()
.check_net((server.host(), Some(server.port())), "inspector.open")?;
*/
Ok(())
}
#[op2(fast)]
pub fn op_inspector_close() {
// TODO: hook up to InspectorServer
}
#[op2]
#[string]
pub fn op_inspector_url() -> Option<String> {
// TODO: hook up to InspectorServer
None
}
#[op2(fast)]
pub fn op_inspector_wait(state: &OpState) -> bool {
match state.try_borrow::<Rc<RefCell<JsRuntimeInspector>>>() {
Some(inspector) => {
inspector
.borrow_mut()
.wait_for_session_and_break_on_next_statement();
true
}
None => false,
}
}
#[op2(fast)]
pub fn op_inspector_emit_protocol_event(
#[string] _event_name: String,
#[string] _params: String,
) {
// TODO: inspector channel & protocol notifications
}
struct JSInspectorSession {
tx: RefCell<Option<mpsc::UnboundedSender<String>>>,
}
impl GarbageCollected for JSInspectorSession {}
#[op2]
#[cppgc]
pub fn op_inspector_connect<'s, P>(
isolate: *mut v8::Isolate,
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
connect_to_main_thread: bool,
callback: v8::Local<'s, v8::Function>,
) -> Result<JSInspectorSession, Error>
where
P: NodePermissions + 'static,
{
state
.borrow_mut::<P>()
.check_sys("inspector", "inspector.Session.connect")?;
if connect_to_main_thread {
return Err(generic_error("connectToMainThread not supported"));
}
let context = scope.get_current_context();
let context = v8::Global::new(scope, context);
let callback = v8::Global::new(scope, callback);
let inspector = state
.borrow::<Rc<RefCell<JsRuntimeInspector>>>()
.borrow_mut();
let tx = inspector.create_raw_session(
InspectorSessionOptions {
kind: InspectorSessionKind::NonBlocking {
wait_for_disconnect: false,
},
},
// The inspector connection does not keep the event loop alive but
// when the inspector sends a message to the frontend, the JS that
// runs in response may keep the event loop alive, so we have to call back
// synchronously, instead of using the usual LocalInspectorSession
// UnboundedReceiver<InspectorMsg> API.
Box::new(move |message| {
// SAFETY: This function is called directly by the inspector, so
// 1) The isolate is still valid
// 2) We are on the same thread as the Isolate
let scope = unsafe { &mut v8::CallbackScope::new(&mut *isolate) };
let context = v8::Local::new(scope, context.clone());
let scope = &mut v8::ContextScope::new(scope, context);
let scope = &mut v8::TryCatch::new(scope);
let recv = v8::undefined(scope);
if let Some(message) = v8::String::new(scope, &message.content) {
let callback = v8::Local::new(scope, callback.clone());
callback.call(scope, recv.into(), &[message.into()]);
}
}),
);
Ok(JSInspectorSession {
tx: RefCell::new(Some(tx)),
})
}
#[op2(fast)]
pub fn op_inspector_dispatch(
#[cppgc] session: &JSInspectorSession,
#[string] message: String,
) {
if let Some(tx) = &*session.tx.borrow() {
let _ = tx.unbounded_send(message);
}
}
#[op2(fast)]
pub fn op_inspector_disconnect(#[cppgc] session: &JSInspectorSession) {
drop(session.tx.borrow_mut().take());
}

View file

@ -7,6 +7,7 @@ pub mod fs;
pub mod http;
pub mod http2;
pub mod idna;
pub mod inspector;
pub mod ipc;
pub mod os;
pub mod process;

View file

@ -14,6 +14,7 @@ import { nextTick } from "ext:deno_node/_next_tick.ts";
import {
isAnyArrayBuffer,
isArrayBufferView,
isUint8Array,
} from "ext:deno_node/internal/util/types.ts";
var kRangeErrorMessage = "Cannot create final Buffer. It would be larger " +
@ -158,6 +159,12 @@ export const inflateRawSync = function (buffer, opts) {
function sanitizeInput(input) {
if (typeof input === "string") input = Buffer.from(input);
if (isArrayBufferView(input) && !isUint8Array(input)) {
input = Buffer.from(input.buffer, input.byteOffset, input.byteLength);
} else if (isAnyArrayBuffer(input)) {
input = Buffer.from(input);
}
if (
!Buffer.isBuffer(input) &&
(input.buffer && input.buffer.constructor !== ArrayBuffer)

View file

@ -0,0 +1,210 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
import process from "node:process";
import { EventEmitter } from "node:events";
import { primordials } from "ext:core/mod.js";
import {
op_get_extras_binding_object,
op_inspector_close,
op_inspector_connect,
op_inspector_disconnect,
op_inspector_dispatch,
op_inspector_emit_protocol_event,
op_inspector_enabled,
op_inspector_open,
op_inspector_url,
op_inspector_wait,
} from "ext:core/ops";
import {
isUint32,
validateFunction,
validateInt32,
validateObject,
validateString,
} from "ext:deno_node/internal/validators.mjs";
import {
ERR_INSPECTOR_ALREADY_ACTIVATED,
ERR_INSPECTOR_ALREADY_CONNECTED,
ERR_INSPECTOR_CLOSED,
ERR_INSPECTOR_COMMAND,
ERR_INSPECTOR_NOT_ACTIVE,
ERR_INSPECTOR_NOT_CONNECTED,
ERR_INSPECTOR_NOT_WORKER,
} from "ext:deno_node/internal/errors.ts";
const {
SymbolDispose,
JSONParse,
JSONStringify,
SafeMap,
} = primordials;
class Session extends EventEmitter {
#connection = null;
#nextId = 1;
#messageCallbacks = new SafeMap();
connect() {
if (this.#connection) {
throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session");
}
this.#connection = op_inspector_connect(false, (m) => this.#onMessage(m));
}
connectToMainThread() {
if (isMainThread) {
throw new ERR_INSPECTOR_NOT_WORKER();
}
if (this.#connection) {
throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session");
}
this.#connection = op_inspector_connect(true, (m) => this.#onMessage(m));
}
#onMessage(message) {
const parsed = JSONParse(message);
try {
if (parsed.id) {
const callback = this.#messageCallbacks.get(parsed.id);
this.#messageCallbacks.delete(parsed.id);
if (callback) {
if (parsed.error) {
return callback(
new ERR_INSPECTOR_COMMAND(
parsed.error.code,
parsed.error.message,
),
);
}
callback(null, parsed.result);
}
} else {
this.emit(parsed.method, parsed);
this.emit("inspectorNotification", parsed);
}
} catch (error) {
process.emitWarning(error);
}
}
post(method, params, callback) {
validateString(method, "method");
if (!callback && typeof params === "function") {
callback = params;
params = null;
}
if (params) {
validateObject(params, "params");
}
if (callback) {
validateFunction(callback, "callback");
}
if (!this.#connection) {
throw new ERR_INSPECTOR_NOT_CONNECTED();
}
const id = this.#nextId++;
const message = { id, method };
if (params) {
message.params = params;
}
if (callback) {
this.#messageCallbacks.set(id, callback);
}
op_inspector_dispatch(this.#connection, JSONStringify(message));
}
disconnect() {
if (!this.#connection) {
return;
}
op_inspector_disconnect(this.#connection);
this.#connection = null;
// deno-lint-ignore prefer-primordials
for (const callback of this.#messageCallbacks.values()) {
process.nextTick(callback, new ERR_INSPECTOR_CLOSED());
}
this.#messageCallbacks.clear();
this.#nextId = 1;
}
}
function open(port, host, wait) {
if (op_inspector_enabled()) {
throw new ERR_INSPECTOR_ALREADY_ACTIVATED();
}
// inspectorOpen() currently does not typecheck its arguments and adding
// such checks would be a potentially breaking change. However, the native
// open() function requires the port to fit into a 16-bit unsigned integer,
// causing an integer overflow otherwise, so we at least need to prevent that.
if (isUint32(port)) {
validateInt32(port, "port", 0, 65535);
} else {
// equiv of handling args[0]->IsUint32()
port = undefined;
}
if (typeof host !== "string") {
// equiv of handling args[1]->IsString()
host = undefined;
}
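  // Example (hypothetical values): open(9229, "127.0.0.1") activates the
  // inspector on that host and port, while open(70000) is rejected above by
  // validateInt32 with a RangeError because it does not fit in 16 bits.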
op_inspector_open(port, host);
if (wait) {
op_inspector_wait();
}
return {
__proto__: null,
[SymbolDispose]() {
      // `_debugEnd` is not defined in this module; dispose by closing the inspector.
      close();
},
};
}
function close() {
op_inspector_close();
}
function url() {
return op_inspector_url();
}
function waitForDebugger() {
if (!op_inspector_wait()) {
throw new ERR_INSPECTOR_NOT_ACTIVE();
}
}
function broadcastToFrontend(eventName, params) {
validateString(eventName, "eventName");
if (params) {
validateObject(params, "params");
}
op_inspector_emit_protocol_event(eventName, JSONStringify(params ?? {}));
}
const Network = {
requestWillBeSent: (params) =>
broadcastToFrontend("Network.requestWillBeSent", params),
responseReceived: (params) =>
broadcastToFrontend("Network.responseReceived", params),
loadingFinished: (params) =>
broadcastToFrontend("Network.loadingFinished", params),
loadingFailed: (params) =>
broadcastToFrontend("Network.loadingFailed", params),
};
const console = op_get_extras_binding_object().console;
export { close, console, Network, open, Session, url, waitForDebugger };
export default {
open,
close,
url,
waitForDebugger,
console,
Session,
Network,
};
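
For review context, a minimal usage sketch of the session API implemented above (the CDP method and values are illustrative, and error handling is elided):

import inspector from "node:inspector";

const session = new inspector.Session();
session.connect();
// post() dispatches a CDP message; the callback receives (error, result).
session.post("Runtime.evaluate", { expression: "1 + 2" }, (err, res) => {
  if (err) throw err;
  console.log(res.result.value); // 3
  session.disconnect();
});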

View file

@ -1,82 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
import { EventEmitter } from "node:events";
import { notImplemented } from "ext:deno_node/_utils.ts";
import { primordials } from "ext:core/mod.js";
const {
SafeMap,
} = primordials;
class Session extends EventEmitter {
#connection = null;
#nextId = 1;
#messageCallbacks = new SafeMap();
/** Connects the session to the inspector back-end. */
connect() {
notImplemented("inspector.Session.prototype.connect");
}
/** Connects the session to the main thread
* inspector back-end. */
connectToMainThread() {
notImplemented("inspector.Session.prototype.connectToMainThread");
}
/** Posts a message to the inspector back-end. */
post(
_method: string,
_params?: Record<string, unknown>,
_callback?: (...args: unknown[]) => void,
) {
notImplemented("inspector.Session.prototype.post");
}
/** Immediately closes the session, all pending
* message callbacks will be called with an
* error.
*/
disconnect() {
notImplemented("inspector.Session.prototype.disconnect");
}
}
/** Activates inspector on host and port.
* See https://nodejs.org/api/inspector.html#inspectoropenport-host-wait */
function open(_port?: number, _host?: string, _wait?: boolean) {
notImplemented("inspector.Session.prototype.open");
}
/** Deactivate the inspector. Blocks until there are no active connections.
* See https://nodejs.org/api/inspector.html#inspectorclose */
function close() {
notImplemented("inspector.Session.prototype.close");
}
/** Return the URL of the active inspector, or undefined if there is none.
* See https://nodejs.org/api/inspector.html#inspectorurl */
function url() {
// TODO(kt3k): returns undefined for now, which means the inspector is not activated.
return undefined;
}
/** Blocks until a client (existing or connected later) has sent Runtime.runIfWaitingForDebugger command.
* See https://nodejs.org/api/inspector.html#inspectorwaitfordebugger */
function waitForDebugger() {
notImplemented("inspector.wairForDebugger");
}
const console = globalThis.console;
export { close, console, open, Session, url, waitForDebugger };
export default {
close,
console,
open,
Session,
url,
waitForDebugger,
};

View file

@ -0,0 +1,20 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
import inspector from "node:inspector";
import { promisify } from "ext:deno_node/internal/util.mjs";
class Session extends inspector.Session {
constructor() {
super();
}
}
Session.prototype.post = promisify(inspector.Session.prototype.post);
export * from "node:inspector";
export { Session };
export default {
...inspector,
Session,
};
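
A short sketch of the promisified variant this file enables (assumes top-level await is available):

import { Session } from "node:inspector/promises";

const session = new Session();
session.connect();
// post() returns a Promise here instead of taking a callback.
const { result } = await session.post("Runtime.evaluate", {
  expression: "6 * 7",
});
console.log(result.value); // 42
session.disconnect();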

View file

@ -2,7 +2,7 @@
[package]
name = "deno_tls"
version = "0.162.0"
version = "0.163.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_url"
version = "0.175.0"
version = "0.176.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_web"
version = "0.206.0"
version = "0.207.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_webgpu"
version = "0.142.0"
version = "0.143.0"
authors = ["the Deno authors"]
edition.workspace = true
license = "MIT"

View file

@ -2,7 +2,7 @@
[package]
name = "deno_webidl"
version = "0.175.0"
version = "0.176.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_websocket"
version = "0.180.0"
version = "0.181.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_webstorage"
version = "0.170.0"
version = "0.171.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_resolver"
version = "0.7.0"
version = "0.8.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "node_resolver"
version = "0.14.0"
version = "0.15.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_runtime"
version = "0.184.0"
version = "0.185.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -1051,6 +1051,34 @@ mod node {
use super::get_serde_json_error_class;
use super::get_url_parse_error_class;
pub use deno_node::ops::blocklist::BlocklistError;
pub use deno_node::ops::crypto::cipher::CipherContextError;
pub use deno_node::ops::crypto::cipher::CipherError;
pub use deno_node::ops::crypto::cipher::DecipherContextError;
pub use deno_node::ops::crypto::cipher::DecipherError;
pub use deno_node::ops::crypto::digest::HashError;
pub use deno_node::ops::crypto::keys::AsymmetricPrivateKeyDerError;
pub use deno_node::ops::crypto::keys::AsymmetricPrivateKeyError;
pub use deno_node::ops::crypto::keys::AsymmetricPublicKeyDerError;
pub use deno_node::ops::crypto::keys::AsymmetricPublicKeyError;
pub use deno_node::ops::crypto::keys::AsymmetricPublicKeyJwkError;
pub use deno_node::ops::crypto::keys::EcJwkError;
pub use deno_node::ops::crypto::keys::EdRawError;
pub use deno_node::ops::crypto::keys::ExportPrivateKeyPemError;
pub use deno_node::ops::crypto::keys::ExportPublicKeyPemError;
pub use deno_node::ops::crypto::keys::GenerateRsaPssError;
pub use deno_node::ops::crypto::keys::RsaJwkError;
pub use deno_node::ops::crypto::keys::RsaPssParamsParseError;
pub use deno_node::ops::crypto::keys::X509PublicKeyError;
pub use deno_node::ops::crypto::sign::KeyObjectHandlePrehashedSignAndVerifyError;
pub use deno_node::ops::crypto::x509::X509Error;
pub use deno_node::ops::crypto::DiffieHellmanError;
pub use deno_node::ops::crypto::EcdhEncodePubKey;
pub use deno_node::ops::crypto::HkdfError;
pub use deno_node::ops::crypto::Pbkdf2Error;
pub use deno_node::ops::crypto::PrivateEncryptDecryptError;
pub use deno_node::ops::crypto::ScryptAsyncError;
pub use deno_node::ops::crypto::SignEd25519Error;
pub use deno_node::ops::crypto::VerifyEd25519Error;
pub use deno_node::ops::fs::FsError;
pub use deno_node::ops::http2::Http2Error;
pub use deno_node::ops::idna::IdnaError;
@ -1189,6 +1217,324 @@ mod node {
ZlibError::Other(e) => get_error_class_name(e).unwrap_or("Error"),
}
}
pub fn get_crypto_cipher_context_error(
e: &CipherContextError,
) -> &'static str {
match e {
CipherContextError::ContextInUse => "TypeError",
CipherContextError::Cipher(e) => get_crypto_cipher_error(e),
CipherContextError::Resource(e) => {
get_error_class_name(e).unwrap_or("Error")
}
}
}
pub fn get_crypto_cipher_error(e: &CipherError) -> &'static str {
match e {
CipherError::InvalidIvLength => "TypeError",
CipherError::InvalidKeyLength => "RangeError",
CipherError::InvalidInitializationVector => "TypeError",
CipherError::CannotPadInputData => "TypeError",
CipherError::UnknownCipher(_) => "TypeError",
}
}
pub fn get_crypto_decipher_context_error(
e: &DecipherContextError,
) -> &'static str {
match e {
DecipherContextError::ContextInUse => "TypeError",
DecipherContextError::Decipher(e) => get_crypto_decipher_error(e),
DecipherContextError::Resource(e) => {
get_error_class_name(e).unwrap_or("Error")
}
}
}
pub fn get_crypto_decipher_error(e: &DecipherError) -> &'static str {
match e {
DecipherError::InvalidIvLength => "TypeError",
DecipherError::InvalidKeyLength => "RangeError",
DecipherError::InvalidInitializationVector => "TypeError",
DecipherError::CannotUnpadInputData => "TypeError",
DecipherError::DataAuthenticationFailed => "TypeError",
DecipherError::SetAutoPaddingFalseAes128GcmUnsupported => "TypeError",
DecipherError::SetAutoPaddingFalseAes256GcmUnsupported => "TypeError",
DecipherError::UnknownCipher(_) => "TypeError",
}
}
pub fn get_x509_error(_: &X509Error) -> &'static str {
"Error"
}
pub fn get_crypto_key_object_handle_prehashed_sign_and_verify_error(
e: &KeyObjectHandlePrehashedSignAndVerifyError,
) -> &'static str {
match e {
KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignatureEncoding => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPrivate => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(_) => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsa => "Error",
KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(_) => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsaPss => "Error",
KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithDsa => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage { .. } => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForSigning => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedSigning => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForSigning => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPublicOrPrivate => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignature => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForVerification => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedVerification => "TypeError",
KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForVerification => "TypeError",
}
}
pub fn get_crypto_hash_error(_: &HashError) -> &'static str {
"Error"
}
pub fn get_asymmetric_public_key_jwk_error(
e: &AsymmetricPublicKeyJwkError,
) -> &'static str {
match e {
AsymmetricPublicKeyJwkError::UnsupportedJwkEcCurveP224 => "TypeError",
AsymmetricPublicKeyJwkError::JwkExportNotImplementedForKeyType => {
"TypeError"
}
AsymmetricPublicKeyJwkError::KeyIsNotAsymmetricPublicKey => "TypeError",
}
}
pub fn get_generate_rsa_pss_error(_: &GenerateRsaPssError) -> &'static str {
"TypeError"
}
pub fn get_asymmetric_private_key_der_error(
e: &AsymmetricPrivateKeyDerError,
) -> &'static str {
match e {
AsymmetricPrivateKeyDerError::KeyIsNotAsymmetricPrivateKey => "TypeError",
AsymmetricPrivateKeyDerError::InvalidRsaPrivateKey => "TypeError",
AsymmetricPrivateKeyDerError::ExportingNonRsaPrivateKeyAsPkcs1Unsupported => "TypeError",
AsymmetricPrivateKeyDerError::InvalidEcPrivateKey => "TypeError",
AsymmetricPrivateKeyDerError::ExportingNonEcPrivateKeyAsSec1Unsupported => "TypeError",
AsymmetricPrivateKeyDerError::ExportingNonRsaPssPrivateKeyAsPkcs8Unsupported => "Error",
AsymmetricPrivateKeyDerError::InvalidDsaPrivateKey => "TypeError",
AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey => "TypeError",
AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey => "TypeError",
AsymmetricPrivateKeyDerError::InvalidDhPrivateKey => "TypeError",
AsymmetricPrivateKeyDerError::UnsupportedKeyType(_) => "TypeError",
}
}
pub fn get_asymmetric_public_key_der_error(
_: &AsymmetricPublicKeyDerError,
) -> &'static str {
"TypeError"
}
pub fn get_export_public_key_pem_error(
e: &ExportPublicKeyPemError,
) -> &'static str {
match e {
ExportPublicKeyPemError::AsymmetricPublicKeyDer(e) => {
get_asymmetric_public_key_der_error(e)
}
ExportPublicKeyPemError::VeryLargeData => "TypeError",
ExportPublicKeyPemError::Der(_) => "Error",
}
}
pub fn get_export_private_key_pem_error(
e: &ExportPrivateKeyPemError,
) -> &'static str {
match e {
ExportPrivateKeyPemError::AsymmetricPublicKeyDer(e) => {
get_asymmetric_private_key_der_error(e)
}
ExportPrivateKeyPemError::VeryLargeData => "TypeError",
ExportPrivateKeyPemError::Der(_) => "Error",
}
}
pub fn get_x509_public_key_error(e: &X509PublicKeyError) -> &'static str {
match e {
X509PublicKeyError::X509(_) => "Error",
X509PublicKeyError::Rsa(_) => "Error",
X509PublicKeyError::Asn1(_) => "Error",
X509PublicKeyError::Ec(_) => "Error",
X509PublicKeyError::UnsupportedEcNamedCurve => "TypeError",
X509PublicKeyError::MissingEcParameters => "TypeError",
X509PublicKeyError::MalformedDssPublicKey => "TypeError",
X509PublicKeyError::UnsupportedX509KeyType => "TypeError",
}
}
pub fn get_rsa_jwk_error(e: &RsaJwkError) -> &'static str {
match e {
RsaJwkError::Base64(_) => "Error",
RsaJwkError::Rsa(_) => "Error",
RsaJwkError::MissingRsaPrivateComponent => "TypeError",
}
}
pub fn get_ec_jwk_error(e: &EcJwkError) -> &'static str {
match e {
EcJwkError::Ec(_) => "Error",
EcJwkError::UnsupportedCurve(_) => "TypeError",
}
}
pub fn get_ed_raw_error(e: &EdRawError) -> &'static str {
match e {
EdRawError::Ed25519Signature(_) => "Error",
EdRawError::InvalidEd25519Key => "TypeError",
EdRawError::UnsupportedCurve => "TypeError",
}
}
pub fn get_pbkdf2_error(e: &Pbkdf2Error) -> &'static str {
match e {
Pbkdf2Error::UnsupportedDigest(_) => "TypeError",
Pbkdf2Error::Join(_) => "Error",
}
}
pub fn get_scrypt_async_error(e: &ScryptAsyncError) -> &'static str {
match e {
ScryptAsyncError::Join(_) => "Error",
ScryptAsyncError::Other(e) => get_error_class_name(e).unwrap_or("Error"),
}
}
pub fn get_hkdf_error_error(e: &HkdfError) -> &'static str {
match e {
HkdfError::ExpectedSecretKey => "TypeError",
HkdfError::HkdfExpandFailed => "TypeError",
HkdfError::UnsupportedDigest(_) => "TypeError",
HkdfError::Join(_) => "Error",
}
}
pub fn get_rsa_pss_params_parse_error(
_: &RsaPssParamsParseError,
) -> &'static str {
"TypeError"
}
pub fn get_asymmetric_private_key_error(
e: &AsymmetricPrivateKeyError,
) -> &'static str {
match e {
AsymmetricPrivateKeyError::InvalidPemPrivateKeyInvalidUtf8(_) => "TypeError",
AsymmetricPrivateKeyError::InvalidEncryptedPemPrivateKey => "TypeError",
AsymmetricPrivateKeyError::InvalidPemPrivateKey => "TypeError",
AsymmetricPrivateKeyError::EncryptedPrivateKeyRequiresPassphraseToDecrypt => "TypeError",
AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey => "TypeError",
AsymmetricPrivateKeyError::InvalidSec1PrivateKey => "TypeError",
AsymmetricPrivateKeyError::UnsupportedPemLabel(_) => "TypeError",
AsymmetricPrivateKeyError::RsaPssParamsParse(e) => get_rsa_pss_params_parse_error(e),
AsymmetricPrivateKeyError::InvalidEncryptedPkcs8PrivateKey => "TypeError",
AsymmetricPrivateKeyError::InvalidPkcs8PrivateKey => "TypeError",
AsymmetricPrivateKeyError::Pkcs1PrivateKeyDoesNotSupportEncryptionWithPassphrase => "TypeError",
AsymmetricPrivateKeyError::Sec1PrivateKeyDoesNotSupportEncryptionWithPassphrase => "TypeError",
AsymmetricPrivateKeyError::UnsupportedEcNamedCurve => "TypeError",
AsymmetricPrivateKeyError::InvalidPrivateKey => "TypeError",
AsymmetricPrivateKeyError::InvalidDsaPrivateKey => "TypeError",
AsymmetricPrivateKeyError::MalformedOrMissingNamedCurveInEcParameters => "TypeError",
AsymmetricPrivateKeyError::UnsupportedKeyType(_) => "TypeError",
AsymmetricPrivateKeyError::UnsupportedKeyFormat(_) => "TypeError",
AsymmetricPrivateKeyError::InvalidX25519PrivateKey => "TypeError",
AsymmetricPrivateKeyError::X25519PrivateKeyIsWrongLength => "TypeError",
AsymmetricPrivateKeyError::InvalidEd25519PrivateKey => "TypeError",
AsymmetricPrivateKeyError::MissingDhParameters => "TypeError",
AsymmetricPrivateKeyError::UnsupportedPrivateKeyOid => "TypeError",
}
}
pub fn get_asymmetric_public_key_error(
e: &AsymmetricPublicKeyError,
) -> &'static str {
match e {
AsymmetricPublicKeyError::InvalidPemPrivateKeyInvalidUtf8(_) => {
"TypeError"
}
AsymmetricPublicKeyError::InvalidPemPublicKey => "TypeError",
AsymmetricPublicKeyError::InvalidPkcs1PublicKey => "TypeError",
AsymmetricPublicKeyError::AsymmetricPrivateKey(e) => {
get_asymmetric_private_key_error(e)
}
AsymmetricPublicKeyError::InvalidX509Certificate => "TypeError",
AsymmetricPublicKeyError::X509(_) => "Error",
AsymmetricPublicKeyError::X509PublicKey(e) => {
get_x509_public_key_error(e)
}
AsymmetricPublicKeyError::UnsupportedPemLabel(_) => "TypeError",
AsymmetricPublicKeyError::InvalidSpkiPublicKey => "TypeError",
AsymmetricPublicKeyError::UnsupportedKeyType(_) => "TypeError",
AsymmetricPublicKeyError::UnsupportedKeyFormat(_) => "TypeError",
AsymmetricPublicKeyError::Spki(_) => "Error",
AsymmetricPublicKeyError::Pkcs1(_) => "Error",
AsymmetricPublicKeyError::RsaPssParamsParse(_) => "TypeError",
AsymmetricPublicKeyError::MalformedDssPublicKey => "TypeError",
AsymmetricPublicKeyError::MalformedOrMissingNamedCurveInEcParameters => {
"TypeError"
}
AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInEcSpki => {
"TypeError"
}
AsymmetricPublicKeyError::Ec(_) => "Error",
AsymmetricPublicKeyError::UnsupportedEcNamedCurve => "TypeError",
AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInX25519Spki => {
"TypeError"
}
AsymmetricPublicKeyError::X25519PublicKeyIsTooShort => "TypeError",
AsymmetricPublicKeyError::InvalidEd25519PublicKey => "TypeError",
AsymmetricPublicKeyError::MissingDhParameters => "TypeError",
AsymmetricPublicKeyError::MalformedDhParameters => "TypeError",
AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInDhSpki => {
"TypeError"
}
AsymmetricPublicKeyError::UnsupportedPrivateKeyOid => "TypeError",
}
}
pub fn get_private_encrypt_decrypt_error(
e: &PrivateEncryptDecryptError,
) -> &'static str {
match e {
PrivateEncryptDecryptError::Pkcs8(_) => "Error",
PrivateEncryptDecryptError::Spki(_) => "Error",
PrivateEncryptDecryptError::Utf8(_) => "Error",
PrivateEncryptDecryptError::Rsa(_) => "Error",
PrivateEncryptDecryptError::UnknownPadding => "TypeError",
}
}
pub fn get_ecdh_encode_pub_key_error(e: &EcdhEncodePubKey) -> &'static str {
match e {
EcdhEncodePubKey::InvalidPublicKey => "TypeError",
EcdhEncodePubKey::UnsupportedCurve => "TypeError",
EcdhEncodePubKey::Sec1(_) => "Error",
}
}
pub fn get_diffie_hellman_error(_: &DiffieHellmanError) -> &'static str {
"TypeError"
}
pub fn get_sign_ed25519_error(_: &SignEd25519Error) -> &'static str {
"TypeError"
}
pub fn get_verify_ed25519_error(_: &VerifyEd25519Error) -> &'static str {
"TypeError"
}
}
fn get_os_error(error: &OsError) -> &'static str {
@ -1273,6 +1619,114 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> {
e.downcast_ref::<node::ZlibError>()
.map(node::get_zlib_error)
})
.or_else(|| {
e.downcast_ref::<node::CipherError>()
.map(node::get_crypto_cipher_error)
})
.or_else(|| {
e.downcast_ref::<node::CipherContextError>()
.map(node::get_crypto_cipher_context_error)
})
.or_else(|| {
e.downcast_ref::<node::DecipherError>()
.map(node::get_crypto_decipher_error)
})
.or_else(|| {
e.downcast_ref::<node::DecipherContextError>()
.map(node::get_crypto_decipher_context_error)
})
.or_else(|| {
e.downcast_ref::<node::X509Error>()
.map(node::get_x509_error)
})
.or_else(|| {
e.downcast_ref::<node::KeyObjectHandlePrehashedSignAndVerifyError>()
.map(node::get_crypto_key_object_handle_prehashed_sign_and_verify_error)
})
.or_else(|| {
e.downcast_ref::<node::HashError>()
.map(node::get_crypto_hash_error)
})
.or_else(|| {
e.downcast_ref::<node::AsymmetricPublicKeyJwkError>()
.map(node::get_asymmetric_public_key_jwk_error)
})
.or_else(|| {
e.downcast_ref::<node::GenerateRsaPssError>()
.map(node::get_generate_rsa_pss_error)
})
.or_else(|| {
e.downcast_ref::<node::AsymmetricPrivateKeyDerError>()
.map(node::get_asymmetric_private_key_der_error)
})
.or_else(|| {
e.downcast_ref::<node::AsymmetricPublicKeyDerError>()
.map(node::get_asymmetric_public_key_der_error)
})
.or_else(|| {
e.downcast_ref::<node::ExportPublicKeyPemError>()
.map(node::get_export_public_key_pem_error)
})
.or_else(|| {
e.downcast_ref::<node::ExportPrivateKeyPemError>()
.map(node::get_export_private_key_pem_error)
})
.or_else(|| {
e.downcast_ref::<node::RsaJwkError>()
.map(node::get_rsa_jwk_error)
})
.or_else(|| {
e.downcast_ref::<node::EcJwkError>()
.map(node::get_ec_jwk_error)
})
.or_else(|| {
e.downcast_ref::<node::EdRawError>()
.map(node::get_ed_raw_error)
})
.or_else(|| {
e.downcast_ref::<node::Pbkdf2Error>()
.map(node::get_pbkdf2_error)
})
.or_else(|| {
e.downcast_ref::<node::ScryptAsyncError>()
.map(node::get_scrypt_async_error)
})
.or_else(|| {
e.downcast_ref::<node::HkdfError>()
.map(node::get_hkdf_error_error)
})
.or_else(|| {
e.downcast_ref::<node::RsaPssParamsParseError>()
.map(node::get_rsa_pss_params_parse_error)
})
.or_else(|| {
e.downcast_ref::<node::AsymmetricPrivateKeyError>()
.map(node::get_asymmetric_private_key_error)
})
.or_else(|| {
e.downcast_ref::<node::AsymmetricPublicKeyError>()
.map(node::get_asymmetric_public_key_error)
})
.or_else(|| {
e.downcast_ref::<node::PrivateEncryptDecryptError>()
.map(node::get_private_encrypt_decrypt_error)
})
.or_else(|| {
e.downcast_ref::<node::EcdhEncodePubKey>()
.map(node::get_ecdh_encode_pub_key_error)
})
.or_else(|| {
e.downcast_ref::<node::DiffieHellmanError>()
.map(node::get_diffie_hellman_error)
})
.or_else(|| {
e.downcast_ref::<node::SignEd25519Error>()
.map(node::get_sign_ed25519_error)
})
.or_else(|| {
e.downcast_ref::<node::VerifyEd25519Error>()
.map(node::get_verify_ed25519_error)
})
.or_else(|| e.downcast_ref::<NApiError>().map(get_napi_error_class))
.or_else(|| e.downcast_ref::<WebError>().map(get_web_error_class))
.or_else(|| {

View file

@ -2,7 +2,7 @@
[package]
name = "deno_permissions"
version = "0.35.0"
version = "0.36.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -82,6 +82,13 @@ impl deno_node::NodePermissions for Permissions {
) -> Result<(), PermissionCheckError> {
unreachable!("snapshotting!")
}
fn check_net(
&mut self,
_host: (&str, Option<u16>),
_api_name: &str,
) -> Result<(), PermissionCheckError> {
unreachable!("snapshotting!")
}
fn check_read_path<'a>(
&mut self,
_path: &'a Path,

View file

@ -562,7 +562,7 @@ impl WebWorker {
extension_transpiler: Some(Rc::new(|specifier, source| {
maybe_transpile_source(specifier, source)
})),
inspector: services.maybe_inspector_server.is_some(),
inspector: true,
feature_checker: Some(services.feature_checker),
op_metrics_factory_fn,
import_meta_resolve_callback: Some(Box::new(
@ -579,18 +579,18 @@ impl WebWorker {
js_runtime.op_state().borrow_mut().put(op_summary_metrics);
}
// Put inspector handle into the op state so we can put a breakpoint when
// executing a CJS entrypoint.
let op_state = js_runtime.op_state();
let inspector = js_runtime.inspector();
op_state.borrow_mut().put(inspector);
if let Some(server) = services.maybe_inspector_server {
server.register_inspector(
options.main_module.to_string(),
&mut js_runtime,
false,
);
// Put inspector handle into the op state so we can put a breakpoint when
// executing a CJS entrypoint.
let op_state = js_runtime.op_state();
let inspector = js_runtime.inspector();
op_state.borrow_mut().put(inspector);
}
let (internal_handle, external_handle) = {

View file

@ -488,7 +488,7 @@ impl MainWorker {
extension_transpiler: Some(Rc::new(|specifier, source| {
maybe_transpile_source(specifier, source)
})),
inspector: options.maybe_inspector_server.is_some(),
inspector: true,
is_main: true,
feature_checker: Some(services.feature_checker.clone()),
op_metrics_factory_fn,
@ -546,6 +546,12 @@ impl MainWorker {
js_runtime.op_state().borrow_mut().put(op_summary_metrics);
}
// Put inspector handle into the op state so we can put a breakpoint when
// executing a CJS entrypoint.
let op_state = js_runtime.op_state();
let inspector = js_runtime.inspector();
op_state.borrow_mut().put(inspector);
if let Some(server) = options.maybe_inspector_server.clone() {
server.register_inspector(
main_module.to_string(),
@ -553,13 +559,8 @@ impl MainWorker {
options.should_break_on_first_statement
|| options.should_wait_for_inspector_session,
);
// Put inspector handle into the op state so we can put a breakpoint when
// executing a CJS entrypoint.
let op_state = js_runtime.op_state();
let inspector = js_runtime.inspector();
op_state.borrow_mut().put(inspector);
}
let (
bootstrap_fn_global,
dispatch_load_event_fn_global,

View file

@ -47,6 +47,8 @@ deno_tls.workspace = true
fastwebsockets = { workspace = true, features = ["upgrade", "unstable-split"] }
file_test_runner = "0.7.2"
flaky_test = "=0.2.2"
hickory-client = "=0.24"
hickory-server = "=0.24"
http.workspace = true
http-body-util.workspace = true
hyper.workspace = true
@ -60,8 +62,6 @@ serde.workspace = true
test_util.workspace = true
tokio.workspace = true
tower-lsp.workspace = true
trust-dns-client = "=0.23.2"
trust-dns-server = "=0.23.2"
url.workspace = true
uuid = { workspace = true, features = ["serde"] }
zeromq.workspace = true

View file

@ -6628,6 +6628,23 @@ export class DuckConfig {
}]
}]
}
}, {
"title": "Add all missing imports",
"kind": "quickfix",
"diagnostics": [{
"range": {
"start": { "line": 0, "character": 50 },
"end": { "line": 0, "character": 67 }
},
"severity": 1,
"code": 2304,
"source": "deno-ts",
"message": "Cannot find name 'DuckConfigOptions'."
}],
"data": {
"specifier": "file:///a/file00.ts",
"fixId": "fixMissingImport"
}
}, {
"title": "Add import from \"./file01.ts\"",
"kind": "quickfix",
@ -6656,23 +6673,6 @@ export class DuckConfig {
}]
}]
}
}, {
"title": "Add all missing imports",
"kind": "quickfix",
"diagnostics": [{
"range": {
"start": { "line": 0, "character": 50 },
"end": { "line": 0, "character": 67 }
},
"severity": 1,
"code": 2304,
"source": "deno-ts",
"message": "Cannot find name 'DuckConfigOptions'."
}],
"data": {
"specifier": "file:///a/file00.ts",
"fixId": "fixMissingImport"
}
}])
);
let res = client.write_request(
@ -8125,6 +8125,151 @@ fn lsp_npm_completions_auto_import_and_quick_fix_no_import_map() {
client.shutdown();
}
#[test]
fn lsp_npm_auto_import_and_quick_fix_byonm() {
let context = TestContextBuilder::new()
.use_http_server()
.use_temp_cwd()
.add_npm_env_vars()
.build();
let temp_dir = context.temp_dir();
temp_dir.write("deno.json", json!({}).to_string());
temp_dir.write(
"package.json",
json!({
"dependencies": {
"cowsay": "*",
},
})
.to_string(),
);
context
.new_command()
.args("install")
.run()
.skip_output_check();
temp_dir.write("other.ts", "import \"cowsay\";\n");
let mut client = context.new_lsp_command().build();
client.initialize_default();
let diagnostics = client.did_open(json!({
"textDocument": {
"uri": temp_dir.url().join("file.ts").unwrap(),
"languageId": "typescript",
"version": 1,
"text": "think({ text: \"foo\" });\n",
},
}));
let list = client.get_completion_list(
temp_dir.url().join("file.ts").unwrap(),
(0, 5),
json!({ "triggerKind": 1 }),
);
assert!(!list.is_incomplete);
let item = list
.items
.iter()
.find(|item| item.label == "think")
.unwrap();
let res = client.write_request("completionItem/resolve", item);
assert_eq!(
res,
json!({
"label": "think",
"labelDetails": {
"description": "cowsay",
},
"kind": 3,
"detail": "function think(options: IOptions): string",
"documentation": {
"kind": "markdown",
"value": "\n\n*@param* \noptions ## Face :\nEither choose a mode (set the value as true) **_or_**\nset your own defined eyes and tongue to `e` and `T`.\n- ### `e` : eyes\n- ### `T` : tongue\n\n## Cow :\nEither specify a cow name (e.g. \"fox\") **_or_**\nset the value of `r` to true which selects a random cow.\n- ### `r` : random selection\n- ### `f` : cow name - from `cows` folder\n\n## Modes :\nModes are just ready-to-use faces, here's their list:\n- #### `b` : borg\n- #### `d` : dead \n- #### `g` : greedy\n- #### `p` : paranoia\n- #### `s` : stoned\n- #### `t` : tired\n- #### `w` : youthful\n- #### `y` : wired \n\n*@example* \n```\n// custom cow and face\ncowsay.think({\n text: 'Hello world!',\n e: '^^', // eyes\n T: 'U ', // tongue\n f: 'USA' // name of the cow from `cows` folder\n})\n\n// using a random cow\ncowsay.think({\n text: 'Hello world!',\n e: 'xx', // eyes\n r: true, // random mode - use a random cow.\n})\n\n// using a mode\ncowsay.think({\n text: 'Hello world!',\n y: true, // using y mode - youthful mode\n})\n```",
},
"sortText": "￿16_0",
"additionalTextEdits": [
{
"range": {
"start": { "line": 0, "character": 0 },
"end": { "line": 0, "character": 0 },
},
"newText": "import { think } from \"cowsay\";\n\n",
},
],
}),
);
let diagnostics = diagnostics
.messages_with_file_and_source(
temp_dir.url().join("file.ts").unwrap().as_str(),
"deno-ts",
)
.diagnostics;
let res = client.write_request(
"textDocument/codeAction",
    json!({
"textDocument": {
"uri": temp_dir.url().join("file.ts").unwrap(),
},
"range": {
"start": { "line": 0, "character": 0 },
"end": { "line": 0, "character": 5 },
},
"context": {
"diagnostics": &diagnostics,
"only": ["quickfix"],
},
    }),
);
assert_eq!(
res,
json!([
{
"title": "Add import from \"cowsay\"",
"kind": "quickfix",
"diagnostics": &diagnostics,
"edit": {
"documentChanges": [{
"textDocument": {
"uri": temp_dir.url().join("file.ts").unwrap(),
"version": 1,
},
"edits": [{
"range": {
"start": { "line": 0, "character": 0 },
"end": { "line": 0, "character": 0 },
},
"newText": "import { think } from \"cowsay\";\n\n",
}],
}],
},
},
{
"title": "Add missing function declaration 'think'",
"kind": "quickfix",
"diagnostics": &diagnostics,
"edit": {
"documentChanges": [
{
"textDocument": {
"uri": temp_dir.url().join("file.ts").unwrap(),
"version": 1,
},
"edits": [
{
"range": {
"start": { "line": 1, "character": 0 },
"end": { "line": 1, "character": 0 },
},
"newText": "\nfunction think(arg0: { text: string; }) {\n throw new Error(\"Function not implemented.\");\n}\n",
},
],
},
],
},
},
]),
);
client.shutdown();
}
#[test]
fn lsp_completions_node_specifier() {
let context = TestContextBuilder::new().use_temp_cwd().build();
@ -8237,8 +8382,8 @@ fn lsp_infer_return_type() {
let context = TestContextBuilder::new().use_temp_cwd().build();
let temp_dir = context.temp_dir();
temp_dir.write("deno.json", json!({}).to_string());
let types_file = source_file(
temp_dir.path().join("types.d.ts"),
temp_dir.write(
"types.d.ts",
r#"
export interface SomeInterface {
someField: number;
@ -8319,7 +8464,7 @@ fn lsp_infer_return_type() {
"start": { "line": 1, "character": 20 },
"end": { "line": 1, "character": 20 },
},
"newText": format!(": import(\"{}\").SomeInterface", types_file.url()),
"newText": ": import(\"./types.d.ts\").SomeInterface",
},
],
},

File diff suppressed because it is too large

View file

@ -2,7 +2,7 @@
"name": "@denotest/node-addon",
"version": "1.0.0",
"scripts": {
"install": "node-gyp configure build"
"install": "node-gyp configure --verbose build"
},
"dependencies": {
"node-gyp": "10.1.0"

View file

@ -1,4 +1,4 @@
<div class="container">content</div>
<div class="container">content<br></div>
<script>
let counter = 0;

View file

@ -0,0 +1 @@
console.log("Hello World");

View file

@ -0,0 +1,4 @@
{
"args": "run --reload 001_hello.js",
"output": "001_hello.js.out"
}

View file

@ -0,0 +1 @@
console.log("Hello World");

View file

@ -0,0 +1,4 @@
{
"args": "run --quiet --reload 002_hello.ts",
"output": "002_hello.ts.out"
}

View file

@ -0,0 +1,3 @@
import { printHello } from "./print_hello.ts";
printHello();

View file

@ -0,0 +1,4 @@
{
"args": "run --quiet --reload 003_relative_import.ts",
"output": "003_relative_import.ts.out"
}

View file

@ -0,0 +1,3 @@
export function printHello() {
console.log("Hello");
}

View file

@ -0,0 +1,4 @@
{
"args": "run --quiet --reload 004_set_timeout.ts",
"output": "004_set_timeout.ts.out"
}

View file

@ -1,4 +1,4 @@
import { printHello3, returnsFoo2, returnsHi } from "../subdir/mod1.ts";
import { printHello3, returnsFoo2, returnsHi } from "./mod1.ts";
printHello3();

View file

@ -0,0 +1,4 @@
{
"args": "run --quiet --reload 005_more_imports.ts",
"output": "005_more_imports.ts.out"
}

View file

@ -0,0 +1,17 @@
import { printHello2, returnsFoo } from "./subdir2/mod2.ts";
export function returnsHi(): string {
return "Hi";
}
export function returnsFoo2(): string {
return returnsFoo();
}
export function printHello3() {
printHello2();
}
export function throwsError() {
throw Error("exception from mod1");
}

View file

@ -0,0 +1,3 @@
export function printHello() {
console.log("Hello");
}

View file

@ -0,0 +1,9 @@
import { printHello } from "../print_hello.ts";
export function returnsFoo(): string {
return "Foo";
}
export function printHello2() {
printHello();
}

View file

@ -0,0 +1,3 @@
import { printHello } from "http://localhost:4545/subdir/mod2.ts";
printHello();
console.log("success");

Some files were not shown because too many files have changed in this diff