
Merge branch 'main' into thread-local-permission-prompter

Carlos Galarza authored on 2024-12-03 08:38:56 -05:00; committed by GitHub
commit 7fcb4ade45
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
82 changed files with 2687 additions and 956 deletions

Cargo.lock (generated; 268 changed lines)

@@ -231,7 +231,7 @@ dependencies = [
  "nom 7.1.3",
  "num-traits",
  "rusticata-macros",
- "thiserror",
+ "thiserror 1.0.64",
  "time",
 ]
@@ -1220,6 +1220,7 @@ dependencies = [
  "deno_lint",
  "deno_lockfile",
  "deno_npm",
+ "deno_npm_cache",
  "deno_package_json",
  "deno_path_util",
  "deno_resolver",
@@ -1292,7 +1293,7 @@ dependencies = [
  "test_server",
  "text-size",
  "text_lines",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tokio-util",
  "tracing",
@@ -1321,13 +1322,14 @@ dependencies = [
 [[package]]
 name = "deno_ast"
-version = "0.43.3"
+version = "0.44.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48d00b724e06d2081a141ec1155756a0b465d413d8e2a7515221f61d482eb2ee"
+checksum = "eebc7aaabfdb3ddcad32aee1b62d250149dc8b35dfbdccbb125df2bdc62da952"
 dependencies = [
  "base64 0.21.7",
+ "deno_error",
  "deno_media_type",
- "deno_terminal 0.1.1",
+ "deno_terminal 0.2.0",
  "dprint-swc-ext",
  "once_cell",
  "percent-encoding",
@@ -1358,7 +1360,7 @@ dependencies = [
  "swc_visit",
  "swc_visit_macros",
  "text_lines",
- "thiserror",
+ "thiserror 2.0.3",
  "unicode-width",
  "url",
 ]
@@ -1378,7 +1380,7 @@ version = "0.174.0"
 dependencies = [
  "async-trait",
  "deno_core",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "uuid",
 ]
@@ -1392,15 +1394,15 @@ dependencies = [
  "rusqlite",
  "serde",
  "sha2",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
 ]

 [[package]]
 name = "deno_cache_dir"
-version = "0.13.2"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08c1f52170cd7715f8006da54cde1444863a0d6fbd9c11d037a737db2dec8e22"
+checksum = "cca43605c8cbce6c6787e0daf227864487c07c2b31d438c0bf43d1b38da94b7f"
 dependencies = [
  "base32",
  "deno_media_type",
@@ -1412,7 +1414,7 @@ dependencies = [
  "serde",
  "serde_json",
  "sha2",
- "thiserror",
+ "thiserror 1.0.64",
  "url",
 ]
@@ -1424,14 +1426,14 @@ dependencies = [
  "deno_webgpu",
  "image",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
 name = "deno_config"
-version = "0.39.2"
+version = "0.39.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38fb809500238be2b10eee42944a47b3ac38974e1edbb47f73afcfca7df143bf"
+checksum = "ce717af3fe6788dae63965d58d5637fd62be8fe4f345f189137ffc06c51837d2"
 dependencies = [
  "anyhow",
  "deno_package_json",
@@ -1447,7 +1449,7 @@ dependencies = [
  "phf",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "url",
 ]
@@ -1508,7 +1510,7 @@ dependencies = [
  "chrono",
  "deno_core",
  "saffron",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
 ]
@@ -1543,7 +1545,7 @@ dependencies = [
  "sha2",
  "signature",
  "spki",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "uuid",
  "x25519-dalek",
@@ -1551,9 +1553,9 @@ dependencies = [
 [[package]]
 name = "deno_doc"
-version = "0.161.1"
+version = "0.161.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32d994915f85e873865fc341e592080a487b0a987d06177016b2d93fd62162f8"
+checksum = "3af787319136f3e7f73ef551c618aeec70794522e36cd75ae35132a3bad983ef"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -1578,6 +1580,29 @@ dependencies = [
  "wasm-bindgen",
 ]

+[[package]]
+name = "deno_error"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "199c66ffd17ee1a948904d33f3d3f364573951c1f9fb3f859bfe7770bf33862a"
+dependencies = [
+ "deno_error_macro",
+ "libc",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "deno_error_macro"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3cd99df6ae75443907e1f959fc42ec6dcea67a7bd083e76cf23a117102c9a2ce"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.87",
+]
+
 [[package]]
 name = "deno_fetch"
 version = "0.204.0"
@@ -1602,7 +1627,7 @@ dependencies = [
  "rustls-webpki",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tokio-rustls",
  "tokio-socks",
@@ -1627,7 +1652,7 @@ dependencies = [
  "serde",
  "serde-value",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "winapi",
 ]
@@ -1650,16 +1675,16 @@ dependencies = [
  "rand",
  "rayon",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "winapi",
  "windows-sys 0.52.0",
 ]

 [[package]]
 name = "deno_graph"
-version = "0.86.2"
+version = "0.86.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c3f4be49dad28e794ff4eeb2daaf7956c97f8557097ef6f9c3ff1292e0a5c28"
+checksum = "fc78ed0b4bbcb4197300f0d6e7d1edc2d2c5019cdb9dedba7ff229158441885b"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1679,7 +1704,7 @@ dependencies = [
  "serde",
  "serde_json",
  "sha2",
- "thiserror",
+ "thiserror 2.0.3",
  "twox-hash",
  "url",
  "wasm_dep_analyzer",
@@ -1719,7 +1744,7 @@ dependencies = [
  "scopeguard",
  "serde",
  "smallvec",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tokio-util",
 ]
@@ -1773,15 +1798,15 @@ dependencies = [
  "rand",
  "rusqlite",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "url",
 ]

 [[package]]
 name = "deno_lint"
-version = "0.68.0"
+version = "0.68.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb994e6d1b18223df0a756c7948143b35682941d615edffef60d5b38822f38ac"
+checksum = "ce713d564f76efd90535061113210bdc6b942ed6327b33eb1d5f76a5daf8e7a5"
 dependencies = [
  "anyhow",
  "deno_ast",
@@ -1797,14 +1822,14 @@ dependencies = [
 [[package]]
 name = "deno_lockfile"
-version = "0.23.1"
+version = "0.23.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "579117d5815aa9bae0212637d6f4d5f45f9649bb2c8988dca434077545535039"
+checksum = "559c19feb00af0c34f0bd4a20e56e12463fafd5c5069d6005f3ce33008027eea"
 dependencies = [
  "deno_semver",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 2.0.3",
 ]

 [[package]]
@@ -1829,7 +1854,7 @@ dependencies = [
  "libuv-sys-lite",
  "log",
  "napi_sym",
- "thiserror",
+ "thiserror 1.0.64",
  "windows-sys 0.52.0",
 ]
@@ -1859,7 +1884,7 @@ dependencies = [
  "rustls-tokio-stream",
  "serde",
  "socket2",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
 ]
@@ -1943,7 +1968,7 @@ dependencies = [
  "sm3",
  "spki",
  "stable_deref_trait",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tokio-eld",
  "url",
@@ -1957,9 +1982,9 @@ dependencies = [
 [[package]]
 name = "deno_npm"
-version = "0.25.4"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6b4dc4a9f1cff63d5638e7d93042f24f46300d1cc77b86f3caaa699a7ddccf7"
+checksum = "f2f125a5dba7839c46394a0a9c835da9fe60f5f412587ab4956a76492a1cc6a8"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1970,7 +1995,36 @@ dependencies = [
  "monch",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 2.0.3",
+ "url",
+]
+
+[[package]]
+name = "deno_npm_cache"
+version = "0.0.1"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "base64 0.21.7",
+ "boxed_error",
+ "deno_cache_dir",
+ "deno_core",
+ "deno_npm",
+ "deno_semver",
+ "deno_unsync",
+ "faster-hex",
+ "flate2",
+ "futures",
+ "http 1.1.0",
+ "log",
+ "parking_lot",
+ "percent-encoding",
+ "rand",
+ "ring",
+ "serde_json",
+ "tar",
+ "tempfile",
+ "thiserror 1.0.64",
  "url",
 ]
@@ -1987,20 +2041,22 @@ dependencies = [
  "strum",
  "strum_macros",
  "syn 2.0.87",
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
 name = "deno_package_json"
-version = "0.1.2"
+version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6cbc4c4d3eb0960b58e8f43f9fc2d3f620fcac9a03cd85203e08db5b04e83c1f"
+checksum = "80b0a3d81c592624a1ae15332a04b4dc2b7c163ef1dfc7c60171f736d1babdf5"
 dependencies = [
+ "deno_error",
+ "deno_path_util",
  "deno_semver",
  "indexmap 2.3.0",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 2.0.3",
  "url",
 ]
@@ -2011,7 +2067,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ff25f6e08e7a0214bbacdd6f7195c7f1ebcd850c87a624e4ff06326b68b42d99"
 dependencies = [
  "percent-encoding",
- "thiserror",
+ "thiserror 1.0.64",
  "url",
 ]
@@ -2028,7 +2084,7 @@ dependencies = [
  "once_cell",
  "percent-encoding",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "which 4.4.2",
  "winapi",
 ]
@@ -2048,7 +2104,7 @@ dependencies = [
  "deno_semver",
  "node_resolver",
  "test_server",
- "thiserror",
+ "thiserror 1.0.64",
  "url",
 ]
@@ -2111,7 +2167,7 @@ dependencies = [
  "signal-hook-registry",
  "tempfile",
  "test_server",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tokio-metrics",
  "twox-hash",
@@ -2123,14 +2179,15 @@ dependencies = [
 [[package]]
 name = "deno_semver"
-version = "0.5.16"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c957c6a57c38b7dde2315df0da0ec228911e56a74f185b108a488d0401841a67"
+checksum = "4756be7351289726087408984db18b9eb5e0186907673f39f858d119d0162071"
 dependencies = [
+ "deno_error",
  "monch",
  "once_cell",
  "serde",
- "thiserror",
+ "thiserror 2.0.3",
  "url",
 ]
@@ -2147,7 +2204,7 @@ dependencies = [
  "nix",
  "os_pipe",
  "path-dedot",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
 ]
@@ -2203,7 +2260,7 @@ dependencies = [
  "rustls-tokio-stream",
  "rustls-webpki",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "webpki-roots",
 ]
@@ -2233,10 +2290,11 @@ dependencies = [
 [[package]]
 name = "deno_unsync"
-version = "0.4.1"
+version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f36b4ef61a04ce201b925a5dffa90f88437d37fee4836c758470dd15ba7f05e"
+checksum = "d774fd83f26b24f0805a6ab8b26834a0d06ceac0db517b769b1e4633c96a2057"
 dependencies = [
+ "futures",
  "parking_lot",
  "tokio",
 ]
@@ -2249,7 +2307,7 @@ dependencies = [
  "deno_console",
  "deno_core",
  "deno_webidl",
- "thiserror",
+ "thiserror 1.0.64",
  "urlpattern",
 ]
@@ -2270,7 +2328,7 @@ dependencies = [
  "flate2",
  "futures",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "uuid",
 ]
@@ -2282,7 +2340,7 @@ dependencies = [
  "deno_core",
  "raw-window-handle",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "wgpu-core",
  "wgpu-types",
@@ -2314,7 +2372,7 @@ dependencies = [
  "once_cell",
  "rustls-tokio-stream",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
 ]
@@ -2325,7 +2383,7 @@ dependencies = [
  "deno_core",
  "deno_web",
  "rusqlite",
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
@@ -2397,7 +2455,7 @@ dependencies = [
  "rand",
  "rusqlite",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tokio-stream",
  "uuid",
@@ -2690,9 +2748,9 @@ dependencies = [
 [[package]]
 name = "dprint-plugin-typescript"
-version = "0.93.2"
+version = "0.93.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ff29fd136541e59d51946f0d2d353fefc886776f61a799ebfb5838b06cef13b"
+checksum = "5804d1809f6191a9261f423c41cd51a50e49567d61caa5a8f6224eea94ae0d12"
 dependencies = [
  "anyhow",
  "deno_ast",
@@ -2838,7 +2896,7 @@ dependencies = [
  "debug-ignore",
  "indexmap 2.3.0",
  "log",
- "thiserror",
+ "thiserror 1.0.64",
  "zerocopy",
 ]
@@ -2998,7 +3056,7 @@ dependencies = [
  "anyhow",
  "async-trait",
  "log",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tokio-stream",
 ]
@@ -3033,7 +3091,7 @@ dependencies = [
  "rand",
  "sha1",
  "simdutf8",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "utf-8",
 ]
@@ -3091,7 +3149,7 @@ dependencies = [
  "deno_terminal 0.1.1",
  "parking_lot",
  "regex",
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
@@ -3565,7 +3623,7 @@ dependencies = [
  "pest_derive",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
@@ -3651,7 +3709,7 @@ dependencies = [
  "once_cell",
  "radix_trie",
  "rand",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tracing",
 ]
@@ -3674,7 +3732,7 @@ dependencies = [
  "once_cell",
  "rand",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "tinyvec",
  "tokio",
  "tracing",
@@ -3698,7 +3756,7 @@ dependencies = [
  "resolv-conf",
  "serde",
  "smallvec",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tracing",
 ]
@@ -3716,7 +3774,7 @@ dependencies = [
  "futures-util",
  "hickory-proto",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "time",
  "tokio",
  "tokio-util",
@@ -4156,7 +4214,7 @@ dependencies = [
  "percent-encoding",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "url",
 ]
@@ -4362,7 +4420,7 @@ dependencies = [
  "anyhow",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "uuid",
 ]
@@ -4655,9 +4713,9 @@ dependencies = [
 [[package]]
 name = "markup_fmt"
-version = "0.16.0"
+version = "0.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f303c36143671ac6c54112eb5aa95649b169dae783fdb6ead2c0e88b408c425c"
+checksum = "fa7605bb4ad755a9ab5c96f2ce3bfd4eb8acd559b842c041fc8a5f84d63aed3a"
 dependencies = [
  "aho-corasick",
  "css_dataset",
@@ -4812,7 +4870,7 @@ dependencies = [
  "serde",
  "spirv",
  "termcolor",
- "thiserror",
+ "thiserror 1.0.64",
  "unicode-xid",
 ]
@@ -4902,7 +4960,7 @@ dependencies = [
  "path-clean",
  "regex",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "url",
 ]
@@ -5099,7 +5157,7 @@ dependencies = [
  "js-sys",
  "once_cell",
  "pin-project-lite",
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
@@ -5129,7 +5187,7 @@ dependencies = [
  "opentelemetry_sdk",
  "prost",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tonic",
  "tracing",
@@ -5171,7 +5229,7 @@ dependencies = [
  "percent-encoding",
  "rand",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "tracing",
 ]
@@ -5355,7 +5413,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "311fb059dee1a7b802f036316d790138c613a4e8b180c822e3925a662e9f0c95"
 dependencies = [
  "memchr",
- "thiserror",
+ "thiserror 1.0.64",
  "ucd-trie",
 ]
@@ -5769,7 +5827,7 @@ dependencies = [
  "indexmap 2.3.0",
  "quick-xml",
  "strip-ansi-escapes",
- "thiserror",
+ "thiserror 1.0.64",
  "uuid",
 ]
@@ -5794,7 +5852,7 @@ dependencies = [
  "quinn-udp",
  "rustc-hash 1.1.0",
  "rustls",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tracing",
 ]
@@ -5811,7 +5869,7 @@ dependencies = [
  "rustc-hash 2.0.0",
  "rustls",
  "slab",
- "thiserror",
+ "thiserror 1.0.64",
  "tinyvec",
  "tracing",
 ]
@@ -5960,7 +6018,7 @@ checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891"
 dependencies = [
  "getrandom",
  "libredox",
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
@@ -6584,7 +6642,7 @@ dependencies = [
  "num-bigint",
  "serde",
  "smallvec",
- "thiserror",
+ "thiserror 1.0.64",
  "v8",
 ]
@@ -7587,7 +7645,16 @@ version = "1.0.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84"
 dependencies = [
- "thiserror-impl",
+ "thiserror-impl 1.0.64",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa"
+dependencies = [
+ "thiserror-impl 2.0.3",
 ]

 [[package]]
@@ -7601,6 +7668,17 @@ dependencies = [
  "syn 2.0.87",
 ]

+[[package]]
+name = "thiserror-impl"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.87",
+]
+
 [[package]]
 name = "thousands"
 version = "0.2.0"
@@ -7753,7 +7831,7 @@ checksum = "51165dfa029d2a65969413a6cc96f354b86b464498702f174a4efa13608fd8c0"
 dependencies = [
  "either",
  "futures-util",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
 ]
@@ -8170,7 +8248,7 @@ dependencies = [
  "indexmap 2.3.0",
  "num-bigint",
  "serde",
- "thiserror",
+ "thiserror 1.0.64",
  "wtf8",
 ]
@@ -8342,7 +8420,7 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f270206a91783fd90625c8bb0d8fbd459d0b1d1bf209b656f713f01ae7c04b8"
 dependencies = [
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
@@ -8396,7 +8474,7 @@ dependencies = [
  "rustc-hash 1.1.0",
  "serde",
  "smallvec",
- "thiserror",
+ "thiserror 1.0.64",
  "web-sys",
  "wgpu-hal",
  "wgpu-types",
@@ -8437,7 +8515,7 @@ dependencies = [
  "raw-window-handle",
  "rustc-hash 1.1.0",
  "smallvec",
- "thiserror",
+ "thiserror 1.0.64",
  "wasm-bindgen",
  "web-sys",
  "wgpu-types",
@@ -8503,7 +8581,7 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5b2b1bf557d947847a30eb73f79aa6cdb3eaf3ce02f5e9599438f77896a62b3c"
 dependencies = [
- "thiserror",
+ "thiserror 1.0.64",
  "windows",
 ]
@@ -8785,7 +8863,7 @@ dependencies = [
  "nom 7.1.3",
  "oid-registry",
  "rusticata-macros",
- "thiserror",
+ "thiserror 1.0.64",
  "time",
 ]
@@ -8929,7 +9007,7 @@ dependencies = [
  "parking_lot",
  "rand",
  "regex",
- "thiserror",
+ "thiserror 1.0.64",
  "tokio",
  "tokio-util",
  "uuid",
@@ -8970,7 +9048,7 @@ dependencies = [
  "flate2",
  "indexmap 2.3.0",
  "memchr",
- "thiserror",
+ "thiserror 1.0.64",
 ]

 [[package]]
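A note on the lockfile churn above: thiserror now appears at both 1.0.64 and 2.0.3 because deno_ast, deno_graph, deno_lockfile, deno_npm, deno_package_json, and deno_semver moved to the 2.x derive while the rest of the tree stays on 1.x. Cargo treats semver-incompatible majors as independent crates, so Cargo.lock disambiguates every dependency edge by appending the version ("thiserror 1.0.64" vs "thiserror 2.0.3") and both copies get compiled into the graph. The derive API itself is essentially unchanged across the bump; a minimal usage sketch (assuming thiserror as a dependency; CacheError is an invented example type):

use thiserror::Error;

// The same derive works on either major; what differs per crate is which
// copy of thiserror its dependency edge in the lockfile points at.
#[derive(Debug, Error)]
pub enum CacheError {
  #[error("cache entry not found: {0}")]
  NotFound(String),
  #[error(transparent)]
  Io(#[from] std::io::Error),
}

fn main() {
  println!("{}", CacheError::NotFound("npm:chalk".to_string()));
}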

Cargo.toml

@@ -30,6 +30,7 @@ members = [
   "ext/webstorage",
   "resolvers/deno",
   "resolvers/node",
+  "resolvers/npm_cache",
   "runtime",
   "runtime/permissions",
   "tests",
@@ -46,18 +47,18 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"

 [workspace.dependencies]
-deno_ast = { version = "=0.43.3", features = ["transpiling"] }
+deno_ast = { version = "=0.44.0", features = ["transpiling"] }
 deno_core = { version = "0.323.0" }

 deno_bench_util = { version = "0.174.0", path = "./bench_util" }
-deno_config = { version = "=0.39.2", features = ["workspace", "sync"] }
+deno_config = { version = "=0.39.3", features = ["workspace", "sync"] }
-deno_lockfile = "=0.23.1"
+deno_lockfile = "=0.23.2"
 deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
-deno_npm = "=0.25.4"
+deno_npm = "=0.26.0"
 deno_path_util = "=0.2.1"
 deno_permissions = { version = "0.40.0", path = "./runtime/permissions" }
 deno_runtime = { version = "0.189.0", path = "./runtime" }
-deno_semver = "=0.5.16"
+deno_semver = "=0.6.0"
 deno_terminal = "0.2.0"
 napi_sym = { version = "0.110.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
@@ -93,6 +94,7 @@ deno_websocket = { version = "0.185.0", path = "./ext/websocket" }
 deno_webstorage = { version = "0.175.0", path = "./ext/webstorage" }

 # resolvers
+deno_npm_cache = { version = "0.0.1", path = "./resolvers/npm_cache" }
 deno_resolver = { version = "0.12.0", path = "./resolvers/deno" }
 node_resolver = { version = "0.19.0", path = "./resolvers/node" }
@@ -115,8 +117,9 @@ console_static_text = "=0.8.1"
 dashmap = "5.5.3"
 data-encoding = "2.3.3"
 data-url = "=0.3.0"
-deno_cache_dir = "=0.13.2"
+deno_cache_dir = "=0.14.0"
-deno_package_json = { version = "0.1.2", default-features = false }
+deno_package_json = { version = "0.2.1", default-features = false }
+deno_unsync = "0.4.2"
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
 elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }

cli/Cargo.toml

@@ -72,11 +72,12 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa
 deno_cache_dir.workspace = true
 deno_config.workspace = true
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "=0.161.1", features = ["rust", "comrak"] }
+deno_doc = { version = "=0.161.2", features = ["rust", "comrak"] }
-deno_graph = { version = "=0.86.2" }
+deno_graph = { version = "=0.86.3" }
-deno_lint = { version = "=0.68.0", features = ["docs"] }
+deno_lint = { version = "=0.68.2", features = ["docs"] }
 deno_lockfile.workspace = true
 deno_npm.workspace = true
+deno_npm_cache.workspace = true
 deno_package_json.workspace = true
 deno_path_util.workspace = true
 deno_resolver.workspace = true
@@ -108,7 +109,7 @@ dotenvy = "0.15.7"
 dprint-plugin-json = "=0.19.4"
 dprint-plugin-jupyter = "=0.1.5"
 dprint-plugin-markdown = "=0.17.8"
-dprint-plugin-typescript = "=0.93.2"
+dprint-plugin-typescript = "=0.93.3"
 env_logger = "=0.10.0"
 fancy-regex = "=0.10.0"
 faster-hex.workspace = true
@@ -130,7 +131,7 @@ libz-sys.workspace = true
 log = { workspace = true, features = ["serde"] }
 lsp-types.workspace = true
 malva = "=0.11.0"
-markup_fmt = "=0.16.0"
+markup_fmt = "=0.18.0"
 memmem.workspace = true
 monch.workspace = true
 notify.workspace = true


@@ -18,12 +18,10 @@ impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
   fn read_to_string_lossy(
     &self,
     path: &std::path::Path,
-  ) -> Result<String, std::io::Error> {
+  ) -> Result<std::borrow::Cow<'static, str>, std::io::Error> {
     self
       .0
       .read_text_file_lossy_sync(path, None)
-      // todo(https://github.com/denoland/deno_config/pull/140): avoid clone
-      .map(|s| s.into_owned())
      .map_err(|err| err.into_io_error())
   }
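The signature change is the whole point of this hunk: deno_config's read_to_string_lossy now returns Cow<'static, str> instead of String (the change the deleted todo pointed at via deno_config#140), so this adapter no longer forces a copy with .into_owned(). A minimal sketch of why the Cow return type helps, using hypothetical ConfigFs/RealFs/EmbeddedFs stand-ins rather than the real trait; the same pattern appears again below for read_file_bytes and Cow<'static, [u8]>:

use std::borrow::Cow;
use std::io;
use std::path::Path;

trait ConfigFs {
  fn read_to_string_lossy(&self, path: &Path) -> io::Result<Cow<'static, str>>;
}

struct RealFs;

impl ConfigFs for RealFs {
  fn read_to_string_lossy(&self, path: &Path) -> io::Result<Cow<'static, str>> {
    // A disk read always allocates, so the String is wrapped as Cow::Owned.
    std::fs::read_to_string(path).map(Cow::Owned)
  }
}

struct EmbeddedFs;

impl ConfigFs for EmbeddedFs {
  fn read_to_string_lossy(&self, _path: &Path) -> io::Result<Cow<'static, str>> {
    // Data baked into the binary can be handed out without any copy,
    // which a `-> String` signature would have made impossible.
    Ok(Cow::Borrowed("{ \"imports\": {} }"))
  }
}

fn main() {
  let embedded = EmbeddedFs.read_to_string_lossy(Path::new("deno.json")).unwrap();
  assert!(matches!(embedded, Cow::Borrowed(_)));
  // The real-fs impl allocates; an error here just means the file is absent.
  let _ = RealFs.read_to_string_lossy(Path::new("deno.json"));
}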


@@ -109,9 +109,12 @@ impl CliLockfile {
     let Some(pkg_json) = maybe_pkg_json else {
       return Default::default();
     };
-    pkg_json
-      .resolve_local_package_json_deps()
+    let deps = pkg_json.resolve_local_package_json_deps();
+
+    deps
+      .dependencies
       .values()
+      .chain(deps.dev_dependencies.values())
       .filter_map(|dep| dep.as_ref().ok())
       .filter_map(|dep| match dep {
         PackageJsonDepValue::Req(req) => {
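This caller-side change tracks the deno_package_json 0.2 API: resolve_local_package_json_deps now returns dependencies and dev_dependencies as two separate maps instead of one merged map, and the lockfile code opts into both by chaining them. A toy sketch of the pattern with a stand-in struct (not the real return type):

use std::collections::BTreeMap;

struct LocalDeps {
  dependencies: BTreeMap<String, String>,
  dev_dependencies: BTreeMap<String, String>,
}

fn all_reqs(deps: &LocalDeps) -> Vec<&str> {
  // Visit regular deps first, then dev deps, without building a merged map.
  deps
    .dependencies
    .values()
    .chain(deps.dev_dependencies.values())
    .map(String::as_str)
    .collect()
}

fn main() {
  let deps = LocalDeps {
    dependencies: BTreeMap::from([("chalk".into(), "^5".into())]),
    dev_dependencies: BTreeMap::from([("vitest".into(), "^2".into())]),
  };
  assert_eq!(all_reqs(&deps), ["^5", "^2"]);
}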


@@ -27,6 +27,7 @@ use deno_npm::npm_rc::NpmRc;
 use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
 use deno_npm::NpmSystemInfo;
+use deno_npm_cache::NpmCacheSetting;
 use deno_path_util::normalize_path;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_telemetry::OtelConfig;
@@ -238,20 +239,25 @@ pub enum CacheSetting {
 }

 impl CacheSetting {
-  pub fn should_use_for_npm_package(&self, package_name: &str) -> bool {
+  pub fn as_npm_cache_setting(&self) -> NpmCacheSetting {
     match self {
-      CacheSetting::ReloadAll => false,
-      CacheSetting::ReloadSome(list) => {
-        if list.iter().any(|i| i == "npm:") {
-          return false;
-        }
-        let specifier = format!("npm:{package_name}");
-        if list.contains(&specifier) {
-          return false;
-        }
-        true
-      }
-      _ => true,
+      CacheSetting::Only => NpmCacheSetting::Only,
+      CacheSetting::ReloadAll => NpmCacheSetting::ReloadAll,
+      CacheSetting::ReloadSome(values) => {
+        if values.iter().any(|v| v == "npm:") {
+          NpmCacheSetting::ReloadAll
+        } else {
+          NpmCacheSetting::ReloadSome {
+            npm_package_names: values
+              .iter()
+              .filter_map(|v| v.strip_prefix("npm:"))
+              .map(|n| n.to_string())
+              .collect(),
+          }
+        }
+      }
+      CacheSetting::RespectHeaders => unreachable!(), // not supported
+      CacheSetting::Use => NpmCacheSetting::Use,
     }
   }
 }
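With the cache moved into the new deno_npm_cache crate, the CLI now converts its CacheSetting into the crate's NpmCacheSetting once, instead of answering should_use_for_npm_package per package. A self-contained sketch of the observable mapping, with both enums reduced to the variants visible in this hunk:

#[allow(dead_code)]
enum CacheSetting {
  Only,
  ReloadAll,
  ReloadSome(Vec<String>),
  RespectHeaders,
  Use,
}

#[derive(Debug, PartialEq)]
enum NpmCacheSetting {
  Only,
  ReloadAll,
  ReloadSome { npm_package_names: Vec<String> },
  Use,
}

impl CacheSetting {
  fn as_npm_cache_setting(&self) -> NpmCacheSetting {
    match self {
      CacheSetting::Only => NpmCacheSetting::Only,
      CacheSetting::ReloadAll => NpmCacheSetting::ReloadAll,
      CacheSetting::ReloadSome(values) => {
        // A bare "npm:" entry (e.g. --reload=npm:) reloads all npm packages.
        if values.iter().any(|v| v == "npm:") {
          NpmCacheSetting::ReloadAll
        } else {
          // Otherwise keep only npm-prefixed entries, stripped to bare names.
          NpmCacheSetting::ReloadSome {
            npm_package_names: values
              .iter()
              .filter_map(|v| v.strip_prefix("npm:"))
              .map(str::to_string)
              .collect(),
          }
        }
      }
      // Mirrors the diff's comment: RespectHeaders is not supported for npm.
      CacheSetting::RespectHeaders => unreachable!(),
      CacheSetting::Use => NpmCacheSetting::Use,
    }
  }
}

fn main() {
  let setting =
    CacheSetting::ReloadSome(vec!["npm:chalk".into(), "jsr:@std/path".into()]);
  // Non-npm entries are dropped; npm entries lose their scheme prefix.
  assert_eq!(
    setting.as_npm_cache_setting(),
    NpmCacheSetting::ReloadSome { npm_package_names: vec!["chalk".into()] }
  );
}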


@@ -8,8 +8,10 @@ use deno_core::serde_json;
 use deno_core::url::Url;
 use deno_package_json::PackageJsonDepValue;
 use deno_package_json::PackageJsonDepValueParseError;
+use deno_package_json::PackageJsonDepWorkspaceReq;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageReq;
+use deno_semver::VersionReq;
 use thiserror::Error;

 #[derive(Debug)]
@@ -95,8 +97,14 @@ impl NpmInstallDepsProvider {
       if let Some(pkg_json) = &folder.pkg_json {
         let deps = pkg_json.resolve_local_package_json_deps();
-        let mut pkg_pkgs = Vec::with_capacity(deps.len());
-        for (alias, dep) in deps {
+        let mut pkg_pkgs = Vec::with_capacity(
+          deps.dependencies.len() + deps.dev_dependencies.len(),
+        );
+        for (alias, dep) in deps
+          .dependencies
+          .into_iter()
+          .chain(deps.dev_dependencies.into_iter())
+        {
           let dep = match dep {
             Ok(dep) => dep,
             Err(err) => {
@@ -131,7 +139,16 @@
               });
             }
           }
-          PackageJsonDepValue::Workspace(version_req) => {
+          PackageJsonDepValue::Workspace(workspace_version_req) => {
+            let version_req = match workspace_version_req {
+              PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
+                version_req
+              }
+              PackageJsonDepWorkspaceReq::Tilde
+              | PackageJsonDepWorkspaceReq::Caret => {
+                VersionReq::parse_from_npm("*").unwrap()
+              }
+            };
             if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
               pkg.matches_name_and_version_req(&alias, &version_req)
             }) {
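Context for this hunk: deno_package_json 0.2 models workspace:~ and workspace:^ specifiers as PackageJsonDepWorkspaceReq::Tilde and ::Caret rather than as parsed version ranges. Since tilde and caret here mean "take whatever version the matched workspace member has", the install provider searches with a wildcard requirement in those cases. A reduced sketch of the mapping (stand-in types, not the real crate API):

enum WorkspaceReq {
  VersionReq(String), // e.g. "workspace:^1.0.0"
  Tilde,              // "workspace:~"
  Caret,              // "workspace:^"
}

fn effective_req(req: WorkspaceReq) -> String {
  match req {
    // An explicit range participates in member matching as-is.
    WorkspaceReq::VersionReq(v) => v,
    // Tilde/caret track the workspace member itself, so any version matches
    // when looking it up (the diff uses VersionReq::parse_from_npm("*")).
    WorkspaceReq::Tilde | WorkspaceReq::Caret => "*".to_string(),
  }
}

fn main() {
  assert_eq!(effective_req(WorkspaceReq::Caret), "*");
  assert_eq!(effective_req(WorkspaceReq::VersionReq("^1.0.0".into())), "^1.0.0");
}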

cli/cache/mod.rs (15 changed lines)

@@ -23,6 +23,7 @@ use deno_graph::source::Loader;
 use deno_runtime::deno_fs;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use node_resolver::InNpmPackageChecker;
+use std::borrow::Cow;
 use std::collections::HashMap;
 use std::path::Path;
 use std::path::PathBuf;
@@ -67,8 +68,11 @@ pub const CACHE_PERM: u32 = 0o644;
 pub struct RealDenoCacheEnv;

 impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
-  fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
-    std::fs::read(path)
+  fn read_file_bytes(
+    &self,
+    path: &Path,
+  ) -> std::io::Result<Cow<'static, [u8]>> {
+    std::fs::read(path).map(Cow::Owned)
   }

   fn atomic_write_file(
@@ -112,12 +116,13 @@ pub struct DenoCacheEnvFsAdapter<'a>(
 );

 impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
-  fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
+  fn read_file_bytes(
+    &self,
+    path: &Path,
+  ) -> std::io::Result<Cow<'static, [u8]>> {
     self
       .0
       .read_file_sync(path, None)
-      // todo(https://github.com/denoland/deno_cache_dir/pull/66): avoid clone
-      .map(|bytes| bytes.into_owned())
       .map_err(|err| err.into_io_error())
   }


@@ -504,7 +504,12 @@ impl CliFactory {
         let resolver = cli_options
           .create_workspace_resolver(
             self.file_fetcher()?,
-            if cli_options.use_byonm() {
+            if cli_options.use_byonm()
+              && !matches!(
+                cli_options.sub_command(),
+                DenoSubcommand::Publish(_)
+              )
+            {
               PackageJsonDepResolution::Disabled
             } else {
               // todo(dsherret): this should be false for nodeModulesDir: true


@@ -1540,7 +1540,7 @@ mod tests {
       .unwrap()
       .unwrap()
       .content;
-    String::from_utf8(bytes).unwrap()
+    String::from_utf8(bytes.into_owned()).unwrap()
   }

   #[track_caller]


@@ -41,6 +41,7 @@ use deno_path_util::url_to_file_path;
 use deno_runtime::deno_node::PackageJson;
 use indexmap::IndexSet;
 use lsp_types::ClientCapabilities;
+use std::borrow::Cow;
 use std::collections::BTreeMap;
 use std::collections::BTreeSet;
 use std::collections::HashMap;
@@ -2092,7 +2093,7 @@ impl<T: Clone> CachedFsItems<T> {
 #[derive(Default)]
 struct InnerData {
   stat_calls: CachedFsItems<deno_config::fs::FsMetadata>,
-  read_to_string_calls: CachedFsItems<String>,
+  read_to_string_calls: CachedFsItems<Cow<'static, str>>,
 }

 #[derive(Default)]
@@ -2113,7 +2114,7 @@ impl DenoConfigFs for CachedDenoConfigFs {
   fn read_to_string_lossy(
     &self,
     path: &Path,
-  ) -> Result<String, std::io::Error> {
+  ) -> Result<Cow<'static, str>, std::io::Error> {
     self
       .0
       .lock()


@@ -925,7 +925,7 @@ impl FileSystemDocuments {
     let content = bytes_to_content(
       specifier,
       media_type,
-      cached_file.content,
+      cached_file.content.into_owned(),
       maybe_charset,
     )
     .ok()?;


@@ -262,7 +262,7 @@ fn read_cached_url(
   cache
     .get(&cache.cache_item_key(url).ok()?, None)
     .ok()?
-    .map(|f| f.content)
+    .map(|f| f.content.into_owned())
 }

 #[derive(Debug)]


@@ -5,8 +5,6 @@ use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;

-use cache::RegistryInfoDownloader;
-use cache::TarballCache;
 use deno_ast::ModuleSpecifier;
 use deno_cache_dir::npm::NpmCacheDir;
 use deno_core::anyhow::Context;
@@ -42,22 +40,23 @@ use crate::args::NpmProcessState;
 use crate::args::NpmProcessStateKind;
 use crate::args::PackageJsonDepValueParseWithLocationError;
 use crate::cache::FastInsecureHasher;
-use crate::http_util::HttpClientProvider;
 use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::sync::AtomicFlag;

-use self::cache::NpmCache;
 use self::registry::CliNpmRegistryApi;
 use self::resolution::NpmResolution;
 use self::resolvers::create_npm_fs_resolver;
 use self::resolvers::NpmPackageFsResolver;
+use super::CliNpmCache;
+use super::CliNpmCacheEnv;
+use super::CliNpmRegistryInfoProvider;
 use super::CliNpmResolver;
+use super::CliNpmTarballCache;
 use super::InnerCliNpmResolverRef;
 use super::ResolvePkgFolderFromDenoReqError;

-pub mod cache;
 mod registry;
 mod resolution;
 mod resolvers;
@@ -85,8 +84,9 @@ pub struct CliManagedNpmResolverCreateOptions {
 pub async fn create_managed_npm_resolver_for_lsp(
   options: CliManagedNpmResolverCreateOptions,
 ) -> Arc<dyn CliNpmResolver> {
-  let npm_cache = create_cache(&options);
-  let npm_api = create_api(&options, npm_cache.clone());
+  let cache_env = create_cache_env(&options);
+  let npm_cache = create_cache(cache_env.clone(), &options);
+  let npm_api = create_api(npm_cache.clone(), cache_env.clone(), &options);
   // spawn due to the lsp's `Send` requirement
   deno_core::unsync::spawn(async move {
     let snapshot = match resolve_snapshot(&npm_api, options.snapshot).await {
@@ -97,8 +97,8 @@ pub async fn create_managed_npm_resolver_for_lsp(
       }
     };
     create_inner(
+      cache_env,
       options.fs,
-      options.http_client_provider,
       options.maybe_lockfile,
       npm_api,
       npm_cache,
@@ -118,12 +118,13 @@
 pub async fn create_managed_npm_resolver(
   options: CliManagedNpmResolverCreateOptions,
 ) -> Result<Arc<dyn CliNpmResolver>, AnyError> {
-  let npm_cache = create_cache(&options);
-  let npm_api = create_api(&options, npm_cache.clone());
+  let npm_cache_env = create_cache_env(&options);
+  let npm_cache = create_cache(npm_cache_env.clone(), &options);
+  let npm_api = create_api(npm_cache.clone(), npm_cache_env.clone(), &options);
   let snapshot = resolve_snapshot(&npm_api, options.snapshot).await?;
   Ok(create_inner(
+    npm_cache_env,
     options.fs,
-    options.http_client_provider,
    options.maybe_lockfile,
     npm_api,
     npm_cache,
@@ -139,11 +140,11 @@ pub async fn create_managed_npm_resolver(
 #[allow(clippy::too_many_arguments)]
 fn create_inner(
+  env: Arc<CliNpmCacheEnv>,
   fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
-  http_client_provider: Arc<HttpClientProvider>,
   maybe_lockfile: Option<Arc<CliLockfile>>,
   npm_api: Arc<CliNpmRegistryApi>,
-  npm_cache: Arc<NpmCache>,
+  npm_cache: Arc<CliNpmCache>,
   npm_rc: Arc<ResolvedNpmRc>,
   npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
   text_only_progress_bar: crate::util::progress_bar::ProgressBar,
@@ -157,12 +158,10 @@ fn create_inner(
     snapshot,
     maybe_lockfile.clone(),
   ));
-  let tarball_cache = Arc::new(TarballCache::new(
+  let tarball_cache = Arc::new(CliNpmTarballCache::new(
     npm_cache.clone(),
-    fs.clone(),
-    http_client_provider.clone(),
+    env,
     npm_rc.clone(),
-    text_only_progress_bar.clone(),
   ));
   let fs_resolver = create_npm_fs_resolver(
     fs.clone(),
@@ -190,25 +189,39 @@ fn create_inner(
   ))
 }

-fn create_cache(options: &CliManagedNpmResolverCreateOptions) -> Arc<NpmCache> {
-  Arc::new(NpmCache::new(
+fn create_cache_env(
+  options: &CliManagedNpmResolverCreateOptions,
+) -> Arc<CliNpmCacheEnv> {
+  Arc::new(CliNpmCacheEnv::new(
+    options.fs.clone(),
+    options.http_client_provider.clone(),
+    options.text_only_progress_bar.clone(),
+  ))
+}
+
+fn create_cache(
+  env: Arc<CliNpmCacheEnv>,
+  options: &CliManagedNpmResolverCreateOptions,
+) -> Arc<CliNpmCache> {
+  Arc::new(CliNpmCache::new(
     options.npm_cache_dir.clone(),
-    options.cache_setting.clone(),
+    options.cache_setting.as_npm_cache_setting(),
+    env,
     options.npmrc.clone(),
   ))
 }

 fn create_api(
+  cache: Arc<CliNpmCache>,
+  env: Arc<CliNpmCacheEnv>,
   options: &CliManagedNpmResolverCreateOptions,
-  npm_cache: Arc<NpmCache>,
 ) -> Arc<CliNpmRegistryApi> {
   Arc::new(CliNpmRegistryApi::new(
-    npm_cache.clone(),
-    Arc::new(RegistryInfoDownloader::new(
-      npm_cache,
+    cache.clone(),
+    Arc::new(CliNpmRegistryInfoProvider::new(
+      cache,
+      env,
       options.npmrc.clone(),
-      options.text_only_progress_bar.clone(),
     )),
   ))
 }
@@ -292,10 +305,10 @@ pub struct ManagedCliNpmResolver {
   fs_resolver: Arc<dyn NpmPackageFsResolver>,
   maybe_lockfile: Option<Arc<CliLockfile>>,
   npm_api: Arc<CliNpmRegistryApi>,
-  npm_cache: Arc<NpmCache>,
+  npm_cache: Arc<CliNpmCache>,
   npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
   resolution: Arc<NpmResolution>,
-  tarball_cache: Arc<TarballCache>,
+  tarball_cache: Arc<CliNpmTarballCache>,
   text_only_progress_bar: ProgressBar,
   npm_system_info: NpmSystemInfo,
   top_level_install_flag: AtomicFlag,
@@ -317,10 +330,10 @@ impl ManagedCliNpmResolver {
     fs_resolver: Arc<dyn NpmPackageFsResolver>,
     maybe_lockfile: Option<Arc<CliLockfile>>,
     npm_api: Arc<CliNpmRegistryApi>,
-    npm_cache: Arc<NpmCache>,
+    npm_cache: Arc<CliNpmCache>,
     npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
     resolution: Arc<NpmResolution>,
-    tarball_cache: Arc<TarballCache>,
+    tarball_cache: Arc<CliNpmTarballCache>,
     text_only_progress_bar: ProgressBar,
     npm_system_info: NpmSystemInfo,
     lifecycle_scripts: LifecycleScriptsConfig,
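The thread running through this file's changes: NpmCache, TarballCache, and RegistryInfoDownloader used to live under the CLI's own cache module, each taking fs, HTTP-client, and progress-bar arguments separately; they are now the extracted deno_npm_cache crate's CliNpmCache, CliNpmTarballCache, and CliNpmRegistryInfoProvider, and all of their I/O goes through a single CliNpmCacheEnv built up front. A stripped-down sketch of that wiring (all types here are stand-ins; only the Arc-sharing shape is the point):

use std::sync::Arc;

// Stand-ins for CliNpmCacheEnv and friends.
#[allow(dead_code)]
struct CacheEnv; // would hold fs, http client, progress bar

#[allow(dead_code)]
struct NpmCache { env: Arc<CacheEnv> }
#[allow(dead_code)]
struct TarballCache { cache: Arc<NpmCache>, env: Arc<CacheEnv> }
#[allow(dead_code)]
struct RegistryInfoProvider { cache: Arc<NpmCache>, env: Arc<CacheEnv> }

fn main() {
  // One environment, created once...
  let env = Arc::new(CacheEnv);
  // ...then shared by every component that performs I/O, replacing the
  // separate fs/http/progress-bar parameters the old constructors took.
  let cache = Arc::new(NpmCache { env: env.clone() });
  let _tarballs = TarballCache { cache: cache.clone(), env: env.clone() };
  let _registry = RegistryInfoProvider { cache, env };
}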


@@ -14,27 +14,28 @@ use deno_core::parking_lot::Mutex;
 use deno_npm::registry::NpmPackageInfo;
 use deno_npm::registry::NpmRegistryApi;
 use deno_npm::registry::NpmRegistryPackageInfoLoadError;
+use deno_npm_cache::NpmCacheSetting;

-use crate::args::CacheSetting;
+use crate::npm::CliNpmCache;
+use crate::npm::CliNpmRegistryInfoProvider;
 use crate::util::sync::AtomicFlag;

-use super::cache::NpmCache;
-use super::cache::RegistryInfoDownloader;
-
+// todo(#27198): Remove this and move functionality down into
+// RegistryInfoProvider, which already does most of this.
 #[derive(Debug)]
 pub struct CliNpmRegistryApi(Option<Arc<CliNpmRegistryApiInner>>);

 impl CliNpmRegistryApi {
   pub fn new(
-    cache: Arc<NpmCache>,
-    registry_info_downloader: Arc<RegistryInfoDownloader>,
+    cache: Arc<CliNpmCache>,
+    registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
   ) -> Self {
     Self(Some(Arc::new(CliNpmRegistryApiInner {
       cache,
       force_reload_flag: Default::default(),
       mem_cache: Default::default(),
       previously_reloaded_packages: Default::default(),
-      registry_info_downloader,
+      registry_info_provider,
     })))
   }
@@ -83,11 +84,11 @@ enum CacheItem {
 #[derive(Debug)]
 struct CliNpmRegistryApiInner {
-  cache: Arc<NpmCache>,
+  cache: Arc<CliNpmCache>,
   force_reload_flag: AtomicFlag,
   mem_cache: Mutex<HashMap<String, CacheItem>>,
   previously_reloaded_packages: Mutex<HashSet<String>>,
-  registry_info_downloader: Arc<RegistryInfoDownloader>,
+  registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
 }

 impl CliNpmRegistryApiInner {
@@ -118,7 +119,7 @@ impl CliNpmRegistryApiInner {
           return Ok(result);
         }
       }
-      api.registry_info_downloader
+      api.registry_info_provider
         .load_package_info(&name)
         .await
         .map_err(Arc::new)
@@ -159,7 +160,7 @@ impl CliNpmRegistryApiInner {
     // is disabled or if we're already reloading
     if matches!(
       self.cache.cache_setting(),
-      CacheSetting::Only | CacheSetting::ReloadAll
+      NpmCacheSetting::Only | NpmCacheSetting::ReloadAll
     ) {
       return false;
     }


@@ -8,11 +8,10 @@ use deno_core::error::AnyError;
 use deno_lockfile::NpmPackageDependencyLockfileInfo;
 use deno_lockfile::NpmPackageLockfileInfo;
 use deno_npm::registry::NpmRegistryApi;
+use deno_npm::resolution::AddPkgReqsOptions;
 use deno_npm::resolution::NpmPackagesPartitioned;
 use deno_npm::resolution::NpmResolutionError;
 use deno_npm::resolution::NpmResolutionSnapshot;
-use deno_npm::resolution::NpmResolutionSnapshotPendingResolver;
-use deno_npm::resolution::NpmResolutionSnapshotPendingResolverOptions;
 use deno_npm::resolution::PackageCacheFolderIdNotFoundError;
 use deno_npm::resolution::PackageNotFoundFromReferrerError;
 use deno_npm::resolution::PackageNvNotFoundError;
@@ -283,8 +282,9 @@ async fn add_package_reqs_to_snapshot(
     /* this string is used in tests */
     "Running npm resolution."
   );
-  let pending_resolver = get_npm_pending_resolver(api);
-  let result = pending_resolver.add_pkg_reqs(snapshot, package_reqs).await;
+  let result = snapshot
+    .add_pkg_reqs(api, get_add_pkg_reqs_options(package_reqs))
+    .await;
   api.clear_memory_cache();
   let result = match &result.dep_graph_result {
     Err(NpmResolutionError::Resolution(err)) if api.mark_force_reload() => {
@@ -293,7 +293,9 @@
       // try again
       let snapshot = get_new_snapshot();
-      let result = pending_resolver.add_pkg_reqs(snapshot, package_reqs).await;
+      let result = snapshot
+        .add_pkg_reqs(api, get_add_pkg_reqs_options(package_reqs))
+        .await;
       api.clear_memory_cache();
       result
     }
@@ -309,19 +311,15 @@
   result
 }

-fn get_npm_pending_resolver(
-  api: &CliNpmRegistryApi,
-) -> NpmResolutionSnapshotPendingResolver<CliNpmRegistryApi> {
-  NpmResolutionSnapshotPendingResolver::new(
-    NpmResolutionSnapshotPendingResolverOptions {
-      api,
-      // WARNING: When bumping this version, check if anything needs to be
-      // updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
-      types_node_version_req: Some(
-        VersionReq::parse_from_npm("22.0.0 - 22.5.4").unwrap(),
-      ),
-    },
-  )
+fn get_add_pkg_reqs_options(package_reqs: &[PackageReq]) -> AddPkgReqsOptions {
+  AddPkgReqsOptions {
+    package_reqs,
+    // WARNING: When bumping this version, check if anything needs to be
+    // updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
+    types_node_version_req: Some(
+      VersionReq::parse_from_npm("22.0.0 - 22.5.4").unwrap(),
+    ),
+  }
 }

 fn populate_lockfile_from_snapshot(
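The refactor above replaces deno_npm's NpmResolutionSnapshotPendingResolver wrapper with a method directly on the snapshot that takes an AddPkgReqsOptions value. Because the options borrow the request slice, a fresh value is built for each attempt, which is why get_add_pkg_reqs_options is called again on the force-reload retry. A schematic of that calling convention (names simplified stand-ins, not the deno_npm types):

#[allow(dead_code)]
struct PkgReq(String);

#[allow(dead_code)]
struct AddOptions<'a> {
  package_reqs: &'a [PkgReq],
  types_node_version_req: Option<&'static str>,
}

fn options_for(package_reqs: &[PkgReq]) -> AddOptions<'_> {
  AddOptions {
    package_reqs,
    // Pinned range mirroring the diff's types_node_version_req.
    types_node_version_req: Some("22.0.0 - 22.5.4"),
  }
}

struct Snapshot;

impl Snapshot {
  fn add_pkg_reqs(self, opts: AddOptions<'_>) -> usize {
    // The real method resolves against registry data; here we just count.
    opts.package_reqs.len()
  }
}

fn main() {
  let reqs = vec![PkgReq("chalk@5".into())];
  // First attempt consumes a snapshot and an options value...
  let first = Snapshot.add_pkg_reqs(options_for(&reqs));
  // ...and the retry path builds both again, as in the diff.
  let retry = Snapshot.add_pkg_reqs(options_for(&reqs));
  assert_eq!((first, retry), (1, 1));
}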


@@ -24,7 +24,7 @@ use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::NodePermissions;
 use node_resolver::errors::PackageFolderResolveError;

-use crate::npm::managed::cache::TarballCache;
+use crate::npm::CliNpmTarballCache;

 /// Part of the resolution that interacts with the file system.
 #[async_trait(?Send)]
@@ -140,7 +140,7 @@ impl RegistryReadPermissionChecker {
 /// Caches all the packages in parallel.
 pub async fn cache_packages(
   packages: &[NpmResolutionPackage],
-  tarball_cache: &Arc<TarballCache>,
+  tarball_cache: &Arc<CliNpmTarballCache>,
 ) -> Result<(), AnyError> {
   let mut futures_unordered = futures::stream::FuturesUnordered::new();
   for package in packages {


@@ -8,6 +8,8 @@ use std::path::PathBuf;
 use std::sync::Arc;

 use crate::colors;
+use crate::npm::CliNpmCache;
+use crate::npm::CliNpmTarballCache;
 use async_trait::async_trait;
 use deno_ast::ModuleSpecifier;
 use deno_core::error::AnyError;
@@ -24,8 +26,6 @@ use node_resolver::errors::ReferrerNotFoundError;
 use crate::args::LifecycleScriptsConfig;
 use crate::cache::FastInsecureHasher;

-use super::super::cache::NpmCache;
-use super::super::cache::TarballCache;
 use super::super::resolution::NpmResolution;
 use super::common::cache_packages;
 use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
@@ -35,8 +35,8 @@ use super::common::RegistryReadPermissionChecker;
 /// Resolves packages from the global npm cache.
 #[derive(Debug)]
 pub struct GlobalNpmPackageResolver {
-  cache: Arc<NpmCache>,
-  tarball_cache: Arc<TarballCache>,
+  cache: Arc<CliNpmCache>,
+  tarball_cache: Arc<CliNpmTarballCache>,
   resolution: Arc<NpmResolution>,
   system_info: NpmSystemInfo,
   registry_read_permission_checker: RegistryReadPermissionChecker,
@@ -45,9 +45,9 @@ pub struct GlobalNpmPackageResolver {
 impl GlobalNpmPackageResolver {
   pub fn new(
-    cache: Arc<NpmCache>,
+    cache: Arc<CliNpmCache>,
     fs: Arc<dyn FileSystem>,
-    tarball_cache: Arc<TarballCache>,
+    tarball_cache: Arc<CliNpmTarballCache>,
     resolution: Arc<NpmResolution>,
     system_info: NpmSystemInfo,
     lifecycle_scripts: LifecycleScriptsConfig,


@@ -17,6 +17,8 @@ use std::sync::Arc;

 use crate::args::LifecycleScriptsConfig;
 use crate::colors;
+use crate::npm::CliNpmCache;
+use crate::npm::CliNpmTarballCache;
 use async_trait::async_trait;
 use deno_ast::ModuleSpecifier;
 use deno_cache_dir::npm::mixed_case_package_name_decode;
@@ -52,8 +54,6 @@ use crate::util::fs::LaxSingleProcessFsFlag;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::progress_bar::ProgressMessagePrompt;

-use super::super::cache::NpmCache;
-use super::super::cache::TarballCache;
 use super::super::resolution::NpmResolution;
 use super::common::bin_entries;
 use super::common::NpmPackageFsResolver;
@@ -63,12 +63,12 @@ use super::common::RegistryReadPermissionChecker;
 /// and resolves packages from it.
 #[derive(Debug)]
 pub struct LocalNpmPackageResolver {
-  cache: Arc<NpmCache>,
+  cache: Arc<CliNpmCache>,
   fs: Arc<dyn deno_fs::FileSystem>,
   npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
   progress_bar: ProgressBar,
   resolution: Arc<NpmResolution>,
-  tarball_cache: Arc<TarballCache>,
+  tarball_cache: Arc<CliNpmTarballCache>,
   root_node_modules_path: PathBuf,
   root_node_modules_url: Url,
   system_info: NpmSystemInfo,
@@ -79,12 +79,12 @@ pub struct LocalNpmPackageResolver {
 impl LocalNpmPackageResolver {
   #[allow(clippy::too_many_arguments)]
   pub fn new(
-    cache: Arc<NpmCache>,
+    cache: Arc<CliNpmCache>,
     fs: Arc<dyn deno_fs::FileSystem>,
     npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
     progress_bar: ProgressBar,
     resolution: Arc<NpmResolution>,
-    tarball_cache: Arc<TarballCache>,
+    tarball_cache: Arc<CliNpmTarballCache>,
     node_modules_folder: PathBuf,
     system_info: NpmSystemInfo,
     lifecycle_scripts: LifecycleScriptsConfig,
@@ -284,10 +284,10 @@ fn local_node_modules_package_contents_path(
 #[allow(clippy::too_many_arguments)]
 async fn sync_resolution_with_fs(
   snapshot: &NpmResolutionSnapshot,
-  cache: &Arc<NpmCache>,
+  cache: &Arc<CliNpmCache>,
   npm_install_deps_provider: &NpmInstallDepsProvider,
   progress_bar: &ProgressBar,
-  tarball_cache: &Arc<TarballCache>,
+  tarball_cache: &Arc<CliNpmTarballCache>,
   root_node_modules_dir_path: &Path,
   system_info: &NpmSystemInfo,
   lifecycle_scripts: &LifecycleScriptsConfig,

View file

@@ -12,6 +12,8 @@ use deno_runtime::deno_fs::FileSystem;
 use crate::args::LifecycleScriptsConfig;
 use crate::args::NpmInstallDepsProvider;
+use crate::npm::CliNpmCache;
+use crate::npm::CliNpmTarballCache;
 use crate::util::progress_bar::ProgressBar;
 pub use self::common::NpmPackageFsResolver;
@@ -19,18 +21,16 @@ pub use self::common::NpmPackageFsResolver;
 use self::global::GlobalNpmPackageResolver;
 use self::local::LocalNpmPackageResolver;
-use super::cache::NpmCache;
-use super::cache::TarballCache;
 use super::resolution::NpmResolution;
 #[allow(clippy::too_many_arguments)]
 pub fn create_npm_fs_resolver(
   fs: Arc<dyn FileSystem>,
-  npm_cache: Arc<NpmCache>,
+  npm_cache: Arc<CliNpmCache>,
   npm_install_deps_provider: &Arc<NpmInstallDepsProvider>,
   progress_bar: &ProgressBar,
   resolution: Arc<NpmResolution>,
-  tarball_cache: Arc<TarballCache>,
+  tarball_cache: Arc<CliNpmTarballCache>,
   maybe_node_modules_path: Option<PathBuf>,
   system_info: NpmSystemInfo,
   lifecycle_scripts: LifecycleScriptsConfig,

View file

@@ -1,33 +1,39 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 mod byonm;
-mod common;
 mod managed;
 use std::borrow::Cow;
 use std::path::Path;
 use std::sync::Arc;
-use common::maybe_auth_header_for_npm_registry;
 use dashmap::DashMap;
 use deno_core::error::AnyError;
 use deno_core::serde_json;
+use deno_core::url::Url;
 use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::registry::NpmPackageInfo;
 use deno_resolver::npm::ByonmInNpmPackageChecker;
 use deno_resolver::npm::ByonmNpmResolver;
 use deno_resolver::npm::CliNpmReqResolver;
 use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
+use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::NodePermissions;
 use deno_runtime::ops::process::NpmProcessStateProvider;
 use deno_semver::package::PackageNv;
 use deno_semver::package::PackageReq;
-use managed::cache::registry_info::get_package_url;
+use http::HeaderName;
+use http::HeaderValue;
 use managed::create_managed_in_npm_pkg_checker;
 use node_resolver::InNpmPackageChecker;
 use node_resolver::NpmPackageFolderResolver;
 use crate::file_fetcher::FileFetcher;
+use crate::http_util::HttpClientProvider;
+use crate::util::fs::atomic_write_file_with_retries_and_fs;
+use crate::util::fs::hard_link_dir_recursive;
+use crate::util::fs::AtomicWriteFileFsAdapter;
+use crate::util::progress_bar::ProgressBar;
 pub use self::byonm::CliByonmNpmResolver;
 pub use self::byonm::CliByonmNpmResolverCreateOptions;
@@ -36,6 +42,99 @@ pub use self::managed::CliManagedNpmResolverCreateOptions;
 pub use self::managed::CliNpmResolverManagedSnapshotOption;
 pub use self::managed::ManagedCliNpmResolver;
+pub type CliNpmTarballCache = deno_npm_cache::TarballCache<CliNpmCacheEnv>;
+pub type CliNpmCache = deno_npm_cache::NpmCache<CliNpmCacheEnv>;
+pub type CliNpmRegistryInfoProvider =
+  deno_npm_cache::RegistryInfoProvider<CliNpmCacheEnv>;
+
+#[derive(Debug)]
+pub struct CliNpmCacheEnv {
+  fs: Arc<dyn FileSystem>,
+  http_client_provider: Arc<HttpClientProvider>,
+  progress_bar: ProgressBar,
+}
+
+impl CliNpmCacheEnv {
+  pub fn new(
+    fs: Arc<dyn FileSystem>,
+    http_client_provider: Arc<HttpClientProvider>,
+    progress_bar: ProgressBar,
+  ) -> Self {
+    Self {
+      fs,
+      http_client_provider,
+      progress_bar,
+    }
+  }
+}
+
+#[async_trait::async_trait(?Send)]
+impl deno_npm_cache::NpmCacheEnv for CliNpmCacheEnv {
+  fn exists(&self, path: &Path) -> bool {
+    self.fs.exists_sync(path)
+  }
+
+  fn hard_link_dir_recursive(
+    &self,
+    from: &Path,
+    to: &Path,
+  ) -> Result<(), AnyError> {
+    // todo(dsherret): use self.fs here instead
+    hard_link_dir_recursive(from, to)
+  }
+
+  fn atomic_write_file_with_retries(
+    &self,
+    file_path: &Path,
+    data: &[u8],
+  ) -> std::io::Result<()> {
+    atomic_write_file_with_retries_and_fs(
+      &AtomicWriteFileFsAdapter {
+        fs: self.fs.as_ref(),
+        write_mode: crate::cache::CACHE_PERM,
+      },
+      file_path,
+      data,
+    )
+  }
+
+  async fn download_with_retries_on_any_tokio_runtime(
+    &self,
+    url: Url,
+    maybe_auth_header: Option<(HeaderName, HeaderValue)>,
+  ) -> Result<Option<Vec<u8>>, deno_npm_cache::DownloadError> {
+    let guard = self.progress_bar.update(url.as_str());
+    let client = self.http_client_provider.get_or_create().map_err(|err| {
+      deno_npm_cache::DownloadError {
+        status_code: None,
+        error: err,
+      }
+    })?;
+    client
+      .download_with_progress_and_retries(url, maybe_auth_header, &guard)
+      .await
+      .map_err(|err| {
+        use crate::http_util::DownloadError::*;
+        let status_code = match &err {
+          Fetch { .. }
+          | UrlParse { .. }
+          | HttpParse { .. }
+          | Json { .. }
+          | ToStr { .. }
+          | NoRedirectHeader { .. }
+          | TooManyRedirects => None,
+          BadResponse(bad_response_error) => {
+            Some(bad_response_error.status_code)
+          }
+        };
+        deno_npm_cache::DownloadError {
+          status_code,
+          error: err.into(),
+        }
+      })
+  }
+}
+
 pub enum CliNpmResolverCreateOptions {
   Managed(CliManagedNpmResolverCreateOptions),
   Byonm(CliByonmNpmResolverCreateOptions),
@@ -179,13 +278,15 @@ impl NpmFetchResolver {
     if let Some(info) = self.info_by_name.get(name) {
       return info.value().clone();
     }
+    // todo(#27198): use RegistryInfoProvider instead
    let fetch_package_info = || async {
-      let info_url = get_package_url(&self.npmrc, name);
+      let info_url = deno_npm_cache::get_package_url(&self.npmrc, name);
       let file_fetcher = self.file_fetcher.clone();
       let registry_config = self.npmrc.get_registry_config(name);
       // TODO(bartlomieju): this should error out, not use `.ok()`.
       let maybe_auth_header =
-        maybe_auth_header_for_npm_registry(registry_config).ok()?;
+        deno_npm_cache::maybe_auth_header_for_npm_registry(registry_config)
+          .ok()?;
       // spawn due to the lsp's `Send` requirement
       let file = deno_core::unsync::spawn(async move {
         file_fetcher
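
Note on the hunks above: the extracted npm cache now lives in the deno_npm_cache crate and is generic over an environment, so the CLI injects its file system, HTTP client, and progress reporting through CliNpmCacheEnv and pins the generic parameter once with the CliNpmCache and CliNpmTarballCache aliases. A minimal, self-contained sketch of that dependency-injection pattern (the names below are illustrative, not the real deno_npm_cache API):

// Sketch of a trait-parameterized cache; illustrative names only.
use std::path::Path;

trait CacheEnv {
  fn exists(&self, path: &Path) -> bool;
}

struct Cache<TEnv: CacheEnv> {
  env: TEnv,
}

impl<TEnv: CacheEnv> Cache<TEnv> {
  fn new(env: TEnv) -> Self {
    Self { env }
  }

  fn is_cached(&self, path: &Path) -> bool {
    // all I/O is delegated to the injected environment
    self.env.exists(path)
  }
}

// The CLI-side environment implementation.
struct RealEnv;

impl CacheEnv for RealEnv {
  fn exists(&self, path: &Path) -> bool {
    path.exists()
  }
}

// Fix the generic parameter once, mirroring the `CliNpmCache` alias.
type CliCache = Cache<RealEnv>;

fn main() {
  let cache = CliCache::new(RealEnv);
  println!("{}", cache.is_cached(Path::new("/tmp")));
}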

View file

@@ -440,8 +440,10 @@ pub fn format_html(
       )
     }
     _ => {
-      let mut typescript_config =
-        get_resolved_typescript_config(fmt_options);
+      let mut typescript_config_builder =
+        get_typescript_config_builder(fmt_options);
+      typescript_config_builder.file_indent_level(hints.indent_level);
+      let mut typescript_config = typescript_config_builder.build();
       typescript_config.line_width = hints.print_width as u32;
       dprint_plugin_typescript::format_text(
         &path,
@@ -919,9 +921,9 @@ fn files_str(len: usize) -> &'static str {
   }
 }
-fn get_resolved_typescript_config(
+fn get_typescript_config_builder(
   options: &FmtOptionsConfig,
-) -> dprint_plugin_typescript::configuration::Configuration {
+) -> dprint_plugin_typescript::configuration::ConfigurationBuilder {
   let mut builder =
     dprint_plugin_typescript::configuration::ConfigurationBuilder::new();
   builder.deno();
@@ -953,7 +955,13 @@ fn get_resolved_typescript_config(
     });
   }
-  builder.build()
+  builder
+}
+
+fn get_resolved_typescript_config(
+  options: &FmtOptionsConfig,
+) -> dprint_plugin_typescript::configuration::Configuration {
+  get_typescript_config_builder(options).build()
 }
 fn get_resolved_markdown_config(
@@ -1075,6 +1083,7 @@ fn get_resolved_markup_fmt_config(
   };
   let language_options = LanguageOptions {
+    script_formatter: Some(markup_fmt::config::ScriptFormatter::Dprint),
     quotes: Quotes::Double,
     format_comments: false,
     script_indent: true,
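
Note: format_html can now set the embedded-script indent level before the dprint configuration is built, because the config resolver was split into a builder-returning helper while the old function is kept as a thin wrapper for existing callers. A hedged, standalone sketch of the same builder-splitting refactor (hypothetical names, not the dprint API):

// Illustrative sketch: expose the builder so one caller can tweak
// options before the final build; other callers keep the old entry point.
#[derive(Debug)]
struct Config {
  line_width: u32,
  indent_level: u32,
}

#[derive(Default)]
struct ConfigBuilder {
  line_width: u32,
  indent_level: u32,
}

impl ConfigBuilder {
  fn indent_level(&mut self, level: u32) -> &mut Self {
    self.indent_level = level;
    self
  }

  fn build(&self) -> Config {
    Config {
      line_width: self.line_width,
      indent_level: self.indent_level,
    }
  }
}

fn get_config_builder() -> ConfigBuilder {
  ConfigBuilder::default()
}

// Kept for callers that only need the finished config.
fn get_resolved_config() -> Config {
  get_config_builder().build()
}

fn main() {
  // The HTML path can now adjust indentation for embedded scripts.
  let mut builder = get_config_builder();
  builder.indent_level(2);
  println!("{:?}", builder.build());
  println!("{:?}", get_resolved_config());
}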

View file

@@ -169,7 +169,7 @@ impl Diagnostic for PublishDiagnostic {
         ..
       }) => DiagnosticLevel::Warning,
       FastCheck(_) => DiagnosticLevel::Error,
-      SpecifierUnfurl(_) => DiagnosticLevel::Warning,
+      SpecifierUnfurl(d) => d.level(),
       InvalidPath { .. } => DiagnosticLevel::Error,
       DuplicatePath { .. } => DiagnosticLevel::Error,
       UnsupportedFileType { .. } => DiagnosticLevel::Warning,
@@ -187,7 +187,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.code(),
-      SpecifierUnfurl(diagnostic) => Cow::Borrowed(diagnostic.code()),
+      SpecifierUnfurl(diagnostic) => diagnostic.code(),
       InvalidPath { .. } => Cow::Borrowed("invalid-path"),
       DuplicatePath { .. } => Cow::Borrowed("case-insensitive-duplicate-path"),
       UnsupportedFileType { .. } => Cow::Borrowed("unsupported-file-type"),
@@ -207,7 +207,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.message(),
-      SpecifierUnfurl(diagnostic) => Cow::Borrowed(diagnostic.message()),
+      SpecifierUnfurl(diagnostic) => diagnostic.message(),
       InvalidPath { message, .. } => Cow::Borrowed(message.as_str()),
       DuplicatePath { .. } => {
         Cow::Borrowed("package path is a case insensitive duplicate of another path in the package")
@@ -243,17 +243,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.location(),
-      SpecifierUnfurl(diagnostic) => match diagnostic {
-        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
-          specifier,
-          text_info,
-          range,
-        } => DiagnosticLocation::ModulePosition {
-          specifier: Cow::Borrowed(specifier),
-          text_info: Cow::Borrowed(text_info),
-          source_pos: DiagnosticSourcePos::SourcePos(range.start),
-        },
-      },
+      SpecifierUnfurl(diagnostic) => diagnostic.location(),
       InvalidPath { path, .. } => {
         DiagnosticLocation::Path { path: path.clone() }
       }
@@ -325,24 +315,8 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(diagnostic) => diagnostic.snippet(),
-      SpecifierUnfurl(diagnostic) => match diagnostic {
-        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
-          text_info,
-          range,
-          ..
-        } => Some(DiagnosticSnippet {
-          source: Cow::Borrowed(text_info),
-          highlights: vec![DiagnosticSnippetHighlight {
-            style: DiagnosticSnippetHighlightStyle::Warning,
-            range: DiagnosticSourceRange {
-              start: DiagnosticSourcePos::SourcePos(range.start),
-              end: DiagnosticSourcePos::SourcePos(range.end),
-            },
-            description: Some("the unanalyzable dynamic import".into()),
-          }],
-        }),
-      },
+      FastCheck(d) => d.snippet(),
+      SpecifierUnfurl(d) => d.snippet(),
       InvalidPath { .. } => None,
       DuplicatePath { .. } => None,
       UnsupportedFileType { .. } => None,
@@ -380,7 +354,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.hint(),
-      SpecifierUnfurl(_) => None,
+      SpecifierUnfurl(d) => d.hint(),
       InvalidPath { .. } => Some(
         Cow::Borrowed("rename or remove the file, or add it to 'publish.exclude' in the config file"),
       ),
@@ -436,9 +410,9 @@ impl Diagnostic for PublishDiagnostic {
           None => None,
         }
       }
-      SyntaxError(diagnostic) => diagnostic.snippet_fixed(),
+      SyntaxError(d) => d.snippet_fixed(),
+      SpecifierUnfurl(d) => d.snippet_fixed(),
       FastCheck(_)
-      | SpecifierUnfurl(_)
       | InvalidPath { .. }
       | DuplicatePath { .. }
       | UnsupportedFileType { .. }
@@ -453,16 +427,8 @@ impl Diagnostic for PublishDiagnostic {
   fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(diagnostic) => {
-        diagnostic.info()
-      }
-      SpecifierUnfurl(diagnostic) => match diagnostic {
-        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => Cow::Borrowed(&[
-          Cow::Borrowed("after publishing this package, imports from the local import map / package.json do not work"),
-          Cow::Borrowed("dynamic imports that can not be analyzed at publish time will not be rewritten automatically"),
-          Cow::Borrowed("make sure the dynamic import is resolvable at runtime without an import map / package.json")
-        ]),
-      },
+      FastCheck(d) => d.info(),
+      SpecifierUnfurl(d) => d.info(),
       InvalidPath { .. } => Cow::Borrowed(&[
         Cow::Borrowed("to portably support all platforms, including windows, the allowed characters in package paths are limited"),
       ]),
@@ -503,10 +469,8 @@ impl Diagnostic for PublishDiagnostic {
   fn docs_url(&self) -> Option<Cow<'_, str>> {
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(diagnostic) => diagnostic.docs_url(),
-      SpecifierUnfurl(diagnostic) => match diagnostic {
-        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => None,
-      },
+      FastCheck(d) => d.docs_url(),
+      SpecifierUnfurl(d) => d.docs_url(),
       InvalidPath { .. } => {
         Some(Cow::Borrowed("https://jsr.io/go/invalid-path"))
       }
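
Note: every arm above now forwards to the inner diagnostic's own Diagnostic implementation instead of open-coding its location, snippet, and info inline, so new variants of the inner type no longer require edits here. A small sketch of that delegation pattern with illustrative names:

// Wrapper enum forwarding trait methods to the variant's own impl.
trait Diag {
  fn code(&self) -> &'static str;
  fn hint(&self) -> Option<&'static str> {
    None
  }
}

struct InnerDiag;

impl Diag for InnerDiag {
  fn code(&self) -> &'static str {
    "inner-code"
  }
  fn hint(&self) -> Option<&'static str> {
    Some("inner hint")
  }
}

enum Outer {
  Inner(InnerDiag),
  Other,
}

impl Diag for Outer {
  fn code(&self) -> &'static str {
    match self {
      // delegate instead of duplicating the inner logic
      Outer::Inner(d) => d.code(),
      Outer::Other => "other-code",
    }
  }
  fn hint(&self) -> Option<&'static str> {
    match self {
      Outer::Inner(d) => d.hint(),
      Outer::Other => None,
    }
  }
}

fn main() {
  let d = Outer::Inner(InnerDiag);
  assert_eq!(d.code(), "inner-code");
  assert_eq!(d.hint(), Some("inner hint"));
}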

View file

@@ -14,7 +14,6 @@ use base64::Engine;
 use deno_ast::ModuleSpecifier;
 use deno_config::deno_json::ConfigFile;
 use deno_config::workspace::JsrPackageConfig;
-use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::Workspace;
 use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;
@@ -44,8 +43,6 @@ use crate::cache::ParsedSourceCache;
 use crate::factory::CliFactory;
 use crate::graph_util::ModuleGraphCreator;
 use crate::http_util::HttpClient;
-use crate::resolver::CliSloppyImportsResolver;
-use crate::resolver::SloppyImportsCachedFs;
 use crate::tools::check::CheckOptions;
 use crate::tools::lint::collect_no_slow_type_diagnostics;
 use crate::tools::registry::diagnostics::PublishDiagnostic;
@@ -123,19 +120,8 @@ pub async fn publish(
   }
   let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
-    if cli_options.unstable_sloppy_imports() {
-      Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
-        cli_factory.fs().clone(),
-      )))
-    } else {
-      None
-    },
-    cli_options
-      .create_workspace_resolver(
-        cli_factory.file_fetcher()?,
-        PackageJsonDepResolution::Enabled,
-      )
-      .await?,
+    cli_factory.sloppy_imports_resolver()?.cloned(),
+    cli_factory.workspace_resolver().await?.clone(),
     cli_options.unstable_bare_node_builtins(),
   ));

View file

@@ -19,8 +19,7 @@ use deno_core::futures::FutureExt;
 use deno_core::futures::StreamExt;
 use deno_core::serde_json;
 use deno_graph::FillFromLockfileOptions;
-use deno_package_json::PackageJsonDepValue;
-use deno_package_json::PackageJsonDepValueParseError;
+use deno_package_json::PackageJsonDepsMap;
 use deno_package_json::PackageJsonRc;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_semver::jsr::JsrPackageReqReference;
@@ -32,7 +31,6 @@ use deno_semver::VersionReq;
 use import_map::ImportMap;
 use import_map::ImportMapWithDiagnostics;
 use import_map::SpecifierMapEntry;
-use indexmap::IndexMap;
 use tokio::sync::Semaphore;
 use crate::args::CliLockfile;
@@ -269,94 +267,6 @@ enum PackageJsonDepKind {
   Dev,
 }
-type PackageJsonDeps = IndexMap<
-  String,
-  Result<
-    (PackageJsonDepKind, PackageJsonDepValue),
-    PackageJsonDepValueParseError,
-  >,
->;
-
-/// Resolve the package.json's dependencies.
-// TODO(nathanwhit): Remove once we update deno_package_json with dev deps split out
-fn resolve_local_package_json_deps(
-  package_json: &PackageJsonRc,
-) -> PackageJsonDeps {
-  /// Gets the name and raw version constraint for a registry info or
-  /// package.json dependency entry taking into account npm package aliases.
-  fn parse_dep_entry_name_and_raw_version<'a>(
-    key: &'a str,
-    value: &'a str,
-  ) -> (&'a str, &'a str) {
-    if let Some(package_and_version) = value.strip_prefix("npm:") {
-      if let Some((name, version)) = package_and_version.rsplit_once('@') {
-        // if empty, then the name was scoped and there's no version
-        if name.is_empty() {
-          (package_and_version, "*")
-        } else {
-          (name, version)
-        }
-      } else {
-        (package_and_version, "*")
-      }
-    } else {
-      (key, value)
-    }
-  }
-
-  fn parse_entry(
-    key: &str,
-    value: &str,
-  ) -> Result<PackageJsonDepValue, PackageJsonDepValueParseError> {
-    if let Some(workspace_key) = value.strip_prefix("workspace:") {
-      let version_req = VersionReq::parse_from_npm(workspace_key)?;
-      return Ok(PackageJsonDepValue::Workspace(version_req));
-    }
-    if value.starts_with("file:")
-      || value.starts_with("git:")
-      || value.starts_with("http:")
-      || value.starts_with("https:")
-    {
-      return Err(PackageJsonDepValueParseError::Unsupported {
-        scheme: value.split(':').next().unwrap().to_string(),
-      });
-    }
-    let (name, version_req) = parse_dep_entry_name_and_raw_version(key, value);
-    let result = VersionReq::parse_from_npm(version_req);
-    match result {
-      Ok(version_req) => Ok(PackageJsonDepValue::Req(PackageReq {
-        name: name.to_string(),
-        version_req,
-      })),
-      Err(err) => Err(PackageJsonDepValueParseError::VersionReq(err)),
-    }
-  }
-
-  fn insert_deps(
-    deps: Option<&IndexMap<String, String>>,
-    result: &mut PackageJsonDeps,
-    kind: PackageJsonDepKind,
-  ) {
-    if let Some(deps) = deps {
-      for (key, value) in deps {
-        result.entry(key.to_string()).or_insert_with(|| {
-          parse_entry(key, value).map(|entry| (kind, entry))
-        });
-      }
-    }
-  }
-
-  let deps = package_json.dependencies.as_ref();
-  let dev_deps = package_json.dev_dependencies.as_ref();
-  let mut result = IndexMap::new();
-
-  // favors the deps over dev_deps
-  insert_deps(deps, &mut result, PackageJsonDepKind::Normal);
-  insert_deps(dev_deps, &mut result, PackageJsonDepKind::Dev);
-
-  result
-}
-
 fn add_deps_from_deno_json(
   deno_json: &Arc<ConfigFile>,
   mut filter: impl DepFilter,
@@ -406,40 +316,64 @@ fn add_deps_from_deno_json(
 fn add_deps_from_package_json(
   package_json: &PackageJsonRc,
-  mut filter: impl DepFilter,
+  filter: impl DepFilter,
   deps: &mut Vec<Dep>,
 ) {
-  let package_json_deps = resolve_local_package_json_deps(package_json);
-  for (k, v) in package_json_deps {
-    let (package_dep_kind, v) = match v {
-      Ok((k, v)) => (k, v),
-      Err(e) => {
-        log::warn!("bad package json dep value: {e}");
-        continue;
-      }
-    };
-    match v {
-      deno_package_json::PackageJsonDepValue::Req(req) => {
-        let alias = k.as_str();
-        let alias = (alias != req.name).then(|| alias.to_string());
-        if !filter.should_include(alias.as_deref(), &req, DepKind::Npm) {
-          continue;
-        }
-        let id = DepId(deps.len());
-        deps.push(Dep {
-          id,
-          kind: DepKind::Npm,
-          location: DepLocation::PackageJson(
-            package_json.clone(),
-            KeyPath::from_parts([package_dep_kind.into(), k.into()]),
-          ),
-          req,
-          alias,
-        })
-      }
-      deno_package_json::PackageJsonDepValue::Workspace(_) => continue,
-    }
-  }
+  let package_json_deps = package_json.resolve_local_package_json_deps();
+
+  fn iterate(
+    package_json: &PackageJsonRc,
+    mut filter: impl DepFilter,
+    package_dep_kind: PackageJsonDepKind,
+    package_json_deps: PackageJsonDepsMap,
+    deps: &mut Vec<Dep>,
+  ) {
+    for (k, v) in package_json_deps {
+      let v = match v {
+        Ok(v) => v,
+        Err(e) => {
+          log::warn!("bad package json dep value: {e}");
+          continue;
+        }
+      };
+      match v {
+        deno_package_json::PackageJsonDepValue::Req(req) => {
+          let alias = k.as_str();
+          let alias = (alias != req.name).then(|| alias.to_string());
+          if !filter.should_include(alias.as_deref(), &req, DepKind::Npm) {
+            continue;
+          }
+          let id = DepId(deps.len());
+          deps.push(Dep {
+            id,
+            kind: DepKind::Npm,
+            location: DepLocation::PackageJson(
+              package_json.clone(),
+              KeyPath::from_parts([package_dep_kind.into(), k.into()]),
+            ),
+            req,
+            alias,
+          })
+        }
+        deno_package_json::PackageJsonDepValue::Workspace(_) => continue,
+      }
+    }
+  }
+
+  iterate(
+    package_json,
+    filter,
+    PackageJsonDepKind::Normal,
+    package_json_deps.dependencies,
+    deps,
+  );
+  iterate(
+    package_json,
+    filter,
+    PackageJsonDepKind::Dev,
+    package_json_deps.dev_dependencies,
+    deps,
+  );
 }
 fn deps_from_workspace(
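
Note: the deleted resolve_local_package_json_deps moved into deno_package_json as a method that returns separate dependency and dev-dependency maps. Its npm-alias parsing is the subtle part; this standalone sketch reproduces the helper that used to live here (logic copied from the removed lines above) together with worked examples:

// Splits a dependency entry into (package name, raw version constraint),
// honoring "npm:" alias specifiers.
fn parse_dep_entry_name_and_raw_version<'a>(
  key: &'a str,
  value: &'a str,
) -> (&'a str, &'a str) {
  if let Some(package_and_version) = value.strip_prefix("npm:") {
    if let Some((name, version)) = package_and_version.rsplit_once('@') {
      // if empty, the name was scoped (e.g. "npm:@scope/pkg") and
      // there is no version after the alias
      if name.is_empty() {
        (package_and_version, "*")
      } else {
        (name, version)
      }
    } else {
      (package_and_version, "*")
    }
  } else {
    (key, value)
  }
}

fn main() {
  // "alias": "npm:real-pkg@^1.0.0" maps the alias to the real package
  assert_eq!(
    parse_dep_entry_name_and_raw_version("alias", "npm:real-pkg@^1.0.0"),
    ("real-pkg", "^1.0.0")
  );
  // a bare scoped specifier carries no version: fall back to "*"
  assert_eq!(
    parse_dep_entry_name_and_raw_version("alias", "npm:@scope/pkg"),
    ("@scope/pkg", "*")
  );
  // plain entries use the key as the package name
  assert_eq!(
    parse_dep_entry_name_and_raw_version("pkg", "^2.1.0"),
    ("pkg", "^2.1.0")
  );
}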

View file

@@ -1,19 +1,35 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+use std::borrow::Cow;
+use std::sync::Arc;
+
+use deno_ast::diagnostics::Diagnostic;
+use deno_ast::diagnostics::DiagnosticLevel;
+use deno_ast::diagnostics::DiagnosticLocation;
+use deno_ast::diagnostics::DiagnosticSnippet;
+use deno_ast::diagnostics::DiagnosticSnippetHighlight;
+use deno_ast::diagnostics::DiagnosticSnippetHighlightStyle;
+use deno_ast::diagnostics::DiagnosticSourcePos;
+use deno_ast::diagnostics::DiagnosticSourceRange;
 use deno_ast::ParsedSource;
 use deno_ast::SourceRange;
 use deno_ast::SourceTextInfo;
+use deno_ast::SourceTextProvider;
 use deno_config::workspace::MappedResolution;
 use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::WorkspaceResolver;
+use deno_core::anyhow;
 use deno_core::ModuleSpecifier;
 use deno_graph::DependencyDescriptor;
 use deno_graph::DynamicTemplatePart;
 use deno_graph::ParserModuleAnalyzer;
 use deno_graph::TypeScriptReference;
 use deno_package_json::PackageJsonDepValue;
+use deno_package_json::PackageJsonDepWorkspaceReq;
 use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
 use deno_runtime::deno_node::is_builtin_node_module;
+use deno_semver::Version;
+use deno_semver::VersionReq;
 use crate::resolver::CliSloppyImportsResolver;
@@ -24,34 +40,163 @@ pub enum SpecifierUnfurlerDiagnostic {
     text_info: SourceTextInfo,
     range: SourceRange,
   },
+  ResolvingNpmWorkspacePackage {
+    specifier: ModuleSpecifier,
+    package_name: String,
+    text_info: SourceTextInfo,
+    range: SourceRange,
+    reason: String,
+  },
 }
-impl SpecifierUnfurlerDiagnostic {
-  pub fn code(&self) -> &'static str {
-    match self {
-      Self::UnanalyzableDynamicImport { .. } => "unanalyzable-dynamic-import",
-    }
-  }
-
-  pub fn message(&self) -> &'static str {
-    match self {
-      Self::UnanalyzableDynamicImport { .. } => {
-        "unable to analyze dynamic import"
-      }
-    }
-  }
-}
+impl Diagnostic for SpecifierUnfurlerDiagnostic {
+  fn level(&self) -> DiagnosticLevel {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => {
+        DiagnosticLevel::Warning
+      }
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => {
+        DiagnosticLevel::Error
+      }
+    }
+  }
+
+  fn code(&self) -> Cow<'_, str> {
+    match self {
+      Self::UnanalyzableDynamicImport { .. } => "unanalyzable-dynamic-import",
+      Self::ResolvingNpmWorkspacePackage { .. } => "npm-workspace-package",
+    }
+    .into()
+  }
+
+  fn message(&self) -> Cow<'_, str> {
+    match self {
+      Self::UnanalyzableDynamicImport { .. } => {
+        "unable to analyze dynamic import".into()
+      }
+      Self::ResolvingNpmWorkspacePackage {
+        package_name,
+        reason,
+        ..
+      } => format!(
+        "failed resolving npm workspace package '{}': {}",
+        package_name, reason
+      )
+      .into(),
+    }
+  }
+
+  fn location(&self) -> deno_ast::diagnostics::DiagnosticLocation {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
+        specifier,
+        text_info,
+        range,
+      } => DiagnosticLocation::ModulePosition {
+        specifier: Cow::Borrowed(specifier),
+        text_info: Cow::Borrowed(text_info),
+        source_pos: DiagnosticSourcePos::SourcePos(range.start),
+      },
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
+        specifier,
+        text_info,
+        range,
+        ..
+      } => DiagnosticLocation::ModulePosition {
+        specifier: Cow::Borrowed(specifier),
+        text_info: Cow::Borrowed(text_info),
+        source_pos: DiagnosticSourcePos::SourcePos(range.start),
+      },
+    }
+  }
+
+  fn snippet(&self) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
+        text_info,
+        range,
+        ..
+      } => Some(DiagnosticSnippet {
+        source: Cow::Borrowed(text_info),
+        highlights: vec![DiagnosticSnippetHighlight {
+          style: DiagnosticSnippetHighlightStyle::Warning,
+          range: DiagnosticSourceRange {
+            start: DiagnosticSourcePos::SourcePos(range.start),
+            end: DiagnosticSourcePos::SourcePos(range.end),
+          },
+          description: Some("the unanalyzable dynamic import".into()),
+        }],
+      }),
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
+        text_info,
+        range,
+        ..
+      } => Some(DiagnosticSnippet {
+        source: Cow::Borrowed(text_info),
+        highlights: vec![DiagnosticSnippetHighlight {
+          style: DiagnosticSnippetHighlightStyle::Warning,
+          range: DiagnosticSourceRange {
+            start: DiagnosticSourcePos::SourcePos(range.start),
+            end: DiagnosticSourcePos::SourcePos(range.end),
+          },
+          description: Some("the unresolved import".into()),
+        }],
+      }),
+    }
+  }
+
+  fn hint(&self) -> Option<Cow<'_, str>> {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => {
+        None
+      }
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => Some(
+        "make sure the npm workspace package is resolvable and has a version field in its package.json".into()
+      ),
+    }
+  }
+
+  fn snippet_fixed(
+    &self,
+  ) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
+    None
+  }
+
+  fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
+    match self {
+      SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => Cow::Borrowed(&[
+        Cow::Borrowed("after publishing this package, imports from the local import map / package.json do not work"),
+        Cow::Borrowed("dynamic imports that can not be analyzed at publish time will not be rewritten automatically"),
+        Cow::Borrowed("make sure the dynamic import is resolvable at runtime without an import map / package.json")
+      ]),
+      SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => {
+        Cow::Borrowed(&[])
+      },
+    }
+  }
+
+  fn docs_url(&self) -> Option<Cow<'_, str>> {
+    None
+  }
+}
+
+enum UnfurlSpecifierError {
+  Workspace {
+    package_name: String,
+    reason: String,
+  },
+}
 pub struct SpecifierUnfurler {
-  sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
-  workspace_resolver: WorkspaceResolver,
+  sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
+  workspace_resolver: Arc<WorkspaceResolver>,
   bare_node_builtins: bool,
 }
 impl SpecifierUnfurler {
   pub fn new(
-    sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
-    workspace_resolver: WorkspaceResolver,
+    sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
+    workspace_resolver: Arc<WorkspaceResolver>,
     bare_node_builtins: bool,
   ) -> Self {
     debug_assert_eq!(
@@ -65,11 +210,45 @@ impl SpecifierUnfurler {
     }
   }
+  fn unfurl_specifier_reporting_diagnostic(
+    &self,
+    referrer: &ModuleSpecifier,
+    specifier: &str,
+    text_info: &SourceTextInfo,
+    range: &deno_graph::PositionRange,
+    diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
+  ) -> Option<String> {
+    match self.unfurl_specifier(referrer, specifier) {
+      Ok(maybe_unfurled) => maybe_unfurled,
+      Err(diagnostic) => match diagnostic {
+        UnfurlSpecifierError::Workspace {
+          package_name,
+          reason,
+        } => {
+          let range = to_range(text_info, range);
+          diagnostic_reporter(
+            SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
+              specifier: referrer.clone(),
+              package_name,
+              text_info: text_info.clone(),
+              range: SourceRange::new(
+                text_info.start_pos() + range.start,
+                text_info.start_pos() + range.end,
+              ),
+              reason,
+            },
+          );
+          None
+        }
+      },
+    }
+  }
+
   fn unfurl_specifier(
     &self,
     referrer: &ModuleSpecifier,
     specifier: &str,
-  ) -> Option<String> {
+  ) -> Result<Option<String>, UnfurlSpecifierError> {
     let resolved = if let Ok(resolved) =
       self.workspace_resolver.resolve(specifier, referrer)
     {
@@ -120,8 +299,40 @@ impl SpecifierUnfurler {
           ))
           .ok()
         }
-        PackageJsonDepValue::Workspace(version_req) => {
-          // todo(#24612): consider warning or error when this is also a jsr package?
+        PackageJsonDepValue::Workspace(workspace_version_req) => {
+          let version_req = match workspace_version_req {
+            PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
+              Cow::Borrowed(version_req)
+            }
+            PackageJsonDepWorkspaceReq::Caret => {
+              let version = self
+                .find_workspace_npm_dep_version(alias)
+                .map_err(|err| UnfurlSpecifierError::Workspace {
+                  package_name: alias.to_string(),
+                  reason: err.to_string(),
+                })?;
+              // version was validated, so ok to unwrap
+              Cow::Owned(
+                VersionReq::parse_from_npm(&format!("^{}", version))
+                  .unwrap(),
+              )
+            }
+            PackageJsonDepWorkspaceReq::Tilde => {
+              let version = self
+                .find_workspace_npm_dep_version(alias)
+                .map_err(|err| UnfurlSpecifierError::Workspace {
+                  package_name: alias.to_string(),
+                  reason: err.to_string(),
+                })?;
+              // version was validated, so ok to unwrap
+              Cow::Owned(
+                VersionReq::parse_from_npm(&format!("~{}", version))
+                  .unwrap(),
+              )
+            }
+          };
+          // todo(#24612): warn when this is also a jsr package telling
+          // people to map the specifiers in the import map
           ModuleSpecifier::parse(&format!(
             "npm:{}@{}{}",
             alias,
@@ -151,10 +362,14 @@ impl SpecifierUnfurler {
       None if self.bare_node_builtins && is_builtin_node_module(specifier) => {
         format!("node:{specifier}").parse().unwrap()
       }
-      None => ModuleSpecifier::options()
+      None => match ModuleSpecifier::options()
         .base_url(Some(referrer))
         .parse(specifier)
-        .ok()?,
+        .ok()
+      {
+        Some(value) => value,
+        None => return Ok(None),
+      },
     };
     // TODO(lucacasonato): this requires integration in deno_graph first
     // let resolved = if let Ok(specifier) =
@@ -188,7 +403,7 @@ impl SpecifierUnfurler {
     };
     let relative_resolved = relative_url(&resolved, referrer);
     if relative_resolved == specifier {
-      None // nothing to unfurl
+      Ok(None) // nothing to unfurl
     } else {
       log::debug!(
         "Unfurled specifier: {} from {} -> {}",
@@ -196,7 +411,29 @@ impl SpecifierUnfurler {
         referrer,
         relative_resolved
       );
-      Some(relative_resolved)
-    }
-  }
+      Ok(Some(relative_resolved))
+    }
+  }
+
+  fn find_workspace_npm_dep_version(
+    &self,
+    pkg_name: &str,
+  ) -> Result<Version, anyhow::Error> {
+    // todo(#24612): warn when this is also a jsr package telling
+    // people to map the specifiers in the import map
+    let pkg_json = self
+      .workspace_resolver
+      .package_jsons()
+      .find(|pkg| pkg.name.as_deref() == Some(pkg_name))
+      .ok_or_else(|| {
+        anyhow::anyhow!("unable to find npm package in workspace")
+      })?;
+    if let Some(version) = &pkg_json.version {
+      Ok(Version::parse_from_npm(version)?)
+    } else {
+      Err(anyhow::anyhow!(
+        "missing version in package.json of npm package",
+      ))
+    }
+  }
@@ -208,6 +445,7 @@ impl SpecifierUnfurler {
     text_info: &SourceTextInfo,
     dep: &deno_graph::DynamicDependencyDescriptor,
     text_changes: &mut Vec<deno_ast::TextChange>,
+    diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
   ) -> bool {
     match &dep.argument {
       deno_graph::DynamicArgument::String(specifier) => {
@@ -217,8 +455,14 @@ impl SpecifierUnfurler {
         let Some(relative_index) = maybe_relative_index else {
           return true; // always say it's analyzable for a string
         };
-        let unfurled = self.unfurl_specifier(module_url, specifier);
-        if let Some(unfurled) = unfurled {
+        let maybe_unfurled = self.unfurl_specifier_reporting_diagnostic(
+          module_url,
+          specifier,
+          text_info,
+          &dep.argument_range,
+          diagnostic_reporter,
+        );
+        if let Some(unfurled) = maybe_unfurled {
           let start = range.start + relative_index;
           text_changes.push(deno_ast::TextChange {
             range: start..start + specifier.len(),
@@ -238,7 +482,13 @@ impl SpecifierUnfurler {
         if !specifier.ends_with('/') {
           return false;
         }
-        let unfurled = self.unfurl_specifier(module_url, specifier);
+        let unfurled = self.unfurl_specifier_reporting_diagnostic(
+          module_url,
+          specifier,
+          text_info,
+          &dep.argument_range,
+          diagnostic_reporter,
+        );
         let Some(unfurled) = unfurled else {
          return true; // nothing to unfurl
         };
@@ -280,8 +530,15 @@ impl SpecifierUnfurler {
     let analyze_specifier =
       |specifier: &str,
       range: &deno_graph::PositionRange,
-       text_changes: &mut Vec<deno_ast::TextChange>| {
-        if let Some(unfurled) = self.unfurl_specifier(url, specifier) {
+       text_changes: &mut Vec<deno_ast::TextChange>,
+       diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic)| {
+        if let Some(unfurled) = self.unfurl_specifier_reporting_diagnostic(
+          url,
+          specifier,
+          text_info,
+          range,
+          diagnostic_reporter,
+        ) {
          text_changes.push(deno_ast::TextChange {
            range: to_range(text_info, range),
            new_text: unfurled,
@@ -295,11 +552,17 @@ impl SpecifierUnfurler {
           &dep.specifier,
           &dep.specifier_range,
           &mut text_changes,
+          diagnostic_reporter,
         );
       }
       DependencyDescriptor::Dynamic(dep) => {
-        let success =
-          self.try_unfurl_dynamic_dep(url, text_info, dep, &mut text_changes);
+        let success = self.try_unfurl_dynamic_dep(
+          url,
+          text_info,
+          dep,
+          &mut text_changes,
+          diagnostic_reporter,
+        );
         if !success {
           let start_pos = text_info.line_start(dep.argument_range.start.line)
@@ -326,6 +589,7 @@ impl SpecifierUnfurler {
         &specifier_with_range.text,
         &specifier_with_range.range,
         &mut text_changes,
+        diagnostic_reporter,
       );
     }
     for jsdoc in &module_info.jsdoc_imports {
@@ -333,6 +597,7 @@ impl SpecifierUnfurler {
         &jsdoc.specifier.text,
         &jsdoc.specifier.range,
         &mut text_changes,
+        diagnostic_reporter,
       );
     }
     if let Some(specifier_with_range) = &module_info.jsx_import_source {
@@ -340,6 +605,7 @@ impl SpecifierUnfurler {
       &specifier_with_range.text,
       &specifier_with_range.range,
       &mut text_changes,
+      diagnostic_reporter,
     );
   }
@@ -458,10 +724,10 @@ mod tests {
     );
     let fs = Arc::new(RealFs);
     let unfurler = SpecifierUnfurler::new(
-      Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
-        fs,
-      ))),
-      workspace_resolver,
+      Some(Arc::new(CliSloppyImportsResolver::new(
+        SloppyImportsCachedFs::new(fs),
+      ))),
+      Arc::new(workspace_resolver),
       true,
     );
@@ -547,4 +813,114 @@ const warn2 = await import(`${expr}`);
     assert_eq!(unfurled_source, expected_source);
     }
   }
+
+  #[test]
+  fn test_unfurling_npm_dep_workspace_specifier() {
+    let cwd = testdata_path().join("unfurl").to_path_buf();
+
+    let pkg_json_add = PackageJson::load_from_value(
+      cwd.join("add/package.json"),
+      json!({ "name": "add", "version": "0.1.0", }),
+    );
+    let pkg_json_subtract = PackageJson::load_from_value(
+      cwd.join("subtract/package.json"),
+      json!({ "name": "subtract", "version": "0.2.0", }),
+    );
+    let pkg_json_publishing = PackageJson::load_from_value(
+      cwd.join("publish/package.json"),
+      json!({
+        "name": "@denotest/main",
+        "version": "1.0.0",
+        "dependencies": {
+          "add": "workspace:~",
+          "subtract": "workspace:^",
+          "non-existent": "workspace:~",
+        }
+      }),
+    );
+    let root_pkg_json = PackageJson::load_from_value(
+      cwd.join("package.json"),
+      json!({ "workspaces": ["./publish", "./subtract", "./add"] }),
+    );
+    let workspace_resolver = WorkspaceResolver::new_raw(
+      Arc::new(ModuleSpecifier::from_directory_path(&cwd).unwrap()),
+      None,
+      vec![ResolverWorkspaceJsrPackage {
+        is_patch: false,
+        base: ModuleSpecifier::from_directory_path(
+          cwd.join("publish/jsr.json"),
+        )
+        .unwrap(),
+        name: "@denotest/main".to_string(),
+        version: Some(Version::parse_standard("1.0.0").unwrap()),
+        exports: IndexMap::from([(".".to_string(), "mod.ts".to_string())]),
+      }],
+      vec![
+        Arc::new(root_pkg_json),
+        Arc::new(pkg_json_add),
+        Arc::new(pkg_json_subtract),
+        Arc::new(pkg_json_publishing),
+      ],
+      deno_config::workspace::PackageJsonDepResolution::Enabled,
+    );
+    let fs = Arc::new(RealFs);
+    let unfurler = SpecifierUnfurler::new(
+      Some(Arc::new(CliSloppyImportsResolver::new(
+        SloppyImportsCachedFs::new(fs),
+      ))),
+      Arc::new(workspace_resolver),
+      true,
+    );
+
+    {
+      let source_code = r#"import add from "add";
+import subtract from "subtract";
+console.log(add, subtract);
+"#;
+      let specifier =
+        ModuleSpecifier::from_file_path(cwd.join("publish").join("mod.ts"))
+          .unwrap();
+      let source = parse_ast(&specifier, source_code);
+      let mut d = Vec::new();
+      let mut reporter = |diagnostic| d.push(diagnostic);
+      let unfurled_source = unfurler.unfurl(&specifier, &source, &mut reporter);
+      assert_eq!(d.len(), 0);
+      // it will inline the version
+      let expected_source = r#"import add from "npm:add@~0.1.0";
+import subtract from "npm:subtract@^0.2.0";
+console.log(add, subtract);
+"#;
+      assert_eq!(unfurled_source, expected_source);
+    }
+
+    {
+      let source_code = r#"import nonExistent from "non-existent";
+console.log(nonExistent);
+"#;
+      let specifier =
+        ModuleSpecifier::from_file_path(cwd.join("publish").join("other.ts"))
+          .unwrap();
+      let source = parse_ast(&specifier, source_code);
+      let mut d = Vec::new();
+      let mut reporter = |diagnostic| d.push(diagnostic);
+      let unfurled_source = unfurler.unfurl(&specifier, &source, &mut reporter);
+      assert_eq!(d.len(), 1);
+      match &d[0] {
+        SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
+          package_name,
+          reason,
+          ..
+        } => {
+          assert_eq!(package_name, "non-existent");
+          assert_eq!(reason, "unable to find npm package in workspace");
+        }
+        _ => unreachable!(),
+      }
+      // won't make any changes, but the above will be a fatal error
+      assert!(matches!(d[0].level(), DiagnosticLevel::Error));
+      assert_eq!(unfurled_source, source_code);
+    }
+  }
 }
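
Note: the new Caret and Tilde arms above inline the workspace member's own version, so a "workspace:^" or "workspace:~" dependency publishes as "^<version>" or "~<version>" pulled from that member's package.json. A reduced sketch of just that rewriting rule, with simplified stand-in types rather than the deno_semver and deno_package_json APIs:

// Simplified model of the workspace requirement kinds shown above.
enum WorkspaceReq {
  VersionReq(String), // e.g. "workspace:1.2.3" carries its own requirement
  Caret,              // "workspace:^"
  Tilde,              // "workspace:~"
}

fn unfurl_workspace_req(req: WorkspaceReq, workspace_version: &str) -> String {
  match req {
    WorkspaceReq::VersionReq(req) => req,
    WorkspaceReq::Caret => format!("^{}", workspace_version),
    WorkspaceReq::Tilde => format!("~{}", workspace_version),
  }
}

fn main() {
  // mirrors the new test: add@0.1.0 with "workspace:~" unfurls to
  // "npm:add@~0.1.0", subtract@0.2.0 with "workspace:^" to "npm:subtract@^0.2.0"
  assert_eq!(unfurl_workspace_req(WorkspaceReq::Tilde, "0.1.0"), "~0.1.0");
  assert_eq!(unfurl_workspace_req(WorkspaceReq::Caret, "0.2.0"), "^0.2.0");
}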

View file

@@ -51,19 +51,6 @@ pub fn get_extension(file_path: &Path) -> Option<String> {
     .map(|e| e.to_lowercase());
 }
-pub fn get_atomic_dir_path(file_path: &Path) -> PathBuf {
-  let rand = gen_rand_path_component();
-  let new_file_name = format!(
-    ".{}_{}",
-    file_path
-      .file_name()
-      .map(|f| f.to_string_lossy())
-      .unwrap_or(Cow::Borrowed("")),
-    rand
-  );
-  file_path.with_file_name(new_file_name)
-}
-
 pub fn get_atomic_file_path(file_path: &Path) -> PathBuf {
   let rand = gen_rand_path_component();
   let extension = format!("{rand}.tmp");

View file

@@ -3,11 +3,9 @@
 mod async_flag;
 mod sync_read_async_write_lock;
 mod task_queue;
-mod value_creator;
 pub use async_flag::AsyncFlag;
 pub use deno_core::unsync::sync::AtomicFlag;
 pub use sync_read_async_write_lock::SyncReadAsyncWriteLock;
 pub use task_queue::TaskQueue;
 pub use task_queue::TaskQueuePermit;
-pub use value_creator::MultiRuntimeAsyncValueCreator;

View file

@ -1,213 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::sync::Arc;
use deno_core::futures::future::BoxFuture;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::future::Shared;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use tokio::task::JoinError;
type JoinResult<TResult> = Result<TResult, Arc<JoinError>>;
type CreateFutureFn<TResult> =
Box<dyn Fn() -> LocalBoxFuture<'static, TResult> + Send + Sync>;
#[derive(Debug)]
struct State<TResult> {
retry_index: usize,
future: Option<Shared<BoxFuture<'static, JoinResult<TResult>>>>,
}
/// Attempts to create a shared value asynchronously on one tokio runtime while
/// many runtimes are requesting the value.
///
/// This is only useful when the value needs to get created once across
/// many runtimes.
///
/// This handles the case where the tokio runtime creating the value goes down
/// while another one is waiting on the value.
pub struct MultiRuntimeAsyncValueCreator<TResult: Send + Clone + 'static> {
create_future: CreateFutureFn<TResult>,
state: Mutex<State<TResult>>,
}
impl<TResult: Send + Clone + 'static> std::fmt::Debug
for MultiRuntimeAsyncValueCreator<TResult>
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("MultiRuntimeAsyncValueCreator").finish()
}
}
impl<TResult: Send + Clone + 'static> MultiRuntimeAsyncValueCreator<TResult> {
pub fn new(create_future: CreateFutureFn<TResult>) -> Self {
Self {
state: Mutex::new(State {
retry_index: 0,
future: None,
}),
create_future,
}
}
pub async fn get(&self) -> TResult {
let (mut future, mut retry_index) = {
let mut state = self.state.lock();
let future = match &state.future {
Some(future) => future.clone(),
None => {
let future = self.create_shared_future();
state.future = Some(future.clone());
future
}
};
(future, state.retry_index)
};
loop {
let result = future.await;
match result {
Ok(result) => return result,
Err(join_error) => {
if join_error.is_cancelled() {
let mut state = self.state.lock();
if state.retry_index == retry_index {
// we were the first one to retry, so create a new future
// that we'll run from the current runtime
state.retry_index += 1;
state.future = Some(self.create_shared_future());
}
retry_index = state.retry_index;
future = state.future.as_ref().unwrap().clone();
// just in case we're stuck in a loop
if retry_index > 1000 {
panic!("Something went wrong.") // should never happen
}
} else {
panic!("{}", join_error);
}
}
}
}
}
fn create_shared_future(
&self,
) -> Shared<BoxFuture<'static, JoinResult<TResult>>> {
let future = (self.create_future)();
deno_core::unsync::spawn(future)
.map(|result| result.map_err(Arc::new))
.boxed()
.shared()
}
}
#[cfg(test)]
mod test {
use deno_core::unsync::spawn;
use super::*;
#[tokio::test]
async fn single_runtime() {
let value_creator = MultiRuntimeAsyncValueCreator::new(Box::new(|| {
async { 1 }.boxed_local()
}));
let value = value_creator.get().await;
assert_eq!(value, 1);
}
#[test]
fn multi_runtimes() {
let value_creator =
Arc::new(MultiRuntimeAsyncValueCreator::new(Box::new(|| {
async {
tokio::task::yield_now().await;
1
}
.boxed_local()
})));
let handles = (0..3)
.map(|_| {
let value_creator = value_creator.clone();
std::thread::spawn(|| {
create_runtime().block_on(async move { value_creator.get().await })
})
})
.collect::<Vec<_>>();
for handle in handles {
assert_eq!(handle.join().unwrap(), 1);
}
}
#[test]
fn multi_runtimes_first_never_finishes() {
let is_first_run = Arc::new(Mutex::new(true));
let (tx, rx) = std::sync::mpsc::channel::<()>();
let value_creator = Arc::new(MultiRuntimeAsyncValueCreator::new({
let is_first_run = is_first_run.clone();
Box::new(move || {
let is_first_run = is_first_run.clone();
let tx = tx.clone();
async move {
let is_first_run = {
let mut is_first_run = is_first_run.lock();
let initial_value = *is_first_run;
*is_first_run = false;
tx.send(()).unwrap();
initial_value
};
if is_first_run {
tokio::time::sleep(std::time::Duration::from_millis(30_000)).await;
panic!("TIMED OUT"); // should not happen
} else {
tokio::task::yield_now().await;
}
1
}
.boxed_local()
})
}));
std::thread::spawn({
let value_creator = value_creator.clone();
let is_first_run = is_first_run.clone();
move || {
create_runtime().block_on(async {
let value_creator = value_creator.clone();
// spawn a task that will never complete
spawn(async move { value_creator.get().await });
// wait for the task to set is_first_run to false
while *is_first_run.lock() {
tokio::time::sleep(std::time::Duration::from_millis(20)).await;
}
// now exit the runtime while the value_creator is still pending
})
}
});
let handle = {
let value_creator = value_creator.clone();
std::thread::spawn(|| {
create_runtime().block_on(async move {
let value_creator = value_creator.clone();
rx.recv().unwrap();
// even though the other runtime shutdown, this get() should
// recover and still get the value
value_creator.get().await
})
})
};
assert_eq!(handle.join().unwrap(), 1);
}
fn create_runtime() -> tokio::runtime::Runtime {
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
}
}
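
For context, the deleted MultiRuntimeAsyncValueCreator deduplicated async initialization across tokio runtimes by handing every caller a clone of one shared future and rebuilding it when the owning runtime shut down mid-flight. A much-reduced, single-runtime sketch of the shared-future core it was built on (using the futures and tokio crates; the cross-runtime retry logic that was the type's real point is omitted):

use futures::future::{BoxFuture, FutureExt, Shared};

// One future, many awaiters: every clone of a Shared future resolves to
// the same value, and the underlying future runs only once.
fn make_shared() -> Shared<BoxFuture<'static, u64>> {
  async {
    println!("computed once");
    42u64
  }
  .boxed()
  .shared()
}

#[tokio::main]
async fn main() {
  let fut = make_shared();
  let (a, b) = tokio::join!(fut.clone(), fut);
  assert_eq!(a, 42);
  assert_eq!(b, 42);
}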

View file

@@ -872,12 +872,10 @@ impl deno_package_json::fs::DenoPkgJsonFs for DenoFsNodeResolverEnv {
   fn read_to_string_lossy(
     &self,
     path: &std::path::Path,
-  ) -> Result<String, std::io::Error> {
+  ) -> Result<Cow<'static, str>, std::io::Error> {
     self
       .fs
       .read_text_file_lossy_sync(path, None)
-      // todo(https://github.com/denoland/deno_package_json/pull/9): don't clone
-      .map(|text| text.into_owned())
       .map_err(|err| err.into_io_error())
   }
 }
@@ -888,12 +886,10 @@ impl<'a> deno_package_json::fs::DenoPkgJsonFs for DenoPkgJsonFsAdapter<'a> {
   fn read_to_string_lossy(
     &self,
     path: &Path,
-  ) -> Result<String, std::io::Error> {
+  ) -> Result<Cow<'static, str>, std::io::Error> {
     self
       .0
       .read_text_file_lossy_sync(path, None)
-      // todo(https://github.com/denoland/deno_package_json/pull/9): don't clone
-      .map(|text| text.into_owned())
       .map_err(|err| err.into_io_error())
   }
 }
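
Note: returning Cow<'static, str> lets these adapters hand the underlying buffer through without the into_owned() copy the todo comments complained about. A tiny standalone illustration of why a lossy read pairs naturally with Cow (std-only, not the deno_fs API; the sketch borrows from its input rather than returning 'static):

use std::borrow::Cow;

// from_utf8_lossy only allocates when invalid UTF-8 must be replaced;
// valid input is passed through borrowed, with zero copies.
fn read_to_string_lossy(bytes: &[u8]) -> Cow<'_, str> {
  String::from_utf8_lossy(bytes)
}

fn main() {
  assert!(matches!(read_to_string_lossy(b"plain ascii"), Cow::Borrowed(_)));
  assert!(matches!(read_to_string_lossy(b"bad \xFF utf8"), Cow::Owned(_)));
}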

View file

@@ -4,12 +4,13 @@
 // deno-lint-ignore-file prefer-primordials
 import { TextDecoder, TextEncoder } from "ext:deno_web/08_text_encoding.js";
-import { asyncIterableToCallback } from "ext:deno_node/_fs/_fs_watch.ts";
 import Dirent from "ext:deno_node/_fs/_fs_dirent.ts";
 import { denoErrorToNodeError } from "ext:deno_node/internal/errors.ts";
 import { getValidatedPath } from "ext:deno_node/internal/fs/utils.mjs";
 import { Buffer } from "node:buffer";
 import { promisify } from "ext:deno_node/internal/util.mjs";
+import { op_fs_read_dir_async, op_fs_read_dir_sync } from "ext:core/ops";
+import { join, relative } from "node:path";
 function toDirent(val: Deno.DirEntry & { parentPath: string }): Dirent {
   return new Dirent(val);
@@ -18,6 +19,7 @@ function toDirent(val: Deno.DirEntry & { parentPath: string }): Dirent {
 type readDirOptions = {
   encoding?: string;
   withFileTypes?: boolean;
+  recursive?: boolean;
 };
 type readDirCallback = (err: Error | null, files: string[]) => void;
@@ -30,12 +32,12 @@ type readDirBoth = (
 export function readdir(
   path: string | Buffer | URL,
-  options: { withFileTypes?: false; encoding?: string },
+  options: readDirOptions,
   callback: readDirCallback,
 ): void;
 export function readdir(
   path: string | Buffer | URL,
-  options: { withFileTypes: true; encoding?: string },
+  options: readDirOptions,
   callback: readDirCallbackDirent,
 ): void;
 export function readdir(path: string | URL, callback: readDirCallback): void;
@@ -51,8 +53,7 @@ export function readdir(
   const options = typeof optionsOrCallback === "object"
     ? optionsOrCallback
     : null;
-  const result: Array<string | Dirent> = [];
-  path = getValidatedPath(path);
+  path = getValidatedPath(path).toString();
   if (!callback) throw new Error("No callback function supplied");
@@ -66,24 +67,44 @@ export function readdir(
     }
   }
-  try {
-    path = path.toString();
-    asyncIterableToCallback(Deno.readDir(path), (val, done) => {
-      if (typeof path !== "string") return;
-      if (done) {
-        callback(null, result);
-        return;
-      }
-      if (options?.withFileTypes) {
-        val.parentPath = path;
-        result.push(toDirent(val));
-      } else result.push(decode(val.name));
-    }, (e) => {
-      callback(denoErrorToNodeError(e as Error, { syscall: "readdir" }));
-    });
-  } catch (e) {
-    callback(denoErrorToNodeError(e as Error, { syscall: "readdir" }));
-  }
+  const result: Array<string | Dirent> = [];
+  const dirs = [path];
+  let current: string | undefined;
+  (async () => {
+    while ((current = dirs.shift()) !== undefined) {
+      try {
+        const entries = await op_fs_read_dir_async(current);
+        for (let i = 0; i < entries.length; i++) {
+          const entry = entries[i];
+          if (options?.recursive && entry.isDirectory) {
+            dirs.push(join(current, entry.name));
+          }
+          if (options?.withFileTypes) {
+            entry.parentPath = current;
+            result.push(toDirent(entry));
+          } else {
+            let name = decode(entry.name, options?.encoding);
+            if (options?.recursive) {
+              name = relative(path, join(current, name));
+            }
+            result.push(name);
+          }
+        }
+      } catch (err) {
+        callback(
+          denoErrorToNodeError(err as Error, {
+            syscall: "readdir",
+            path: current,
+          }),
+        );
+        return;
+      }
+    }
+    callback(null, result);
+  })();
 }
 function decode(str: string, encoding?: string): string {
@@ -118,8 +139,7 @@ export function readdirSync(
   path: string | Buffer | URL,
   options?: readDirOptions,
 ): Array<string | Dirent> {
-  const result = [];
-  path = getValidatedPath(path);
+  path = getValidatedPath(path).toString();
   if (options?.encoding) {
     try {
@@ -131,16 +151,37 @@ export function readdirSync(
     }
   }
-  try {
-    path = path.toString();
-    for (const file of Deno.readDirSync(path)) {
-      if (options?.withFileTypes) {
-        file.parentPath = path;
-        result.push(toDirent(file));
-      } else result.push(decode(file.name));
-    }
-  } catch (e) {
-    throw denoErrorToNodeError(e as Error, { syscall: "readdir" });
-  }
+  const result: Array<string | Dirent> = [];
+  const dirs = [path];
+  let current: string | undefined;
+  while ((current = dirs.shift()) !== undefined) {
+    try {
+      const entries = op_fs_read_dir_sync(current);
+      for (let i = 0; i < entries.length; i++) {
+        const entry = entries[i];
+        if (options?.recursive && entry.isDirectory) {
+          dirs.push(join(current, entry.name));
+        }
+        if (options?.withFileTypes) {
+          entry.parentPath = current;
+          result.push(toDirent(entry));
+        } else {
+          let name = decode(entry.name, options?.encoding);
+          if (options?.recursive) {
+            name = relative(path, join(current, name));
+          }
+          result.push(name);
+        }
+      }
+    } catch (e) {
+      throw denoErrorToNodeError(e as Error, {
+        syscall: "readdir",
+        path: current,
+      });
+    }
+  }
   return result;
 }
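
Note: both readdir paths now drive a single queue of pending directories instead of delegating to an async iterator, which is what makes the new recursive option cheap: subdirectories are simply pushed back onto the queue. The same queue-based traversal, sketched here in Rust since the surrounding changes are mostly Rust (std-only, illustrative):

use std::collections::VecDeque;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

// Iterative FIFO directory walk, mirroring the polyfill's `dirs` queue:
// pop a directory, list it, and when `recursive` is set push
// subdirectories back onto the queue.
fn read_dir_names(root: &Path, recursive: bool) -> io::Result<Vec<PathBuf>> {
  let mut result = Vec::new();
  let mut dirs = VecDeque::from([root.to_path_buf()]);
  while let Some(current) = dirs.pop_front() {
    for entry in fs::read_dir(&current)? {
      let entry = entry?;
      let path = entry.path();
      if recursive && path.is_dir() {
        dirs.push_back(path.clone());
      }
      // report paths relative to the root, like the relative() call above
      result.push(path.strip_prefix(root).unwrap().to_path_buf());
    }
  }
  Ok(result)
}

fn main() -> io::Result<()> {
  for name in read_dir_names(Path::new("."), false)? {
    println!("{}", name.display());
  }
  Ok(())
}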

View file

@ -16,6 +16,7 @@ use once_cell::sync::OnceCell;
use opentelemetry::logs::AnyValue; use opentelemetry::logs::AnyValue;
use opentelemetry::logs::LogRecord as LogRecordTrait; use opentelemetry::logs::LogRecord as LogRecordTrait;
use opentelemetry::logs::Severity; use opentelemetry::logs::Severity;
use opentelemetry::otel_error;
use opentelemetry::trace::SpanContext;
use opentelemetry::trace::SpanId;
use opentelemetry::trace::SpanKind;
@@ -27,15 +28,21 @@ use opentelemetry::KeyValue;
use opentelemetry::StringValue;
use opentelemetry::Value;
use opentelemetry_otlp::HttpExporterBuilder;
use opentelemetry_otlp::MetricExporter;
use opentelemetry_otlp::Protocol;
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_otlp::WithHttpConfig;
use opentelemetry_sdk::export::trace::SpanData;
use opentelemetry_sdk::logs::BatchLogProcessor;
use opentelemetry_sdk::logs::LogProcessor;
use opentelemetry_sdk::logs::LogRecord;
use opentelemetry_sdk::metrics::data::Metric;
use opentelemetry_sdk::metrics::data::ResourceMetrics;
use opentelemetry_sdk::metrics::data::ScopeMetrics;
use opentelemetry_sdk::metrics::exporter::PushMetricExporter;
use opentelemetry_sdk::metrics::Temporality;
use opentelemetry_sdk::trace::BatchSpanProcessor;
use opentelemetry_sdk::trace::SpanProcessor;
use opentelemetry_sdk::Resource;
use opentelemetry_semantic_conventions::resource::PROCESS_RUNTIME_NAME;
use opentelemetry_semantic_conventions::resource::PROCESS_RUNTIME_VERSION;
@@ -54,9 +61,6 @@ use std::thread;
use std::time::Duration;
use std::time::SystemTime;
deno_core::extension!(
deno_telemetry,
ops = [
@@ -71,6 +75,23 @@ deno_core::extension!(
op_otel_span_attribute3,
op_otel_span_set_dropped,
op_otel_span_flush,
op_otel_metrics_resource_attribute,
op_otel_metrics_resource_attribute2,
op_otel_metrics_resource_attribute3,
op_otel_metrics_scope,
op_otel_metrics_sum,
op_otel_metrics_gauge,
op_otel_metrics_sum_or_gauge_data_point,
op_otel_metrics_histogram,
op_otel_metrics_histogram_data_point,
op_otel_metrics_histogram_data_point_entry_final,
op_otel_metrics_histogram_data_point_entry1,
op_otel_metrics_histogram_data_point_entry2,
op_otel_metrics_histogram_data_point_entry3,
op_otel_metrics_data_point_attribute,
op_otel_metrics_data_point_attribute2,
op_otel_metrics_data_point_attribute3,
op_otel_metrics_submit,
],
esm = ["telemetry.ts", "util.ts"],
);
@@ -322,8 +343,69 @@ mod hyper_client {
}
}
enum MetricProcessorMessage {
ResourceMetrics(ResourceMetrics),
Flush(tokio::sync::oneshot::Sender<()>),
}
struct MetricProcessor {
tx: tokio::sync::mpsc::Sender<MetricProcessorMessage>,
}
impl MetricProcessor {
fn new(exporter: MetricExporter) -> Self {
let (tx, mut rx) = tokio::sync::mpsc::channel(2048);
let future = async move {
while let Some(message) = rx.recv().await {
match message {
MetricProcessorMessage::ResourceMetrics(mut rm) => {
if let Err(err) = exporter.export(&mut rm).await {
otel_error!(
name: "MetricProcessor.Export.Error",
error = format!("{}", err)
);
}
}
MetricProcessorMessage::Flush(tx) => {
if let Err(()) = tx.send(()) {
otel_error!(
name: "MetricProcessor.Flush.SendResultError",
error = "()",
);
}
}
}
}
};
(*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX)
.unbounded_send(Box::pin(future))
.expect("failed to send task to shared OpenTelemetry runtime");
Self { tx }
}
fn submit(&self, rm: ResourceMetrics) {
let _ = self
.tx
.try_send(MetricProcessorMessage::ResourceMetrics(rm));
}
fn force_flush(&self) -> Result<(), anyhow::Error> {
let (tx, rx) = tokio::sync::oneshot::channel();
self.tx.try_send(MetricProcessorMessage::Flush(tx))?;
deno_core::futures::executor::block_on(rx)?;
Ok(())
}
}
struct Processors {
spans: BatchSpanProcessor<OtelSharedRuntime>,
logs: BatchLogProcessor<OtelSharedRuntime>,
metrics: MetricProcessor,
}
static OTEL_PROCESSORS: OnceCell<Processors> = OnceCell::new();
static BUILT_IN_INSTRUMENTATION_SCOPE: OnceCell<
opentelemetry::InstrumentationScope,
@@ -404,6 +486,12 @@ pub fn init(config: OtelConfig) -> anyhow::Result<()> {
BatchSpanProcessor::builder(span_exporter, OtelSharedRuntime).build();
span_processor.set_resource(&resource);
let metric_exporter = HttpExporterBuilder::default()
.with_http_client(client.clone())
.with_protocol(protocol)
.build_metrics_exporter(Temporality::Cumulative)?;
let metric_processor = MetricProcessor::new(metric_exporter);
let log_exporter = HttpExporterBuilder::default()
.with_http_client(client)
.with_protocol(protocol)
@@ -413,7 +501,11 @@ pub fn init(config: OtelConfig) -> anyhow::Result<()> {
log_processor.set_resource(&resource);
OTEL_PROCESSORS
.set(Processors {
spans: span_processor,
logs: log_processor,
metrics: metric_processor,
})
.map_err(|_| anyhow!("failed to init otel"))?;
let builtin_instrumentation_scope =
@@ -431,16 +523,22 @@ pub fn init(config: OtelConfig) -> anyhow::Result<()> {
/// `process::exit()`, to ensure that all OpenTelemetry logs are properly
/// flushed before the process terminates.
pub fn flush() {
if let Some(Processors {
spans,
logs,
metrics,
}) = OTEL_PROCESSORS.get()
{
let _ = spans.force_flush();
let _ = logs.force_flush();
let _ = metrics.force_flush();
}
}
pub fn handle_log(record: &log::Record) {
use log::Level;
let Some(Processors { logs, .. }) = OTEL_PROCESSORS.get() else {
return;
};
@@ -490,7 +588,7 @@ pub fn handle_log(record: &log::Record) {
let _ = record.key_values().visit(&mut Visitor(&mut log_record));
logs.emit(
&mut log_record,
BUILT_IN_INSTRUMENTATION_SCOPE.get().unwrap(),
);
@@ -648,7 +746,7 @@ fn op_otel_log(
span_id: v8::Local<'_, v8::Value>,
#[smi] trace_flags: u8,
) {
let Some(Processors { logs, .. }) = OTEL_PROCESSORS.get() else {
return;
};
@@ -678,12 +776,25 @@ fn op_otel_log(
);
}
logs.emit(
&mut log_record,
BUILT_IN_INSTRUMENTATION_SCOPE.get().unwrap(),
);
}
fn owned_string<'s>(
scope: &mut v8::HandleScope<'s>,
string: v8::Local<'s, v8::String>,
) -> String {
let x = v8::ValueView::new(scope, string);
match x.data() {
v8::ValueViewData::OneByte(bytes) => {
String::from_utf8_lossy(bytes).into_owned()
}
v8::ValueViewData::TwoByte(bytes) => String::from_utf16_lossy(bytes),
}
}
struct TemporarySpan(SpanData);
#[allow(clippy::too_many_arguments)]
@@ -700,10 +811,10 @@ fn op_otel_span_start<'s>(
end_time: f64,
) -> Result<(), anyhow::Error> {
if let Some(temporary_span) = state.try_take::<TemporarySpan>() {
let Some(Processors { spans, .. }) = OTEL_PROCESSORS.get() else {
return Ok(());
};
spans.on_end(temporary_span.0);
};
let Some(InstrumentationScope(instrumentation_scope)) =
@@ -724,15 +835,7 @@ fn op_otel_span_start<'s>(
let parent_span_id = parse_span_id(scope, parent_span_id);
let name = owned_string(scope, name.try_cast()?);
let temporary_span = TemporarySpan(SpanData {
span_context: SpanContext::new(
@@ -866,9 +969,598 @@ fn op_otel_span_flush(state: &mut OpState) {
return;
};
let Some(Processors { spans, .. }) = OTEL_PROCESSORS.get() else {
return;
};
spans.on_end(temporary_span.0);
}
// Holds data being built from JS before
// it is submitted to the rust processor.
struct TemporaryMetricsExport {
resource_attributes: Vec<KeyValue>,
scope_metrics: Vec<ScopeMetrics>,
metric: Option<TemporaryMetric>,
}
struct TemporaryMetric {
name: String,
description: String,
unit: String,
data: TemporaryMetricData,
}
enum TemporaryMetricData {
Sum(opentelemetry_sdk::metrics::data::Sum<f64>),
Gauge(opentelemetry_sdk::metrics::data::Gauge<f64>),
Histogram(opentelemetry_sdk::metrics::data::Histogram<f64>),
}
impl From<TemporaryMetric> for Metric {
fn from(value: TemporaryMetric) -> Self {
Metric {
name: Cow::Owned(value.name),
description: Cow::Owned(value.description),
unit: Cow::Owned(value.unit),
data: match value.data {
TemporaryMetricData::Sum(sum) => Box::new(sum),
TemporaryMetricData::Gauge(gauge) => Box::new(gauge),
TemporaryMetricData::Histogram(histogram) => Box::new(histogram),
},
}
}
}
#[op2(fast)]
fn op_otel_metrics_resource_attribute<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key: v8::Local<'s, v8::Value>,
value: v8::Local<'s, v8::Value>,
) {
let metrics_export = if let Some(metrics_export) =
state.try_borrow_mut::<TemporaryMetricsExport>()
{
metrics_export.resource_attributes.reserve_exact(
(capacity as usize) - metrics_export.resource_attributes.capacity(),
);
metrics_export
} else {
state.put(TemporaryMetricsExport {
resource_attributes: Vec::with_capacity(capacity as usize),
scope_metrics: vec![],
metric: None,
});
state.borrow_mut()
};
attr!(scope, metrics_export.resource_attributes, key, value);
}
#[op2(fast)]
fn op_otel_metrics_resource_attribute2<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key1: v8::Local<'s, v8::Value>,
value1: v8::Local<'s, v8::Value>,
key2: v8::Local<'s, v8::Value>,
value2: v8::Local<'s, v8::Value>,
) {
let metrics_export = if let Some(metrics_export) =
state.try_borrow_mut::<TemporaryMetricsExport>()
{
metrics_export.resource_attributes.reserve_exact(
(capacity as usize) - metrics_export.resource_attributes.capacity(),
);
metrics_export
} else {
state.put(TemporaryMetricsExport {
resource_attributes: Vec::with_capacity(capacity as usize),
scope_metrics: vec![],
metric: None,
});
state.borrow_mut()
};
attr!(scope, metrics_export.resource_attributes, key1, value1);
attr!(scope, metrics_export.resource_attributes, key2, value2);
}
#[allow(clippy::too_many_arguments)]
#[op2(fast)]
fn op_otel_metrics_resource_attribute3<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key1: v8::Local<'s, v8::Value>,
value1: v8::Local<'s, v8::Value>,
key2: v8::Local<'s, v8::Value>,
value2: v8::Local<'s, v8::Value>,
key3: v8::Local<'s, v8::Value>,
value3: v8::Local<'s, v8::Value>,
) {
let metrics_export = if let Some(metrics_export) =
state.try_borrow_mut::<TemporaryMetricsExport>()
{
metrics_export.resource_attributes.reserve_exact(
(capacity as usize) - metrics_export.resource_attributes.capacity(),
);
metrics_export
} else {
state.put(TemporaryMetricsExport {
resource_attributes: Vec::with_capacity(capacity as usize),
scope_metrics: vec![],
metric: None,
});
state.borrow_mut()
};
attr!(scope, metrics_export.resource_attributes, key1, value1);
attr!(scope, metrics_export.resource_attributes, key2, value2);
attr!(scope, metrics_export.resource_attributes, key3, value3);
}
#[op2(fast)]
fn op_otel_metrics_scope<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
name: v8::Local<'s, v8::Value>,
schema_url: v8::Local<'s, v8::Value>,
version: v8::Local<'s, v8::Value>,
) {
let name = owned_string(scope, name.cast());
let scope_builder = opentelemetry::InstrumentationScope::builder(name);
let scope_builder = if schema_url.is_null_or_undefined() {
scope_builder
} else {
scope_builder.with_schema_url(owned_string(scope, schema_url.cast()))
};
let scope_builder = if version.is_null_or_undefined() {
scope_builder
} else {
scope_builder.with_version(owned_string(scope, version.cast()))
};
let scope = scope_builder.build();
let scope_metric = ScopeMetrics {
scope,
metrics: vec![],
};
match state.try_borrow_mut::<TemporaryMetricsExport>() {
Some(temp) => {
if let Some(current_metric) = temp.metric.take() {
let metric = Metric::from(current_metric);
temp.scope_metrics.last_mut().unwrap().metrics.push(metric);
}
temp.scope_metrics.push(scope_metric);
}
None => {
state.put(TemporaryMetricsExport {
resource_attributes: vec![],
scope_metrics: vec![scope_metric],
metric: None,
});
}
}
}
#[op2(fast)]
fn op_otel_metrics_sum<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
name: v8::Local<'s, v8::Value>,
description: v8::Local<'s, v8::Value>,
unit: v8::Local<'s, v8::Value>,
#[smi] temporality: u8,
is_monotonic: bool,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
if let Some(current_metric) = temp.metric.take() {
let metric = Metric::from(current_metric);
temp.scope_metrics.last_mut().unwrap().metrics.push(metric);
}
let name = owned_string(scope, name.cast());
let description = owned_string(scope, description.cast());
let unit = owned_string(scope, unit.cast());
let temporality = match temporality {
0 => Temporality::Delta,
1 => Temporality::Cumulative,
_ => return,
};
let sum = opentelemetry_sdk::metrics::data::Sum {
data_points: vec![],
temporality,
is_monotonic,
};
temp.metric = Some(TemporaryMetric {
name,
description,
unit,
data: TemporaryMetricData::Sum(sum),
});
}
#[op2(fast)]
fn op_otel_metrics_gauge<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
name: v8::Local<'s, v8::Value>,
description: v8::Local<'s, v8::Value>,
unit: v8::Local<'s, v8::Value>,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
if let Some(current_metric) = temp.metric.take() {
let metric = Metric::from(current_metric);
temp.scope_metrics.last_mut().unwrap().metrics.push(metric);
}
let name = owned_string(scope, name.cast());
let description = owned_string(scope, description.cast());
let unit = owned_string(scope, unit.cast());
let gauge = opentelemetry_sdk::metrics::data::Gauge {
data_points: vec![],
};
temp.metric = Some(TemporaryMetric {
name,
description,
unit,
data: TemporaryMetricData::Gauge(gauge),
});
}
#[op2(fast)]
fn op_otel_metrics_sum_or_gauge_data_point(
state: &mut OpState,
value: f64,
start_time: f64,
time: f64,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
let start_time = SystemTime::UNIX_EPOCH
.checked_add(std::time::Duration::from_secs_f64(start_time))
.unwrap();
let time = SystemTime::UNIX_EPOCH
.checked_add(std::time::Duration::from_secs_f64(time))
.unwrap();
let data_point = opentelemetry_sdk::metrics::data::DataPoint {
value,
start_time: Some(start_time),
time: Some(time),
attributes: vec![],
exemplars: vec![],
};
match &mut temp.metric {
Some(TemporaryMetric {
data: TemporaryMetricData::Sum(sum),
..
}) => sum.data_points.push(data_point),
Some(TemporaryMetric {
data: TemporaryMetricData::Gauge(gauge),
..
}) => gauge.data_points.push(data_point),
_ => {}
}
}
#[op2(fast)]
fn op_otel_metrics_histogram<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
name: v8::Local<'s, v8::Value>,
description: v8::Local<'s, v8::Value>,
unit: v8::Local<'s, v8::Value>,
#[smi] temporality: u8,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
if let Some(current_metric) = temp.metric.take() {
let metric = Metric::from(current_metric);
temp.scope_metrics.last_mut().unwrap().metrics.push(metric);
}
let name = owned_string(scope, name.cast());
let description = owned_string(scope, description.cast());
let unit = owned_string(scope, unit.cast());
let temporality = match temporality {
0 => Temporality::Delta,
1 => Temporality::Cumulative,
_ => return,
};
let histogram = opentelemetry_sdk::metrics::data::Histogram {
data_points: vec![],
temporality,
};
temp.metric = Some(TemporaryMetric {
name,
description,
unit,
data: TemporaryMetricData::Histogram(histogram),
});
}
#[allow(clippy::too_many_arguments)]
#[op2(fast)]
fn op_otel_metrics_histogram_data_point(
state: &mut OpState,
#[number] count: u64,
min: f64,
max: f64,
sum: f64,
start_time: f64,
time: f64,
#[smi] buckets: u32,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
let min = if min.is_nan() { None } else { Some(min) };
let max = if max.is_nan() { None } else { Some(max) };
let start_time = SystemTime::UNIX_EPOCH
.checked_add(std::time::Duration::from_secs_f64(start_time))
.unwrap();
let time = SystemTime::UNIX_EPOCH
.checked_add(std::time::Duration::from_secs_f64(time))
.unwrap();
let data_point = opentelemetry_sdk::metrics::data::HistogramDataPoint {
bounds: Vec::with_capacity(buckets as usize),
bucket_counts: Vec::with_capacity((buckets as usize) + 1),
count,
sum,
min,
max,
start_time,
time,
attributes: vec![],
exemplars: vec![],
};
if let Some(TemporaryMetric {
data: TemporaryMetricData::Histogram(histogram),
..
}) = &mut temp.metric
{
histogram.data_points.push(data_point);
}
}
#[op2(fast)]
fn op_otel_metrics_histogram_data_point_entry_final(
state: &mut OpState,
#[number] count1: u64,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
if let Some(TemporaryMetric {
data: TemporaryMetricData::Histogram(histogram),
..
}) = &mut temp.metric
{
histogram
.data_points
.last_mut()
.unwrap()
.bucket_counts
.push(count1)
}
}
#[op2(fast)]
fn op_otel_metrics_histogram_data_point_entry1(
state: &mut OpState,
#[number] count1: u64,
bound1: f64,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
if let Some(TemporaryMetric {
data: TemporaryMetricData::Histogram(histogram),
..
}) = &mut temp.metric
{
let data_point = histogram.data_points.last_mut().unwrap();
data_point.bucket_counts.push(count1);
data_point.bounds.push(bound1);
}
}
#[op2(fast)]
fn op_otel_metrics_histogram_data_point_entry2(
state: &mut OpState,
#[number] count1: u64,
bound1: f64,
#[number] count2: u64,
bound2: f64,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
if let Some(TemporaryMetric {
data: TemporaryMetricData::Histogram(histogram),
..
}) = &mut temp.metric
{
let data_point = histogram.data_points.last_mut().unwrap();
data_point.bucket_counts.push(count1);
data_point.bounds.push(bound1);
data_point.bucket_counts.push(count2);
data_point.bounds.push(bound2);
}
}
#[op2(fast)]
fn op_otel_metrics_histogram_data_point_entry3(
state: &mut OpState,
#[number] count1: u64,
bound1: f64,
#[number] count2: u64,
bound2: f64,
#[number] count3: u64,
bound3: f64,
) {
let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
return;
};
if let Some(TemporaryMetric {
data: TemporaryMetricData::Histogram(histogram),
..
}) = &mut temp.metric
{
let data_point = histogram.data_points.last_mut().unwrap();
data_point.bucket_counts.push(count1);
data_point.bounds.push(bound1);
data_point.bucket_counts.push(count2);
data_point.bounds.push(bound2);
data_point.bucket_counts.push(count3);
data_point.bounds.push(bound3);
}
}
#[op2(fast)]
fn op_otel_metrics_data_point_attribute<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key: v8::Local<'s, v8::Value>,
value: v8::Local<'s, v8::Value>,
) {
if let Some(TemporaryMetricsExport {
metric: Some(metric),
..
}) = state.try_borrow_mut::<TemporaryMetricsExport>()
{
let attributes = match &mut metric.data {
TemporaryMetricData::Sum(sum) => {
&mut sum.data_points.last_mut().unwrap().attributes
}
TemporaryMetricData::Gauge(gauge) => {
&mut gauge.data_points.last_mut().unwrap().attributes
}
TemporaryMetricData::Histogram(histogram) => {
&mut histogram.data_points.last_mut().unwrap().attributes
}
};
attributes.reserve_exact((capacity as usize) - attributes.capacity());
attr!(scope, attributes, key, value);
}
}
#[op2(fast)]
fn op_otel_metrics_data_point_attribute2<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key1: v8::Local<'s, v8::Value>,
value1: v8::Local<'s, v8::Value>,
key2: v8::Local<'s, v8::Value>,
value2: v8::Local<'s, v8::Value>,
) {
if let Some(TemporaryMetricsExport {
metric: Some(metric),
..
}) = state.try_borrow_mut::<TemporaryMetricsExport>()
{
let attributes = match &mut metric.data {
TemporaryMetricData::Sum(sum) => {
&mut sum.data_points.last_mut().unwrap().attributes
}
TemporaryMetricData::Gauge(gauge) => {
&mut gauge.data_points.last_mut().unwrap().attributes
}
TemporaryMetricData::Histogram(histogram) => {
&mut histogram.data_points.last_mut().unwrap().attributes
}
};
attributes.reserve_exact((capacity as usize) - attributes.capacity());
attr!(scope, attributes, key1, value1);
attr!(scope, attributes, key2, value2);
}
}
#[allow(clippy::too_many_arguments)]
#[op2(fast)]
fn op_otel_metrics_data_point_attribute3<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key1: v8::Local<'s, v8::Value>,
value1: v8::Local<'s, v8::Value>,
key2: v8::Local<'s, v8::Value>,
value2: v8::Local<'s, v8::Value>,
key3: v8::Local<'s, v8::Value>,
value3: v8::Local<'s, v8::Value>,
) {
if let Some(TemporaryMetricsExport {
metric: Some(metric),
..
}) = state.try_borrow_mut::<TemporaryMetricsExport>()
{
let attributes = match &mut metric.data {
TemporaryMetricData::Sum(sum) => {
&mut sum.data_points.last_mut().unwrap().attributes
}
TemporaryMetricData::Gauge(gauge) => {
&mut gauge.data_points.last_mut().unwrap().attributes
}
TemporaryMetricData::Histogram(histogram) => {
&mut histogram.data_points.last_mut().unwrap().attributes
}
};
attributes.reserve_exact((capacity as usize) - attributes.capacity());
attr!(scope, attributes, key1, value1);
attr!(scope, attributes, key2, value2);
attr!(scope, attributes, key3, value3);
}
}
#[op2(fast)]
fn op_otel_metrics_submit(state: &mut OpState) {
let Some(mut temp) = state.try_take::<TemporaryMetricsExport>() else {
return;
};
let Some(Processors { metrics, .. }) = OTEL_PROCESSORS.get() else {
return;
};
if let Some(current_metric) = temp.metric {
let metric = Metric::from(current_metric);
temp.scope_metrics.last_mut().unwrap().metrics.push(metric);
}
let resource = Resource::new(temp.resource_attributes);
let scope_metrics = temp.scope_metrics;
metrics.submit(ResourceMetrics {
resource,
scope_metrics,
});
}

View file

@@ -7,6 +7,23 @@ import {
op_otel_instrumentation_scope_enter,
op_otel_instrumentation_scope_enter_builtin,
op_otel_log,
op_otel_metrics_data_point_attribute,
op_otel_metrics_data_point_attribute2,
op_otel_metrics_data_point_attribute3,
op_otel_metrics_gauge,
op_otel_metrics_histogram,
op_otel_metrics_histogram_data_point,
op_otel_metrics_histogram_data_point_entry1,
op_otel_metrics_histogram_data_point_entry2,
op_otel_metrics_histogram_data_point_entry3,
op_otel_metrics_histogram_data_point_entry_final,
op_otel_metrics_resource_attribute,
op_otel_metrics_resource_attribute2,
op_otel_metrics_resource_attribute3,
op_otel_metrics_scope,
op_otel_metrics_submit,
op_otel_metrics_sum,
op_otel_metrics_sum_or_gauge_data_point,
op_otel_span_attribute,
op_otel_span_attribute2,
op_otel_span_attribute3,
@@ -186,7 +203,7 @@ const instrumentationScopes = new SafeWeakMap<
>();
let activeInstrumentationLibrary: WeakRef<InstrumentationLibrary> | null = null;
function submitSpan(
spanId: string | Uint8Array,
traceId: string | Uint8Array,
traceFlags: number,
@@ -411,7 +428,7 @@ export class Span {
endSpan = (span: Span) => {
const endTime = now();
submitSpan(
span.#spanId,
span.#traceId,
span.#traceFlags,
@@ -571,7 +588,7 @@ class SpanExporter {
for (let i = 0; i < spans.length; i += 1) {
const span = spans[i];
const context = span.spanContext();
submitSpan(
context.spanId,
context.traceId,
context.traceFlags,
@@ -671,6 +688,262 @@ class ContextManager {
}
}
function attributeValue(value: IAnyValue) {
return value.boolValue ?? value.stringValue ?? value.doubleValue ??
value.intValue;
}
function submitMetrics(resource, scopeMetrics) {
let i = 0;
while (i < resource.attributes.length) {
if (i + 2 < resource.attributes.length) {
op_otel_metrics_resource_attribute3(
resource.attributes.length,
resource.attributes[i].key,
attributeValue(resource.attributes[i].value),
resource.attributes[i + 1].key,
attributeValue(resource.attributes[i + 1].value),
resource.attributes[i + 2].key,
attributeValue(resource.attributes[i + 2].value),
);
i += 3;
} else if (i + 1 < resource.attributes.length) {
op_otel_metrics_resource_attribute2(
resource.attributes.length,
resource.attributes[i].key,
attributeValue(resource.attributes[i].value),
resource.attributes[i + 1].key,
attributeValue(resource.attributes[i + 1].value),
);
i += 2;
} else {
op_otel_metrics_resource_attribute(
resource.attributes.length,
resource.attributes[i].key,
attributeValue(resource.attributes[i].value),
);
i += 1;
}
}
for (let smi = 0; smi < scopeMetrics.length; smi += 1) {
const { scope, metrics } = scopeMetrics[smi];
op_otel_metrics_scope(scope.name, scope.schemaUrl, scope.version);
for (let mi = 0; mi < metrics.length; mi += 1) {
const metric = metrics[mi];
switch (metric.dataPointType) {
case 3:
op_otel_metrics_sum(
metric.descriptor.name,
// deno-lint-ignore prefer-primordials
metric.descriptor.description,
metric.descriptor.unit,
metric.aggregationTemporality,
metric.isMonotonic,
);
for (let di = 0; di < metric.dataPoints.length; di += 1) {
const dataPoint = metric.dataPoints[di];
op_otel_metrics_sum_or_gauge_data_point(
dataPoint.value,
hrToSecs(dataPoint.startTime),
hrToSecs(dataPoint.endTime),
);
const attributes = ObjectEntries(dataPoint.attributes);
let i = 0;
while (i < attributes.length) {
if (i + 2 < attributes.length) {
op_otel_metrics_data_point_attribute3(
attributes.length,
attributes[i][0],
attributes[i][1],
attributes[i + 1][0],
attributes[i + 1][1],
attributes[i + 2][0],
attributes[i + 2][1],
);
i += 3;
} else if (i + 1 < attributes.length) {
op_otel_metrics_data_point_attribute2(
attributes.length,
attributes[i][0],
attributes[i][1],
attributes[i + 1][0],
attributes[i + 1][1],
);
i += 2;
} else {
op_otel_metrics_data_point_attribute(
attributes.length,
attributes[i][0],
attributes[i][1],
);
i += 1;
}
}
}
break;
case 2:
op_otel_metrics_gauge(
metric.descriptor.name,
// deno-lint-ignore prefer-primordials
metric.descriptor.description,
metric.descriptor.unit,
);
for (let di = 0; di < metric.dataPoints.length; di += 1) {
const dataPoint = metric.dataPoints[di];
op_otel_metrics_sum_or_gauge_data_point(
dataPoint.value,
hrToSecs(dataPoint.startTime),
hrToSecs(dataPoint.endTime),
);
const attributes = ObjectEntries(dataPoint.attributes);
let i = 0;
while (i < attributes.length) {
if (i + 2 < attributes.length) {
op_otel_metrics_data_point_attribute3(
attributes.length,
attributes[i][0],
attributes[i][1],
attributes[i + 1][0],
attributes[i + 1][1],
attributes[i + 2][0],
attributes[i + 2][1],
);
i += 3;
} else if (i + 1 < attributes.length) {
op_otel_metrics_data_point_attribute2(
attributes.length,
attributes[i][0],
attributes[i][1],
attributes[i + 1][0],
attributes[i + 1][1],
);
i += 2;
} else {
op_otel_metrics_data_point_attribute(
attributes.length,
attributes[i][0],
attributes[i][1],
);
i += 1;
}
}
}
break;
case 0:
op_otel_metrics_histogram(
metric.descriptor.name,
// deno-lint-ignore prefer-primordials
metric.descriptor.description,
metric.descriptor.unit,
metric.aggregationTemporality,
);
for (let di = 0; di < metric.dataPoints.length; di += 1) {
const dataPoint = metric.dataPoints[di];
const { boundaries, counts } = dataPoint.value.buckets;
op_otel_metrics_histogram_data_point(
dataPoint.value.count,
dataPoint.value.min ?? NaN,
dataPoint.value.max ?? NaN,
dataPoint.value.sum,
hrToSecs(dataPoint.startTime),
hrToSecs(dataPoint.endTime),
boundaries.length,
);
let j = 0;
while (j < boundaries.length) {
if (j + 3 < boundaries.length) {
op_otel_metrics_histogram_data_point_entry3(
counts[j],
boundaries[j],
counts[j + 1],
boundaries[j + 1],
counts[j + 2],
boundaries[j + 2],
);
j += 3;
} else if (j + 2 < boundaries.length) {
op_otel_metrics_histogram_data_point_entry2(
counts[j],
boundaries[j],
counts[j + 1],
boundaries[j + 1],
);
j += 2;
} else {
op_otel_metrics_histogram_data_point_entry1(
counts[j],
boundaries[j],
);
j += 1;
}
}
op_otel_metrics_histogram_data_point_entry_final(counts[j]);
const attributes = ObjectEntries(dataPoint.attributes);
let i = 0;
while (i < attributes.length) {
if (i + 2 < attributes.length) {
op_otel_metrics_data_point_attribute3(
attributes.length,
attributes[i][0],
attributes[i][1],
attributes[i + 1][0],
attributes[i + 1][1],
attributes[i + 2][0],
attributes[i + 2][1],
);
i += 3;
} else if (i + 1 < attributes.length) {
op_otel_metrics_data_point_attribute2(
attributes.length,
attributes[i][0],
attributes[i][1],
attributes[i + 1][0],
attributes[i + 1][1],
);
i += 2;
} else {
op_otel_metrics_data_point_attribute(
attributes.length,
attributes[i][0],
attributes[i][1],
);
i += 1;
}
}
}
break;
default:
continue;
}
}
}
op_otel_metrics_submit();
}
class MetricExporter {
export(metrics, resultCallback: (result: ExportResult) => void) {
try {
submitMetrics(metrics.resource, metrics.scopeMetrics);
resultCallback({ code: 0 });
} catch (error) {
resultCallback({
code: 1,
error: ObjectPrototypeIsPrototypeOf(error, Error)
? error as Error
: new Error(String(error)),
});
}
}
async forceFlush() {}
async shutdown() {}
}
const otelConsoleConfig = {
ignore: 0,
capture: 1,
@@ -708,4 +981,5 @@ export function bootstrap(
export const telemetry = {
SpanExporter,
ContextManager,
MetricExporter,
};
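A minimal sketch of how this exporter could be wired up from user code, assuming the npm:@opentelemetry/sdk-metrics API and that the exporter is reachable as Deno.telemetry.MetricExporter; the exact exposure path is an assumption, not shown in this diff:

import {
  MeterProvider,
  PeriodicExportingMetricReader,
} from "npm:@opentelemetry/sdk-metrics";

const provider = new MeterProvider({
  readers: [
    new PeriodicExportingMetricReader({
      // export() above walks resource attributes and data points and
      // feeds them to the op_otel_metrics_* ops in batches of up to three.
      // deno-lint-ignore no-explicit-any
      exporter: new (Deno as any).telemetry.MetricExporter(),
      exportIntervalMillis: 10_000,
    }),
  ],
});

const counter = provider.getMeter("example").createCounter("requests");
counter.add(1, { route: "/" });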

View file

@@ -271,7 +271,7 @@ function addPaddingToBase64url(base64url) {
if (base64url.length % 4 === 2) return base64url + "==";
if (base64url.length % 4 === 3) return base64url + "=";
if (base64url.length % 4 === 1) {
throw new TypeError("Illegal base64url string");
}
return base64url;
}
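As a side note, the padding rule in this hunk follows base64url length arithmetic: a valid base64url string's length mod 4 is never 1, so only the 2 and 3 cases gain padding. A hypothetical standalone helper illustrating the same mapping:

// pad("QQ") === "QQ==" (decodes to "A"); pad("QUI") === "QUI=" ("AB")
function pad(b64url: string): string {
  switch (b64url.length % 4) {
    case 2:
      return b64url + "==";
    case 3:
      return b64url + "=";
    case 1:
      throw new TypeError("Illegal base64url string");
    default:
      return b64url; // already a multiple of 4
  }
}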
@@ -382,7 +382,7 @@ function assert(cond, msg = "Assertion failed.") {
function serializeJSValueToJSONString(value) {
const result = JSONStringify(value);
if (result === undefined) {
throw new TypeError("Value is not JSON serializable");
}
return result;
}
@@ -429,7 +429,7 @@ function pathFromURLWin32(url) {
*/
function pathFromURLPosix(url) {
if (url.hostname !== "") {
throw new TypeError("Host must be empty");
}
return decodeURIComponent(
@@ -444,7 +444,7 @@ function pathFromURLPosix(url) {
*/
function pathFromURL(pathOrUrl) {
if (ObjectPrototypeIsPrototypeOf(URLPrototype, pathOrUrl)) {
if (pathOrUrl.protocol != "file:") {
throw new TypeError("Must be a file URL");
}
return core.build.os == "windows"

View file

@@ -1031,11 +1031,11 @@ class EventTarget {
}
if (getDispatched(event)) {
throw new DOMException("Invalid event state", "InvalidStateError");
}
if (event.eventPhase !== Event.NONE) {
throw new DOMException("Invalid event state", "InvalidStateError");
}
return dispatch(self, event);

View file

@@ -196,7 +196,7 @@ class AbortSignal extends EventTarget {
constructor(key = null) {
if (key !== illegalConstructorKey) {
throw new TypeError("Illegal constructor");
}
super();
}

View file

@@ -16,7 +16,7 @@ const illegalConstructorKey = Symbol("illegalConstructorKey");
class Window extends EventTarget {
constructor(key = null) {
if (key !== illegalConstructorKey) {
throw new TypeError("Illegal constructor");
}
super();
}
@@ -29,7 +29,7 @@ class Window extends EventTarget {
class WorkerGlobalScope extends EventTarget {
constructor(key = null) {
if (key != illegalConstructorKey) {
throw new TypeError("Illegal constructor");
}
super();
}
@@ -42,7 +42,7 @@ class WorkerGlobalScope extends EventTarget {
class DedicatedWorkerGlobalScope extends WorkerGlobalScope {
constructor(key = null) {
if (key != illegalConstructorKey) {
throw new TypeError("Illegal constructor");
}
super();
}

View file

@@ -50,7 +50,7 @@ function btoa(data) {
} catch (e) {
if (ObjectPrototypeIsPrototypeOf(TypeErrorPrototype, e)) {
throw new DOMException(
"Cannot encode string: string contains characters outside of the Latin1 range",
"InvalidCharacterError",
);
}

View file

@@ -523,10 +527,14 @@ function dequeueValue(container) {
function enqueueValueWithSize(container, value, size) {
assert(container[_queue] && typeof container[_queueTotalSize] === "number");
if (isNonNegativeNumber(size) === false) {
throw new RangeError(
"Cannot enqueue value with size: chunk size must be a positive number",
);
}
if (size === Infinity) {
throw new RangeError(
"Cannot enqueue value with size: chunk size is invalid",
);
}
container[_queue].enqueue({ value, size });
container[_queueTotalSize] += size;
@@ -1097,7 +1101,7 @@ async function readableStreamCollectIntoUint8Array(stream) {
if (TypedArrayPrototypeGetSymbolToStringTag(chunk) !== "Uint8Array") {
throw new TypeError(
"Cannot convert value to Uint8Array while consuming the stream",
);
}
@@ -1347,7 +1351,7 @@ function readableByteStreamControllerEnqueue(controller, chunk) {
if (isDetachedBuffer(buffer)) {
throw new TypeError(
"Chunk's buffer is detached and so cannot be enqueued",
);
}
const transferredBuffer = ArrayBufferPrototypeTransferToFixedLength(buffer);
@@ -2095,14 +2099,14 @@ function readableByteStreamControllerRespond(controller, bytesWritten) {
if (state === "closed") {
if (bytesWritten !== 0) {
throw new TypeError(
`"bytesWritten" must be 0 when calling respond() on a closed stream: received ${bytesWritten}`,
);
}
} else {
assert(state === "readable");
if (bytesWritten === 0) {
throw new TypeError(
'"bytesWritten" must be greater than 0 when calling respond() on a readable stream',
);
}
if (
@@ -2110,7 +2114,7 @@ function readableByteStreamControllerRespond(controller, bytesWritten) {
// deno-lint-ignore prefer-primordials
firstDescriptor.byteLength
) {
throw new RangeError('"bytesWritten" out of range');
}
}
firstDescriptor.buffer = ArrayBufferPrototypeTransferToFixedLength(
@@ -2305,7 +2309,7 @@ function readableByteStreamControllerRespondWithNewView(controller, view) {
if (state === "closed") {
if (byteLength !== 0) {
throw new TypeError(
`The view's length must be 0 when calling respondWithNewView() on a closed stream: received ${byteLength}`,
);
}
} else {
@@ -3577,7 +3581,7 @@ function setUpReadableByteStreamControllerFromUnderlyingSource(
}
const autoAllocateChunkSize = underlyingSourceDict["autoAllocateChunkSize"];
if (autoAllocateChunkSize === 0) {
throw new TypeError('"autoAllocateChunkSize" must be greater than 0');
}
setUpReadableByteStreamController(
stream,
@@ -3706,7 +3710,7 @@ function setUpReadableStreamDefaultControllerFromUnderlyingSource(
*/
function setUpReadableStreamBYOBReader(reader, stream) {
if (isReadableStreamLocked(stream)) {
throw new TypeError("ReadableStream is locked");
}
if (
!(ObjectPrototypeIsPrototypeOf(
@@ -3727,7 +3731,7 @@ function setUpReadableStreamBYOBReader(reader, stream) {
*/
function setUpReadableStreamDefaultReader(reader, stream) {
if (isReadableStreamLocked(stream)) {
throw new TypeError("ReadableStream is locked");
}
readableStreamReaderGenericInitialize(reader, stream);
reader[_readRequests] = new Queue();
@@ -3961,7 +3965,7 @@ function setUpWritableStreamDefaultControllerFromUnderlyingSink(
*/
function setUpWritableStreamDefaultWriter(writer, stream) {
if (isWritableStreamLocked(stream) === true) {
throw new TypeError("The stream is already locked");
}
writer[_stream] = stream;
stream[_writer] = writer;
@@ -4019,7 +4023,7 @@ function transformStreamDefaultControllerEnqueue(controller, chunk) {
/** @type {ReadableStreamDefaultController<O>} */ readableController,
) === false
) {
throw new TypeError("Readable stream is unavailable");
}
try {
readableStreamDefaultControllerEnqueue(
@@ -5143,7 +5147,7 @@ class ReadableStream {
if (underlyingSourceDict.type === "bytes") {
if (strategy.size !== undefined) {
throw new RangeError(
`${prefix}: When underlying source is "bytes", strategy.size must be 'undefined'`,
);
}
const highWaterMark = extractHighWaterMark(strategy, 0);
@@ -5273,10 +5277,10 @@ class ReadableStream {
const { readable, writable } = transform;
const { preventClose, preventAbort, preventCancel, signal } = options;
if (isReadableStreamLocked(this)) {
throw new TypeError("ReadableStream is already locked");
}
if (isWritableStreamLocked(writable)) {
throw new TypeError("Target WritableStream is already locked");
}
const promise = readableStreamPipeTo(
this,
@@ -5814,7 +5818,7 @@ class ReadableByteStreamController {
}
if (this[_stream][_state] !== "readable") {
throw new TypeError(
"ReadableByteStreamController's stream is not in a readable state",
);
}
readableByteStreamControllerClose(this);
@@ -5846,7 +5850,7 @@ class ReadableByteStreamController {
if (byteLength === 0) {
throw webidl.makeException(
TypeError,
"Length must be non-zero",
prefix,
arg1,
);
@@ -5854,19 +5858,19 @@ class ReadableByteStreamController {
if (getArrayBufferByteLength(buffer) === 0) {
throw webidl.makeException(
TypeError,
"Buffer length must be non-zero",
prefix,
arg1,
);
}
if (this[_closeRequested] === true) {
throw new TypeError(
"Cannot enqueue chunk after a close has been requested",
);
}
if (this[_stream][_state] !== "readable") {
throw new TypeError(
"Cannot enqueue chunk when underlying stream is not readable",
);
}
return readableByteStreamControllerEnqueue(this, chunk);
@@ -6006,7 +6010,7 @@ class ReadableStreamDefaultController {
close() {
webidl.assertBranded(this, ReadableStreamDefaultControllerPrototype);
if (readableStreamDefaultControllerCanCloseOrEnqueue(this) === false) {
throw new TypeError("The stream controller cannot close or enqueue");
}
readableStreamDefaultControllerClose(this);
}
@@ -6021,7 +6025,7 @@ class ReadableStreamDefaultController {
chunk = webidl.converters.any(chunk);
}
if (readableStreamDefaultControllerCanCloseOrEnqueue(this) === false) {
throw new TypeError("The stream controller cannot close or enqueue");
}
readableStreamDefaultControllerEnqueue(this, chunk);
}
@@ -6146,12 +6150,12 @@ class TransformStream {
);
if (transformerDict.readableType !== undefined) {
throw new RangeError(
`${prefix}: readableType transformers not supported`,
);
}
if (transformerDict.writableType !== undefined) {
throw new RangeError(
`${prefix}: writableType transformers not supported`,
);
}
const readableHighWaterMark = extractHighWaterMark(readableStrategy, 0);
@@ -6356,7 +6360,7 @@ class WritableStream {
);
if (underlyingSinkDict.type != null) {
throw new RangeError(
`${prefix}: WritableStream does not support 'type' in the underlying sink`,
);
}
initializeWritableStream(this);
@@ -6483,7 +6487,7 @@ class WritableStreamDefaultWriter {
webidl.assertBranded(this, WritableStreamDefaultWriterPrototype);
if (this[_stream] === undefined) {
throw new TypeError(
"A writable stream is not associated with the writer",
);
}
return writableStreamDefaultWriterGetDesiredSize(this);

View file

@@ -65,7 +65,7 @@ class FileReader extends EventTarget {
// 1. If fr's state is "loading", throw an InvalidStateError DOMException.
if (this[state] === "loading") {
throw new DOMException(
"Invalid FileReader state",
"InvalidStateError",
);
}

View file

@@ -28,7 +28,7 @@ const locationConstructorKey = Symbol("locationConstructorKey");
class Location {
constructor(href = null, key = null) {
if (key != locationConstructorKey) {
throw new TypeError("Illegal constructor");
}
const url = new URL(href);
url.username = "";
@@ -41,7 +41,7 @@ class Location {
},
set() {
throw new DOMException(
`Cannot set "location.hash"`,
"NotSupportedError",
);
},
@@ -54,7 +54,7 @@ class Location {
},
set() {
throw new DOMException(
`Cannot set "location.host"`,
"NotSupportedError",
);
},
@@ -67,7 +67,7 @@ class Location {
},
set() {
throw new DOMException(
`Cannot set "location.hostname"`,
"NotSupportedError",
);
},
@@ -80,7 +80,7 @@ class Location {
},
set() {
throw new DOMException(
`Cannot set "location.href"`,
"NotSupportedError",
);
},
@@ -100,7 +100,7 @@ class Location {
},
set() {
throw new DOMException(
`Cannot set "location.pathname"`,
"NotSupportedError",
);
},
@@ -113,7 +113,7 @@ class Location {
},
set() {
throw new DOMException(
`Cannot set "location.port"`,
"NotSupportedError",
);
},
@@ -126,7 +126,7 @@ class Location {
},
set() {
throw new DOMException(
`Cannot set "location.protocol"`,
"NotSupportedError",
);
},
@@ -139,7 +139,7 @@ class Location {
},
set() {
throw new DOMException(
`Cannot set "location.search"`,
"NotSupportedError",
);
},
@@ -161,7 +161,7 @@ class Location {
__proto__: null,
value: function assign() {
throw new DOMException(
`Cannot call "location.assign()"`,
"NotSupportedError",
);
},
@@ -171,7 +171,7 @@ class Location {
__proto__: null,
value: function reload() {
throw new DOMException(
`Cannot call "location.reload()"`,
"NotSupportedError",
);
},
@@ -181,7 +181,7 @@ class Location {
__proto__: null,
value: function replace() {
throw new DOMException(
`Cannot call "location.replace()"`,
"NotSupportedError",
);
},
@@ -229,7 +229,7 @@ const workerLocationUrls = new SafeWeakMap();
class WorkerLocation {
constructor(href = null, key = null) {
if (key != locationConstructorKey) {
throw new TypeError("Illegal constructor");
}
const url = new URL(href);
url.username = "";
@@ -244,7 +244,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.hash;
},
@@ -256,7 +256,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.host;
},
@@ -268,7 +268,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.hostname;
},
@@ -280,7 +280,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.href;
},
@@ -292,7 +292,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.origin;
},
@@ -304,7 +304,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.pathname;
},
@@ -316,7 +316,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.port;
},
@@ -328,7 +328,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.protocol;
},
@@ -340,7 +340,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
get() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.search;
},
@@ -352,7 +352,7 @@ ObjectDefineProperties(WorkerLocation.prototype, {
value: function toString() {
const url = WeakMapPrototypeGet(workerLocationUrls, this);
if (url == null) {
throw new TypeError("Illegal invocation");
}
return url.href;
},
@@ -414,7 +414,7 @@ const locationDescriptor = {
return location;
},
set() {
throw new DOMException(`Cannot set "location"`, "NotSupportedError");
},
enumerable: true,
};
@@ -422,7 +422,7 @@ const workerLocationDescriptor = {
get() {
if (workerLocation == null) {
throw new Error(
`Assertion: "globalThis.location" must be defined in a worker`,
);
}
return workerLocation;

View file

@@ -123,14 +123,14 @@ function convertMarkToTimestamp(mark) {
const entry = findMostRecent(mark, "mark");
if (!entry) {
throw new DOMException(
`Cannot find mark: "${mark}"`,
"SyntaxError",
);
}
return entry.startTime;
}
if (mark < 0) {
throw new TypeError(`Mark cannot be negative: received ${mark}`);
}
return mark;
}
@@ -261,7 +261,9 @@ class PerformanceMark extends PerformanceEntry {
super(name, "mark", startTime, 0, illegalConstructorKey);
this[webidl.brand] = webidl.brand;
if (startTime < 0) {
throw new TypeError(
`Cannot construct PerformanceMark: startTime cannot be negative, received ${startTime}`,
);
}
this[_detail] = structuredClone(detail);
}
@@ -504,14 +506,14 @@ class Performance extends EventTarget {
ObjectKeys(startOrMeasureOptions).length > 0
) {
if (endMark) {
throw new TypeError('Options cannot be passed with "endMark"');
}
if (
!ReflectHas(startOrMeasureOptions, "start") &&
!ReflectHas(startOrMeasureOptions, "end")
) {
throw new TypeError(
'A "start" or "end" mark must be supplied in options',
);
}
if (
@@ -520,7 +522,7 @@ class Performance extends EventTarget {
ReflectHas(startOrMeasureOptions, "end")
) {
throw new TypeError(
'Cannot specify "start", "end", and "duration" together in options',
);
}
}


@@ -84,35 +84,35 @@ class ImageData {
     if (dataLength === 0) {
       throw new DOMException(
-        "Failed to construct 'ImageData': The input data has zero elements.",
+        "Failed to construct 'ImageData': the input data has zero elements",
         "InvalidStateError",
       );
     }
     if (dataLength % 4 !== 0) {
       throw new DOMException(
-        "Failed to construct 'ImageData': The input data length is not a multiple of 4.",
+        `Failed to construct 'ImageData': the input data length is not a multiple of 4, received ${dataLength}`,
         "InvalidStateError",
       );
     }
     if (sourceWidth < 1) {
       throw new DOMException(
-        "Failed to construct 'ImageData': The source width is zero or not a number.",
+        "Failed to construct 'ImageData': the source width is zero or not a number",
         "IndexSizeError",
       );
     }
     if (webidl.type(sourceHeight) !== "Undefined" && sourceHeight < 1) {
       throw new DOMException(
-        "Failed to construct 'ImageData': The source height is zero or not a number.",
+        "Failed to construct 'ImageData': the source height is zero or not a number",
         "IndexSizeError",
       );
     }
     if (dataLength / 4 % sourceWidth !== 0) {
       throw new DOMException(
-        "Failed to construct 'ImageData': The input data length is not a multiple of (4 * width).",
+        "Failed to construct 'ImageData': the input data length is not a multiple of (4 * width)",
         "IndexSizeError",
       );
     }
@@ -122,7 +122,7 @@ class ImageData {
       (sourceWidth * sourceHeight * 4 !== dataLength)
     ) {
       throw new DOMException(
-        "Failed to construct 'ImageData': The input data length is not equal to (4 * width * height).",
+        "Failed to construct 'ImageData': the input data length is not equal to (4 * width * height)",
         "IndexSizeError",
       );
     }
@@ -159,14 +159,14 @@ class ImageData {
     if (sourceWidth < 1) {
       throw new DOMException(
-        "Failed to construct 'ImageData': The source width is zero or not a number.",
+        "Failed to construct 'ImageData': the source width is zero or not a number",
         "IndexSizeError",
       );
     }
     if (sourceHeight < 1) {
       throw new DOMException(
-        "Failed to construct 'ImageData': The source height is zero or not a number.",
+        "Failed to construct 'ImageData': the source height is zero or not a number",
         "IndexSizeError",
       );
     }


@@ -179,7 +179,11 @@ impl<Fs: DenoResolverFs, TEnv: NodeResolverEnv> ByonmNpmResolver<Fs, TEnv> {
     pkg_json: &PackageJson,
   ) -> Option<String> {
     let deps = pkg_json.resolve_local_package_json_deps();
-    for (key, value) in deps {
+    for (key, value) in deps
+      .dependencies
+      .into_iter()
+      .chain(deps.dev_dependencies.into_iter())
+    {
       if let Ok(value) = value {
         match value {
           PackageJsonDepValue::Req(dep_req) => {
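
Illustrative only, not part of the diff: a standalone sketch of the chained-iteration pattern introduced above, using plain string maps in place of the real `PackageJsonDepValue` results.

```rust
use std::collections::BTreeMap;

// Visit regular dependencies first, then dev dependencies, in a single
// loop and without allocating a combined map.
fn main() {
  let dependencies: BTreeMap<String, String> =
    BTreeMap::from([("chalk".to_string(), "^4.1.2".to_string())]);
  let dev_dependencies: BTreeMap<String, String> =
    BTreeMap::from([("typescript".to_string(), "^5.0.0".to_string())]);
  for (key, value) in dependencies
    .into_iter()
    .chain(dev_dependencies.into_iter())
  {
    println!("{key} -> {value}");
  }
}
```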


@@ -0,0 +1,42 @@
# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

[package]
name = "deno_npm_cache"
version = "0.0.1"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "Helpers for downloading and caching npm dependencies for Deno"

[lib]
path = "lib.rs"

[dependencies]
# todo(dsherret): remove this dependency
anyhow.workspace = true
# todo(dsherret): remove this dependency
deno_core.workspace = true

async-trait.workspace = true
base64.workspace = true
boxed_error.workspace = true
deno_cache_dir.workspace = true
deno_npm.workspace = true
deno_semver.workspace = true
deno_unsync = { workspace = true, features = ["tokio"] }
faster-hex.workspace = true
flate2 = { workspace = true, features = ["zlib-ng-compat"] }
futures.workspace = true
http.workspace = true
log.workspace = true
parking_lot.workspace = true
percent-encoding.workspace = true
rand.workspace = true
ring.workspace = true
serde_json.workspace = true
tar.workspace = true
tempfile = "3.4.0"
thiserror.workspace = true
url.workspace = true


@@ -0,0 +1,6 @@
# deno_npm_cache

[![crates](https://img.shields.io/crates/v/deno_npm_cache.svg)](https://crates.io/crates/deno_npm_cache)
[![docs](https://docs.rs/deno_npm_cache/badge.svg)](https://docs.rs/deno_npm_cache)

Helpers for downloading and caching npm dependencies for Deno.


@@ -1,63 +1,133 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 use std::collections::HashSet;
-use std::fs;
 use std::io::ErrorKind;
 use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;
 
-use deno_ast::ModuleSpecifier;
+use anyhow::bail;
+use anyhow::Context;
+use anyhow::Error as AnyError;
 use deno_cache_dir::npm::NpmCacheDir;
-use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
-use deno_core::error::AnyError;
-use deno_core::parking_lot::Mutex;
-use deno_core::serde_json;
-use deno_core::url::Url;
 use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::registry::NpmPackageInfo;
 use deno_npm::NpmPackageCacheFolderId;
 use deno_semver::package::PackageNv;
 use deno_semver::Version;
+use http::HeaderName;
+use http::HeaderValue;
+use http::StatusCode;
+use parking_lot::Mutex;
+use url::Url;
 
-use crate::args::CacheSetting;
-use crate::cache::CACHE_PERM;
-use crate::util::fs::atomic_write_file_with_retries;
-use crate::util::fs::hard_link_dir_recursive;
-
-pub mod registry_info;
+mod registry_info;
+mod remote;
 mod tarball;
 mod tarball_extract;
 
-pub use registry_info::RegistryInfoDownloader;
+pub use registry_info::RegistryInfoProvider;
 pub use tarball::TarballCache;
+
+// todo(#27198): make both of these private and get the rest of the code
+// using RegistryInfoProvider.
+pub use registry_info::get_package_url;
+pub use remote::maybe_auth_header_for_npm_registry;
+
+#[derive(Debug)]
+pub struct DownloadError {
+  pub status_code: Option<StatusCode>,
+  pub error: AnyError,
+}
+
+impl std::error::Error for DownloadError {
+  fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+    Some(self.error.as_ref())
+  }
+}
+
+impl std::fmt::Display for DownloadError {
+  fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    self.error.fmt(f)
+  }
+}
+
+#[async_trait::async_trait(?Send)]
+pub trait NpmCacheEnv: Send + Sync + 'static {
+  fn exists(&self, path: &Path) -> bool;
+  fn hard_link_dir_recursive(
+    &self,
+    from: &Path,
+    to: &Path,
+  ) -> Result<(), AnyError>;
+  fn atomic_write_file_with_retries(
+    &self,
+    file_path: &Path,
+    data: &[u8],
+  ) -> std::io::Result<()>;
+  async fn download_with_retries_on_any_tokio_runtime(
+    &self,
+    url: Url,
+    maybe_auth_header: Option<(HeaderName, HeaderValue)>,
+  ) -> Result<Option<Vec<u8>>, DownloadError>;
+}
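
Illustrative only, not part of the diff: roughly the smallest environment that could satisfy `NpmCacheEnv`, with the network stubbed out. The CLI's real implementation plugs in its HTTP client, retry logic, and atomic file writes.

```rust
// Hypothetical implementation for illustration; every name other than
// the trait's own items is invented here.
#[derive(Debug)]
struct NaiveEnv;

#[async_trait::async_trait(?Send)]
impl NpmCacheEnv for NaiveEnv {
  fn exists(&self, path: &Path) -> bool {
    path.exists()
  }

  fn hard_link_dir_recursive(
    &self,
    from: &Path,
    to: &Path,
  ) -> Result<(), AnyError> {
    // Stub: a real implementation walks `from` and hard-links every file.
    anyhow::bail!("not implemented: {} -> {}", from.display(), to.display())
  }

  fn atomic_write_file_with_retries(
    &self,
    file_path: &Path,
    data: &[u8],
  ) -> std::io::Result<()> {
    // Not actually atomic: a real implementation writes to a temp file
    // and renames it into place, retrying on transient errors.
    std::fs::write(file_path, data)
  }

  async fn download_with_retries_on_any_tokio_runtime(
    &self,
    _url: Url,
    _maybe_auth_header: Option<(HeaderName, HeaderValue)>,
  ) -> Result<Option<Vec<u8>>, DownloadError> {
    // Stub: treat every URL as a 404-style miss.
    Ok(None)
  }
}
```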
+/// Indicates how cached source files should be handled.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum NpmCacheSetting {
+  /// Only the cached files should be used. Any files not in the cache will
+  /// error. This is the equivalent of `--cached-only` in the CLI.
+  Only,
+  /// No cached source files should be used, and all files should be reloaded.
+  /// This is the equivalent of `--reload` in the CLI.
+  ReloadAll,
+  /// Only some cached resources should be used. This is the equivalent of
+  /// `--reload=npm:chalk`
+  ReloadSome { npm_package_names: Vec<String> },
+  /// The cached source files should be used for local modules. This is the
+  /// default behavior of the CLI.
+  Use,
+}
+
+impl NpmCacheSetting {
+  pub fn should_use_for_npm_package(&self, package_name: &str) -> bool {
+    match self {
+      NpmCacheSetting::ReloadAll => false,
+      NpmCacheSetting::ReloadSome { npm_package_names } => {
+        !npm_package_names.iter().any(|n| n == package_name)
+      }
+      _ => true,
+    }
+  }
+}
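
Illustrative only: how the variants above map onto CLI flags, per their doc comments.

```rust
fn demo_cache_settings() {
  // --reload=npm:chalk re-downloads chalk but keeps using the cache for ms.
  let reload_some = NpmCacheSetting::ReloadSome {
    npm_package_names: vec!["chalk".to_string()],
  };
  assert!(!reload_some.should_use_for_npm_package("chalk"));
  assert!(reload_some.should_use_for_npm_package("ms"));

  // --reload bypasses the cache for every package...
  assert!(!NpmCacheSetting::ReloadAll.should_use_for_npm_package("chalk"));
  // ...while --cached-only and the default setting always use it.
  assert!(NpmCacheSetting::Only.should_use_for_npm_package("chalk"));
  assert!(NpmCacheSetting::Use.should_use_for_npm_package("chalk"));
}
```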
 /// Stores a single copy of npm packages in a cache.
 #[derive(Debug)]
-pub struct NpmCache {
+pub struct NpmCache<TEnv: NpmCacheEnv> {
+  env: Arc<TEnv>,
   cache_dir: Arc<NpmCacheDir>,
-  cache_setting: CacheSetting,
+  cache_setting: NpmCacheSetting,
   npmrc: Arc<ResolvedNpmRc>,
-  /// ensures a package is only downloaded once per run
   previously_reloaded_packages: Mutex<HashSet<PackageNv>>,
 }
 
-impl NpmCache {
+impl<TEnv: NpmCacheEnv> NpmCache<TEnv> {
   pub fn new(
     cache_dir: Arc<NpmCacheDir>,
-    cache_setting: CacheSetting,
+    cache_setting: NpmCacheSetting,
+    env: Arc<TEnv>,
     npmrc: Arc<ResolvedNpmRc>,
   ) -> Self {
     Self {
       cache_dir,
       cache_setting,
+      env,
       previously_reloaded_packages: Default::default(),
       npmrc,
     }
   }
 
-  pub fn cache_setting(&self) -> &CacheSetting {
+  pub fn cache_setting(&self) -> &NpmCacheSetting {
     &self.cache_setting
   }
@@ -118,7 +188,9 @@ impl NpmCache {
     // it seems Windows does an "AccessDenied" error when moving a
     // directory with hard links, so that's why this solution is done
     with_folder_sync_lock(&folder_id.nv, &package_folder, || {
-      hard_link_dir_recursive(&original_package_folder, &package_folder)
+      self
+        .env
+        .hard_link_dir_recursive(&original_package_folder, &package_folder)
     })?;
     Ok(())
   }
@@ -158,7 +230,7 @@ impl NpmCache {
   pub fn resolve_package_folder_id_from_specifier(
     &self,
-    specifier: &ModuleSpecifier,
+    specifier: &Url,
   ) -> Option<NpmPackageCacheFolderId> {
     self
       .cache_dir
@@ -180,7 +252,7 @@ impl NpmCache {
   ) -> Result<Option<NpmPackageInfo>, AnyError> {
     let file_cache_path = self.get_registry_package_info_file_cache_path(name);
-    let file_text = match fs::read_to_string(file_cache_path) {
+    let file_text = match std::fs::read_to_string(file_cache_path) {
       Ok(file_text) => file_text,
       Err(err) if err.kind() == ErrorKind::NotFound => return Ok(None),
       Err(err) => return Err(err.into()),
@@ -195,7 +267,9 @@ impl NpmCache {
   ) -> Result<(), AnyError> {
     let file_cache_path = self.get_registry_package_info_file_cache_path(name);
     let file_text = serde_json::to_string(&package_info)?;
-    atomic_write_file_with_retries(&file_cache_path, file_text, CACHE_PERM)?;
+    self
+      .env
+      .atomic_write_file_with_retries(&file_cache_path, file_text.as_bytes())?;
     Ok(())
   }
@@ -216,7 +290,7 @@ fn with_folder_sync_lock(
   output_folder: &Path,
   action: impl FnOnce() -> Result<(), AnyError>,
 ) -> Result<(), AnyError> {
-  fs::create_dir_all(output_folder).with_context(|| {
+  std::fs::create_dir_all(output_folder).with_context(|| {
     format!("Error creating '{}'.", output_folder.display())
   })?;
@@ -229,7 +303,7 @@ fn with_folder_sync_lock(
   // then wait until the other process finishes with a timeout), but
   // for now this is good enough.
   let sync_lock_path = output_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME);
-  match fs::OpenOptions::new()
+  match std::fs::OpenOptions::new()
     .write(true)
     .create(true)
     .truncate(false)
@@ -257,7 +331,7 @@ fn with_folder_sync_lock(
   match inner(output_folder, action) {
     Ok(()) => Ok(()),
     Err(err) => {
-      if let Err(remove_err) = fs::remove_dir_all(output_folder) {
+      if let Err(remove_err) = std::fs::remove_dir_all(output_folder) {
         if remove_err.kind() != std::io::ErrorKind::NotFound {
           bail!(
             concat!(
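
The `with_folder_sync_lock` hunks above keep the existing marker-file scheme. A condensed, self-contained sketch of that pattern (the lock-file name here is assumed; the real code uses the `NPM_PACKAGE_SYNC_LOCK_FILENAME` constant):

```rust
use std::path::Path;

// Create the folder, hold a marker file while populating it, and delete
// the marker on success. A later run that still finds the marker knows
// the folder was only partially written and can discard it.
fn with_sync_lock(
  output_folder: &Path,
  action: impl FnOnce() -> std::io::Result<()>,
) -> std::io::Result<()> {
  std::fs::create_dir_all(output_folder)?;
  let lock = output_folder.join(".deno_sync_lock"); // assumed file name
  std::fs::OpenOptions::new()
    .write(true)
    .create(true)
    .truncate(false)
    .open(&lock)?;
  action()?;
  std::fs::remove_file(&lock)?;
  Ok(())
}
```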


@@ -3,28 +3,22 @@
 use std::collections::HashMap;
 use std::sync::Arc;
 
-use deno_core::anyhow::anyhow;
-use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
-use deno_core::error::custom_error;
-use deno_core::error::AnyError;
-use deno_core::futures::future::LocalBoxFuture;
-use deno_core::futures::FutureExt;
-use deno_core::parking_lot::Mutex;
-use deno_core::serde_json;
-use deno_core::url::Url;
+use anyhow::anyhow;
+use anyhow::bail;
+use anyhow::Context;
+use anyhow::Error as AnyError;
 use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::registry::NpmPackageInfo;
+use deno_unsync::sync::MultiRuntimeAsyncValueCreator;
+use futures::future::LocalBoxFuture;
+use futures::FutureExt;
+use parking_lot::Mutex;
+use url::Url;
 
-use crate::args::CacheSetting;
-use crate::http_util::HttpClientProvider;
-use crate::npm::common::maybe_auth_header_for_npm_registry;
-use crate::util::progress_bar::ProgressBar;
-use crate::util::sync::MultiRuntimeAsyncValueCreator;
-
-use super::NpmCache;
+use crate::remote::maybe_auth_header_for_npm_registry;
+use crate::NpmCache;
+use crate::NpmCacheEnv;
+use crate::NpmCacheSetting;
 
 // todo(dsherret): create seams and unit test this
 
 type LoadResult = Result<FutureResult, Arc<AnyError>>;
 type LoadFuture = LocalBoxFuture<'static, LoadResult>;
@@ -49,30 +43,31 @@ enum MemoryCacheItem {
   MemoryCached(Result<Option<Arc<NpmPackageInfo>>, Arc<AnyError>>),
 }
 
+// todo(#27198): refactor to store this only in the http cache and also
+// consolidate with CliNpmRegistryApi.
+
 /// Downloads packuments from the npm registry.
 ///
 /// This is shared amongst all the workers.
 #[derive(Debug)]
-pub struct RegistryInfoDownloader {
-  cache: Arc<NpmCache>,
-  http_client_provider: Arc<HttpClientProvider>,
+pub struct RegistryInfoProvider<TEnv: NpmCacheEnv> {
+  // todo(#27198): remove this
+  cache: Arc<NpmCache<TEnv>>,
+  env: Arc<TEnv>,
   npmrc: Arc<ResolvedNpmRc>,
-  progress_bar: ProgressBar,
   memory_cache: Mutex<HashMap<String, MemoryCacheItem>>,
 }
 
-impl RegistryInfoDownloader {
+impl<TEnv: NpmCacheEnv> RegistryInfoProvider<TEnv> {
   pub fn new(
-    cache: Arc<NpmCache>,
-    http_client_provider: Arc<HttpClientProvider>,
+    cache: Arc<NpmCache<TEnv>>,
+    env: Arc<TEnv>,
     npmrc: Arc<ResolvedNpmRc>,
-    progress_bar: ProgressBar,
   ) -> Self {
     Self {
       cache,
-      http_client_provider,
+      env,
       npmrc,
-      progress_bar,
       memory_cache: Default::default(),
     }
   }
@@ -94,8 +89,8 @@ impl RegistryInfoDownloader {
     self: &Arc<Self>,
     name: &str,
   ) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
-    if *self.cache.cache_setting() == CacheSetting::Only {
-      return Err(custom_error(
+    if *self.cache.cache_setting() == NpmCacheSetting::Only {
+      return Err(deno_core::error::custom_error(
         "NotCached",
         format!(
           "An npm specifier not found in cache: \"{name}\", --cached-only is specified."
@@ -167,7 +162,7 @@ impl RegistryInfoDownloader {
   ) -> Result<NpmPackageInfo, AnyError> {
     // this scenario failing should be exceptionally rare so let's
     // deal with improving it only when anyone runs into an issue
-    let maybe_package_info = deno_core::unsync::spawn_blocking({
+    let maybe_package_info = deno_unsync::spawn_blocking({
       let cache = self.cache.clone();
       let name = name.to_string();
       move || cache.load_package_info(&name)
@@ -199,20 +194,18 @@ impl RegistryInfoDownloader {
         return std::future::ready(Err(Arc::new(err))).boxed_local()
       }
     };
-    let guard = self.progress_bar.update(package_url.as_str());
     let name = name.to_string();
     async move {
-      let client = downloader.http_client_provider.get_or_create()?;
-      let maybe_bytes = client
-        .download_with_progress_and_retries(
+      let maybe_bytes = downloader
+        .env
+        .download_with_retries_on_any_tokio_runtime(
           package_url,
           maybe_auth_header,
-          &guard,
         )
         .await?;
       match maybe_bytes {
         Some(bytes) => {
-          let future_result = deno_core::unsync::spawn_blocking(
+          let future_result = deno_unsync::spawn_blocking(
             move || -> Result<FutureResult, AnyError> {
               let package_info = serde_json::from_slice(&bytes)?;
               match downloader.cache.save_package_info(&name, &package_info) {
@@ -241,6 +234,8 @@ impl RegistryInfoDownloader {
   }
 }
 
+// todo(#27198): make this private and only use RegistryInfoProvider in the rest of
+// the code
 pub fn get_package_url(npmrc: &ResolvedNpmRc, name: &str) -> Url {
   let registry_url = npmrc.get_registry_url(name);
   // The '/' character in scoped package names "@scope/name" must be
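
The comment above is cut off by the hunk boundary; it concerns keeping the '/' of a scoped name out of the URL path. A self-contained sketch of that idea (hypothetical helper, not the crate's actual function body):

```rust
// Percent-encode the '/' in "@scope/name" so the packument URL stays a
// single path segment under the registry root.
fn package_url(registry: &url::Url, name: &str) -> url::Url {
  registry.join(&name.replace('/', "%2f")).unwrap()
}

fn main() {
  let registry = url::Url::parse("https://registry.npmjs.org/").unwrap();
  assert_eq!(
    package_url(&registry, "@denotest/a").as_str(),
    "https://registry.npmjs.org/@denotest%2fa"
  );
}
```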


@@ -1,10 +1,10 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+use anyhow::bail;
+use anyhow::Context;
+use anyhow::Error as AnyError;
 use base64::prelude::BASE64_STANDARD;
 use base64::Engine;
-use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
-use deno_core::error::AnyError;
 use deno_npm::npm_rc::RegistryConfig;
 use http::header;


@@ -3,33 +3,26 @@
 use std::collections::HashMap;
 use std::sync::Arc;
 
-use deno_core::anyhow::anyhow;
-use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
-use deno_core::error::custom_error;
-use deno_core::error::AnyError;
-use deno_core::futures::future::LocalBoxFuture;
-use deno_core::futures::FutureExt;
-use deno_core::parking_lot::Mutex;
-use deno_core::url::Url;
+use anyhow::anyhow;
+use anyhow::bail;
+use anyhow::Context;
+use anyhow::Error as AnyError;
 use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::registry::NpmPackageVersionDistInfo;
-use deno_runtime::deno_fs::FileSystem;
 use deno_semver::package::PackageNv;
+use deno_unsync::sync::MultiRuntimeAsyncValueCreator;
+use futures::future::LocalBoxFuture;
+use futures::FutureExt;
 use http::StatusCode;
+use parking_lot::Mutex;
+use url::Url;
 
-use crate::args::CacheSetting;
-use crate::http_util::DownloadError;
-use crate::http_util::HttpClientProvider;
-use crate::npm::common::maybe_auth_header_for_npm_registry;
-use crate::util::progress_bar::ProgressBar;
-use crate::util::sync::MultiRuntimeAsyncValueCreator;
-
-use super::tarball_extract::verify_and_extract_tarball;
-use super::tarball_extract::TarballExtractionMode;
-use super::NpmCache;
+use crate::remote::maybe_auth_header_for_npm_registry;
+use crate::tarball_extract::verify_and_extract_tarball;
+use crate::tarball_extract::TarballExtractionMode;
+use crate::NpmCache;
+use crate::NpmCacheEnv;
+use crate::NpmCacheSetting;
 
 // todo(dsherret): create seams and unit test this
 
 type LoadResult = Result<(), Arc<AnyError>>;
 type LoadFuture = LocalBoxFuture<'static, LoadResult>;
@@ -49,29 +42,23 @@ enum MemoryCacheItem {
 ///
 /// This is shared amongst all the workers.
 #[derive(Debug)]
-pub struct TarballCache {
-  cache: Arc<NpmCache>,
-  fs: Arc<dyn FileSystem>,
-  http_client_provider: Arc<HttpClientProvider>,
+pub struct TarballCache<TEnv: NpmCacheEnv> {
+  cache: Arc<NpmCache<TEnv>>,
+  env: Arc<TEnv>,
   npmrc: Arc<ResolvedNpmRc>,
-  progress_bar: ProgressBar,
   memory_cache: Mutex<HashMap<PackageNv, MemoryCacheItem>>,
 }
 
-impl TarballCache {
+impl<TEnv: NpmCacheEnv> TarballCache<TEnv> {
   pub fn new(
-    cache: Arc<NpmCache>,
-    fs: Arc<dyn FileSystem>,
-    http_client_provider: Arc<HttpClientProvider>,
+    cache: Arc<NpmCache<TEnv>>,
+    env: Arc<TEnv>,
     npmrc: Arc<ResolvedNpmRc>,
-    progress_bar: ProgressBar,
   ) -> Self {
     Self {
       cache,
-      fs,
-      http_client_provider,
+      env,
       npmrc,
-      progress_bar,
       memory_cache: Default::default(),
     }
   }
@@ -144,11 +131,11 @@ impl TarballCache {
       let package_folder =
         tarball_cache.cache.package_folder_for_nv_and_url(&package_nv, registry_url);
       let should_use_cache = tarball_cache.cache.should_use_cache_for_package(&package_nv);
-      let package_folder_exists = tarball_cache.fs.exists_sync(&package_folder);
+      let package_folder_exists = tarball_cache.env.exists(&package_folder);
       if should_use_cache && package_folder_exists {
         return Ok(());
-      } else if tarball_cache.cache.cache_setting() == &CacheSetting::Only {
-        return Err(custom_error(
+      } else if tarball_cache.cache.cache_setting() == &NpmCacheSetting::Only {
+        return Err(deno_core::error::custom_error(
           "NotCached",
           format!(
             "An npm specifier not found in cache: \"{}\", --cached-only is specified.",
@@ -169,15 +156,13 @@ impl TarballCache {
         tarball_cache.npmrc.tarball_config(&tarball_uri);
       let maybe_auth_header = maybe_registry_config.and_then(|c| maybe_auth_header_for_npm_registry(c).ok()?);
 
-      let guard = tarball_cache.progress_bar.update(&dist.tarball);
-      let result = tarball_cache.http_client_provider
-        .get_or_create()?
-        .download_with_progress_and_retries(tarball_uri, maybe_auth_header, &guard)
+      let result = tarball_cache.env
+        .download_with_retries_on_any_tokio_runtime(tarball_uri, maybe_auth_header)
         .await;
       let maybe_bytes = match result {
         Ok(maybe_bytes) => maybe_bytes,
-        Err(DownloadError::BadResponse(err)) => {
-          if err.status_code == StatusCode::UNAUTHORIZED
+        Err(err) => {
+          if err.status_code == Some(StatusCode::UNAUTHORIZED)
             && maybe_registry_config.is_none()
             && tarball_cache.npmrc.get_registry_config(&package_nv.name).auth_token.is_some()
           {
@@ -194,7 +179,6 @@ impl TarballCache {
           }
           return Err(err.into())
         },
-        Err(err) => return Err(err.into()),
       };
       match maybe_bytes {
         Some(bytes) => {
@@ -213,7 +197,7 @@ impl TarballCache {
       };
       let dist = dist.clone();
       let package_nv = package_nv.clone();
-      deno_core::unsync::spawn_blocking(move || {
+      deno_unsync::spawn_blocking(move || {
         verify_and_extract_tarball(
           &package_nv,
           &bytes,
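
The new error handling above replaces the old `DownloadError::BadResponse` match arm with a single `DownloadError` carrying an optional status. A sketch of the retry decision it implements:

```rust
use http::StatusCode;

// Retry with the registry's auth token only when the tarball URL itself
// had no credentials configured but the package's registry does.
fn should_retry_with_registry_auth(
  status_code: Option<StatusCode>,
  has_tarball_config: bool,
  has_registry_auth_token: bool,
) -> bool {
  status_code == Some(StatusCode::UNAUTHORIZED)
    && !has_tarball_config
    && has_registry_auth_token
}

fn main() {
  assert!(should_retry_with_registry_auth(
    Some(StatusCode::UNAUTHORIZED),
    false,
    true
  ));
  assert!(!should_retry_with_registry_auth(None, false, true));
}
```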


@@ -1,16 +1,17 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+use std::borrow::Cow;
 use std::collections::HashSet;
 use std::fs;
 use std::io::ErrorKind;
 use std::path::Path;
 use std::path::PathBuf;
 
+use anyhow::bail;
+use anyhow::Context;
+use anyhow::Error as AnyError;
 use base64::prelude::BASE64_STANDARD;
 use base64::Engine;
-use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
-use deno_core::error::AnyError;
 use deno_npm::registry::NpmPackageVersionDistInfo;
 use deno_npm::registry::NpmPackageVersionDistInfoIntegrity;
 use deno_semver::package::PackageNv;
@@ -18,8 +19,6 @@ use flate2::read::GzDecoder;
 use tar::Archive;
 use tar::EntryType;
 
-use crate::util::path::get_atomic_dir_path;
-
 #[derive(Debug, Copy, Clone)]
 pub enum TarballExtractionMode {
   /// Overwrites the destination directory without deleting any files.
@@ -206,10 +205,30 @@ fn extract_tarball(data: &[u8], output_folder: &Path) -> Result<(), AnyError> {
   Ok(())
 }
 
+fn get_atomic_dir_path(file_path: &Path) -> PathBuf {
+  let rand = gen_rand_path_component();
+  let new_file_name = format!(
+    ".{}_{}",
+    file_path
+      .file_name()
+      .map(|f| f.to_string_lossy())
+      .unwrap_or(Cow::Borrowed("")),
+    rand
+  );
+  file_path.with_file_name(new_file_name)
+}
+
+fn gen_rand_path_component() -> String {
+  (0..4).fold(String::new(), |mut output, _| {
+    output.push_str(&format!("{:02x}", rand::random::<u8>()));
+    output
+  })
+}
+
 #[cfg(test)]
 mod test {
   use deno_semver::Version;
-  use test_util::TempDir;
+  use tempfile::TempDir;
 
   use super::*;
 
@@ -303,21 +322,21 @@ mod test {
   #[test]
   fn rename_with_retries_succeeds_exists() {
-    let temp_dir = TempDir::new();
+    let temp_dir = TempDir::new().unwrap();
     let folder_1 = temp_dir.path().join("folder_1");
     let folder_2 = temp_dir.path().join("folder_2");
-    folder_1.create_dir_all();
-    folder_1.join("a.txt").write("test");
-    folder_2.create_dir_all();
+    std::fs::create_dir_all(&folder_1).unwrap();
+    std::fs::write(folder_1.join("a.txt"), "test").unwrap();
+    std::fs::create_dir_all(&folder_2).unwrap();
 
     // this will not end up in the output as rename_with_retries assumes
     // the folders ending up at the destination are the same
-    folder_2.join("b.txt").write("test2");
+    std::fs::write(folder_2.join("b.txt"), "test2").unwrap();
 
     let dest_folder = temp_dir.path().join("dest_folder");
 
-    rename_with_retries(folder_1.as_path(), dest_folder.as_path()).unwrap();
-    rename_with_retries(folder_2.as_path(), dest_folder.as_path()).unwrap();
+    rename_with_retries(folder_1.as_path(), &dest_folder).unwrap();
+    rename_with_retries(folder_2.as_path(), &dest_folder).unwrap();
 
     assert!(dest_folder.join("a.txt").exists());
     assert!(!dest_folder.join("b.txt").exists());
   }
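
`get_atomic_dir_path` above stages work in a hidden, randomly named sibling directory. A sketch of how extraction can build on that idea (assumes the destination does not exist yet; the random suffix is simplified to a constant):

```rust
use std::path::Path;

// Extract into a hidden sibling, then rename into place, so readers
// never observe a half-written package folder.
fn extract_atomically(
  dest: &Path,
  extract: impl FnOnce(&Path) -> std::io::Result<()>,
) -> std::io::Result<()> {
  let staging = dest.with_file_name(format!(
    ".{}_staging", // real code appends random hex via gen_rand_path_component
    dest.file_name().unwrap().to_string_lossy()
  ));
  extract(&staging)?;
  std::fs::rename(&staging, dest)
}

fn main() -> std::io::Result<()> {
  let dest = std::env::temp_dir().join("npm_pkg_example");
  let _ = std::fs::remove_dir_all(&dest); // ensure a fresh destination
  extract_atomically(&dest, |staging| {
    std::fs::create_dir_all(staging)?;
    std::fs::write(staging.join("package.json"), "{}")
  })?;
  assert!(dest.join("package.json").exists());
  Ok(())
}
```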


@@ -0,0 +1,9 @@
This crate is a work in progress:
1. Remove `deno_core` dependency.
1. Remove `anyhow` dependency.
1. Add a clippy.toml file that bans accessing the file system directory and
instead does it through a trait.
1. Make this crate work in Wasm.
1. Refactor to store npm packument in a single place:
https://github.com/denoland/deno/issues/27198


@@ -19,7 +19,7 @@ const encoder = new TextEncoder();
 
 const NODE_VERSION = version;
 
-const NODE_IGNORED_TEST_DIRS = [
+export const NODE_IGNORED_TEST_DIRS = [
   "addons",
   "async-hooks",
   "cctest",
@@ -40,13 +40,13 @@ const NODE_IGNORED_TEST_DIRS = [
   "wpt",
 ];
 
-const VENDORED_NODE_TEST = new URL("./suite/test/", import.meta.url);
-const NODE_COMPAT_TEST_DEST_URL = new URL(
+export const VENDORED_NODE_TEST = new URL("./suite/test/", import.meta.url);
+export const NODE_COMPAT_TEST_DEST_URL = new URL(
   "../test/",
   import.meta.url,
 );
 
-async function getNodeTests(): Promise<string[]> {
+export async function getNodeTests(): Promise<string[]> {
   const paths: string[] = [];
   const rootPath = VENDORED_NODE_TEST.href.slice(7);
   for await (
@@ -61,7 +61,7 @@ async function getNodeTests(): Promise<string[]> {
   return paths.sort();
 }
 
-function getDenoTests() {
+export function getDenoTests() {
   return Object.entries(config.tests)
     .filter(([testDir]) => !NODE_IGNORED_TEST_DIRS.includes(testDir))
     .flatMap(([testDir, tests]) => tests.map((test) => testDir + "/" + test));

File diff suppressed because one or more lines are too long


@@ -15,6 +15,10 @@
     {
       "args": "run -A main.ts uncaught.ts",
       "output": "uncaught.out"
+    },
+    {
+      "args": "run -A main.ts metric.ts",
+      "output": "metric.out"
     }
   ]
 }


@@ -188,5 +188,6 @@
       "traceId": "00000000000000000000000000000003",
       "spanId": "1000000000000002"
     }
-  ]
+  ],
+  "metrics": []
 }


@@ -15,5 +15,6 @@
       "traceId": "",
       "spanId": ""
     }
-  ]
+  ],
+  "metrics": []
 }


@@ -3,6 +3,7 @@
 const data = {
   spans: [],
   logs: [],
+  metrics: [],
 };
 
 const server = Deno.serve(
@@ -45,6 +46,11 @@ const server = Deno.serve(
           data.spans.push(...sSpans.spans);
         });
       });
+      body.resourceMetrics?.forEach((rMetrics) => {
+        rMetrics.scopeMetrics.forEach((sMetrics) => {
+          data.metrics.push(...sMetrics.metrics);
+        });
+      });
       return Response.json({ partialSuccess: {} }, { status: 200 });
     },
   },


@@ -0,0 +1,124 @@
{
"spans": [],
"logs": [],
"metrics": [
{
"name": "counter",
"description": "Example of a Counter",
"unit": "",
"metadata": [],
"sum": {
"dataPoints": [
{
"attributes": [
{
"key": "attribute",
"value": {
"doubleValue": 1
}
}
],
"startTimeUnixNano": "[WILDCARD]",
"timeUnixNano": "[WILDCARD]",
"exemplars": [],
"flags": 0,
"asDouble": 1
}
],
"aggregationTemporality": 2,
"isMonotonic": true
}
},
{
"name": "up_down_counter",
"description": "Example of a UpDownCounter",
"unit": "",
"metadata": [],
"sum": {
"dataPoints": [
{
"attributes": [
{
"key": "attribute",
"value": {
"doubleValue": 1
}
}
],
"startTimeUnixNano": "[WILDCARD]",
"timeUnixNano": "[WILDCARD]",
"exemplars": [],
"flags": 0,
"asDouble": -1
}
],
"aggregationTemporality": 2,
"isMonotonic": false
}
},
{
"name": "histogram",
"description": "Example of a Histogram",
"unit": "",
"metadata": [],
"histogram": {
"dataPoints": [
{
"attributes": [
{
"key": "attribute",
"value": {
"doubleValue": 1
}
}
],
"startTimeUnixNano": "[WILDCARD]",
"timeUnixNano": "[WILDCARD]",
"count": 1,
"sum": 1,
"bucketCounts": [
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"explicitBounds": [
0,
5,
10,
25,
50,
75,
100,
250,
500,
750,
1000,
2500,
5000,
7500,
10000
],
"exemplars": [],
"flags": 0,
"min": 1,
"max": 1
}
],
"aggregationTemporality": 2
}
}
]
}


@@ -0,0 +1,34 @@
import {
  MeterProvider,
  PeriodicExportingMetricReader,
} from "npm:@opentelemetry/sdk-metrics@1.28.0";

const meterProvider = new MeterProvider();

meterProvider.addMetricReader(
  new PeriodicExportingMetricReader({
    exporter: new Deno.telemetry.MetricExporter(),
    exportIntervalMillis: 100,
  }),
);

const meter = meterProvider.getMeter("m");

const counter = meter.createCounter("counter", {
  description: "Example of a Counter",
});

const upDownCounter = meter.createUpDownCounter("up_down_counter", {
  description: "Example of a UpDownCounter",
});

const histogram = meter.createHistogram("histogram", {
  description: "Example of a Histogram",
});

const attributes = { attribute: 1 };
counter.add(1, attributes);
upDownCounter.add(-1, attributes);
histogram.record(1, attributes);

await meterProvider.forceFlush();


@@ -15,5 +15,6 @@
       "traceId": "",
       "spanId": ""
     }
-  ]
+  ],
+  "metrics": []
 }


@@ -33,5 +33,6 @@ throw new Error("uncaught");
       "traceId": "",
       "spanId": ""
     }
-  ]
+  ],
+  "metrics": []
 }


@@ -12,6 +12,10 @@
     "broken": {
       "args": "fmt broken.html",
       "output": "broken.out"
+    },
+    "with_js": {
+      "args": "fmt --check with_js.html",
+      "output": "Checked 1 file\n"
     }
   }
 }


@@ -0,0 +1,9 @@
<html>
<body>
<script>
/* some multi-line comment
with function below it */
someFunc();
</script>
</body>
</html>


@@ -1,4 +1,5 @@
 {
+  "tempDir": true,
   "tests": {
     "cjs_with_deps": {
       "args": "run --allow-read --allow-env main.js",


@@ -3,7 +3,7 @@ type: JavaScript
 dependencies: 14 unique
 size: [WILDCARD]
 
-file:///[WILDCARD]/cjs_with_deps/main.js ([WILDCARD])
+file:///[WILDCARD]/main.js ([WILDCARD])
 ├─┬ npm:/chalk@4.1.2 ([WILDCARD])
 │ ├─┬ npm:/ansi-styles@4.3.0 ([WILDCARD])
 │ │ └─┬ npm:/color-convert@2.0.1 ([WILDCARD])


@@ -0,0 +1,10 @@
{
"tempDir": true,
"steps": [{
"args": "install",
"output": "[WILDCARD]"
}, {
"args": "run e/main.ts",
"output": "main.out"
}]
}


@@ -0,0 +1,3 @@
export function sayHello() {
console.log("Hello from a!");
}


@@ -0,0 +1,7 @@
{
"name": "@denotest/a",
"version": "1.0.0",
"exports": {
".": "./mod.ts"
}
}


@@ -0,0 +1,3 @@
export function sayHello() {
console.log("Hello from b!");
}


@@ -0,0 +1,7 @@
{
"name": "@denotest/b",
"version": "1.0.0",
"exports": {
".": "./mod.ts"
}
}


@@ -0,0 +1,3 @@
export function sayHello() {
console.log("Hello from c!");
}


@@ -0,0 +1,7 @@
{
"name": "@denotest/c",
"version": "1.0.0",
"exports": {
".": "./mod.ts"
}
}


@@ -0,0 +1,3 @@
export function sayHello() {
console.log("Hello from d!");
}


@@ -0,0 +1,7 @@
{
"name": "@denotest/d",
"version": "1.2.3",
"exports": {
".": "./mod.ts"
}
}


@@ -0,0 +1,9 @@
import * as a from "@denotest/a";
import * as b from "@denotest/b";
import * as c from "@denotest/c";
import * as d from "@denotest/d";

a.sayHello();
b.sayHello();
c.sayHello();
d.sayHello();


@@ -0,0 +1,10 @@
{
"name": "@denotest/e",
"version": "1.0.0",
"dependencies": {
"@denotest/a": "workspace:*",
"@denotest/b": "workspace:~",
"@denotest/c": "workspace:^",
"@denotest/d": "workspace:1.2.3"
}
}


@@ -0,0 +1,4 @@
Hello from a!
Hello from b!
Hello from c!
Hello from d!


@@ -0,0 +1,6 @@
// should resolve these as bare specifiers within the workspace
import * as a from "@denotest/a";
import * as c from "@denotest/c";

a.sayHello();
c.sayHello();


@@ -0,0 +1,9 @@
{
"workspaces": [
"./a",
"./b",
"./c",
"./d",
"./e"
]
}


@@ -11,5 +11,5 @@ Location {
   protocol: "https:",
   search: "?baz"
 }
-NotSupportedError: Cannot set "location".
-NotSupportedError: Cannot set "location.hostname".
+NotSupportedError: Cannot set "location"
+NotSupportedError: Cannot set "location.hostname"


@@ -53,6 +53,29 @@ Deno.test({
   },
 });
 
+Deno.test("ASYNC: read dirs recursively", async () => {
+  const dir = Deno.makeTempDirSync();
+  Deno.writeTextFileSync(join(dir, "file1.txt"), "hi");
+  Deno.mkdirSync(join(dir, "sub"));
+  Deno.writeTextFileSync(join(dir, "sub", "file2.txt"), "hi");
+
+  try {
+    const files = await new Promise<string[]>((resolve, reject) => {
+      readdir(dir, { recursive: true }, (err, files) => {
+        if (err) reject(err);
+        resolve(files.map((f) => f.toString()));
+      });
+    });
+
+    assertEqualsArrayAnyOrder(
+      files,
+      ["file1.txt", "sub", join("sub", "file2.txt")],
+    );
+  } finally {
+    Deno.removeSync(dir, { recursive: true });
+  }
+});
+
 Deno.test({
   name: "SYNC: reading empty the directory",
   fn() {
@@ -75,6 +98,26 @@ Deno.test({
   },
 });
 
+Deno.test("SYNC: read dirs recursively", () => {
+  const dir = Deno.makeTempDirSync();
+  Deno.writeTextFileSync(join(dir, "file1.txt"), "hi");
+  Deno.mkdirSync(join(dir, "sub"));
+  Deno.writeTextFileSync(join(dir, "sub", "file2.txt"), "hi");
+
+  try {
+    const files = readdirSync(dir, { recursive: true }).map((f) =>
+      f.toString()
+    );
+
+    assertEqualsArrayAnyOrder(
+      files,
+      ["file1.txt", "sub", join("sub", "file2.txt")],
+    );
+  } finally {
+    Deno.removeSync(dir, { recursive: true });
+  }
+});
+
 Deno.test("[std/node/fs] readdir callback isn't called twice if error is thrown", async () => {
   // The correct behaviour is not to catch any errors thrown,
   // but that means there'll be an uncaught error and the test will fail.