diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index c22f87a861..bc3f15380b 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify"; // Bump this number when you want to purge the cache. // Note: the tools/release/01_bump_crate_versions.ts script will update this version // automatically via regex, so ensure that this line maintains this format. -const cacheVersion = 30; +const cacheVersion = 32; const ubuntuX86Runner = "ubuntu-24.04"; const ubuntuX86XlRunner = "ubuntu-24.04-xl"; diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa7500f7c6..cc1aa89669 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -184,8 +184,8 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '30-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' - restore-keys: '30-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-' + key: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' + restore-keys: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-' if: '!(matrix.skip)' - uses: dsherret/rust-toolchain-file@v1 if: '!(matrix.skip)' @@ -379,7 +379,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '30-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -689,7 +689,7 @@ jobs: !./target/*/gn_root !./target/*/*.zip !./target/*/*.tar.gz - key: '30-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-24.04 diff --git a/Cargo.lock b/Cargo.lock index 5d3ec21962..9ee40077a6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -39,9 +39,9 @@ dependencies = [ [[package]] name = "aead-gcm-stream" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4947a169074c7e038fa43051d1c4e073f4488b0e4b0a30658f1e1a1b06449ce8" +checksum = "e70c8dec860340effb00f6945c49c0daaa6dac963602750db862eabb74bf7886" dependencies = [ "aead", "aes", @@ -380,7 +380,7 @@ dependencies = [ "rustversion", "serde", "sync_wrapper", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", ] @@ -596,9 +596,9 @@ dependencies = [ [[package]] name = "boxed_error" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69aae56aaf59d1994b902ed5c0c79024012bdc2426741def75a635999a030e7e" +checksum = "17d4f95e880cfd28c4ca5a006cf7f6af52b4bcb7b5866f573b2faa126fb7affb" dependencies = [ "quote", "syn 2.0.87", @@ -670,13 +670,35 @@ checksum = "1bf2a5fb3207c12b5d208ebc145f967fea5cac41a021c37417ccc31ba40f39ee" [[package]] name = "capacity_builder" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab97838e07d98bdddf1e7e191ffe3c718cba7477c0b6607fdfb96ecd696202df" +checksum = "58ec49028cb308564429cd8fac4ef21290067a0afe8f5955330a8d487d0d790c" dependencies = [ "itoa", ] +[[package]] +name = "capacity_builder" +version 
= "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f2d24a6dcf0cd402a21b65d35340f3a49ff3475dc5fdac91d22d2733e6641c6" +dependencies = [ + "capacity_builder_macros", + "ecow", + "hipstr", + "itoa", +] + +[[package]] +name = "capacity_builder_macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b4a6cae9efc04cc6cbb8faf338d2c497c165c83e74509cf4dbedea948bbf6e5" +dependencies = [ + "quote", + "syn 2.0.87", +] + [[package]] name = "caseless" version = "0.2.1" @@ -728,6 +750,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" version = "0.4.37" @@ -839,6 +867,7 @@ dependencies = [ "regex", "reqwest", "serde", + "sys_traits", "test_server", "tokio", "url", @@ -1193,9 +1222,9 @@ checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" [[package]] name = "data-url" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41b319d1b62ffbd002e057f36bebd1f42b9f97927c9577461d855f3513c4289f" +checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" [[package]] name = "debug-ignore" @@ -1221,8 +1250,10 @@ dependencies = [ "async-trait", "base64 0.21.7", "bincode", + "boxed_error", "bytes", "cache_control", + "capacity_builder 0.5.0", "chrono", "clap", "clap_complete", @@ -1237,13 +1268,14 @@ dependencies = [ "deno_config", "deno_core", "deno_doc", + "deno_error", "deno_graph", "deno_lint", "deno_lockfile", "deno_npm", "deno_npm_cache", "deno_package_json", - "deno_path_util", + "deno_path_util 0.3.0", "deno_resolver", "deno_runtime", "deno_semver", @@ -1309,6 +1341,7 @@ dependencies = [ "spki", "sqlformat", "strsim", + "sys_traits", "tar", "tempfile", "test_server", @@ -1388,7 +1421,7 @@ dependencies = [ [[package]] name = "deno_bench_util" -version = "0.176.0" +version = "0.178.0" dependencies = [ "bencher", "deno_core", @@ -1397,7 +1430,7 @@ dependencies = [ [[package]] name = "deno_broadcast_channel" -version = "0.176.0" +version = "0.178.0" dependencies = [ "async-trait", "deno_core", @@ -1408,7 +1441,7 @@ dependencies = [ [[package]] name = "deno_cache" -version = "0.114.0" +version = "0.116.0" dependencies = [ "async-trait", "deno_core", @@ -1421,13 +1454,21 @@ dependencies = [ [[package]] name = "deno_cache_dir" -version = "0.14.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cca43605c8cbce6c6787e0daf227864487c07c2b31d438c0bf43d1b38da94b7f" +checksum = "e73ed17f285731a23df9779ca1e0e721de866db6776ed919ebd9235e0a107c4c" dependencies = [ + "async-trait", "base32", + "base64 0.21.7", + "boxed_error", + "cache_control", + "chrono", + "data-url", + "deno_error", "deno_media_type", - "deno_path_util", + "deno_path_util 0.3.0", + "http 1.1.0", "indexmap 2.3.0", "log", "once_cell", @@ -1435,13 +1476,14 @@ dependencies = [ "serde", "serde_json", "sha2", + "sys_traits", "thiserror 1.0.64", "url", ] [[package]] name = "deno_canvas" -version = "0.51.0" +version = "0.53.0" dependencies = [ "deno_core", "deno_webgpu", @@ -1452,13 +1494,13 @@ dependencies = [ [[package]] name = "deno_config" -version = "0.39.3" +version = "0.42.0" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce717af3fe6788dae63965d58d5637fd62be8fe4f345f189137ffc06c51837d2" +checksum = "b45aaf31e58ca915d5c0746bf8e2d07b94635154ad9e5afe5ff265cae6187b19" dependencies = [ "anyhow", "deno_package_json", - "deno_path_util", + "deno_path_util 0.3.0", "deno_semver", "glob", "ignore", @@ -1470,22 +1512,23 @@ dependencies = [ "phf", "serde", "serde_json", + "sys_traits", "thiserror 1.0.64", "url", ] [[package]] name = "deno_console" -version = "0.182.0" +version = "0.184.0" dependencies = [ "deno_core", ] [[package]] name = "deno_core" -version = "0.326.0" +version = "0.327.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed157162dc5320a2b46ffeeaec24788339df0f2437cfaea78a8d82696715ad7f" +checksum = "eaf8dff204b9c2415deb47b9f30d4d38b0925d0d88f1f9074e8e76f59e6d7ded" dependencies = [ "anyhow", "az", @@ -1493,7 +1536,7 @@ dependencies = [ "bit-set", "bit-vec", "bytes", - "capacity_builder", + "capacity_builder 0.1.3", "cooked-waker", "deno_core_icudata", "deno_ops", @@ -1525,7 +1568,7 @@ checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695" [[package]] name = "deno_cron" -version = "0.62.0" +version = "0.64.0" dependencies = [ "anyhow", "async-trait", @@ -1538,7 +1581,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.196.0" +version = "0.198.0" dependencies = [ "aes", "aes-gcm", @@ -1584,7 +1627,7 @@ dependencies = [ "comrak", "deno_ast", "deno_graph", - "deno_path_util", + "deno_path_util 0.2.2", "handlebars", "html-escape", "import_map", @@ -1612,6 +1655,7 @@ dependencies = [ "libc", "serde", "serde_json", + "url", ] [[package]] @@ -1627,18 +1671,19 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.206.0" +version = "0.208.0" dependencies = [ "base64 0.21.7", "bytes", "data-url", "deno_core", - "deno_path_util", + "deno_path_util 0.3.0", "deno_permissions", "deno_tls", "dyn-clone", "error_reporter", "fast-socks5", + "h2 0.4.4", "hickory-resolver", "http 1.1.0", "http-body-util", @@ -1655,14 +1700,14 @@ dependencies = [ "tokio-rustls", "tokio-socks", "tokio-util", - "tower", + "tower 0.5.2", "tower-http", "tower-service", ] [[package]] name = "deno_ffi" -version = "0.169.0" +version = "0.171.0" dependencies = [ "deno_core", "deno_permissions", @@ -1682,14 +1727,14 @@ dependencies = [ [[package]] name = "deno_fs" -version = "0.92.0" +version = "0.94.0" dependencies = [ "async-trait", "base32", "boxed_error", "deno_core", "deno_io", - "deno_path_util", + "deno_path_util 0.3.0", "deno_permissions", "filetime", "junction", @@ -1705,14 +1750,16 @@ dependencies = [ [[package]] name = "deno_graph" -version = "0.86.3" +version = "0.86.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc78ed0b4bbcb4197300f0d6e7d1edc2d2c5019cdb9dedba7ff229158441885b" +checksum = "83af194ca492ea7b624d21055f933676d3f3d27586de93be31c8f1babcc73510" dependencies = [ "anyhow", "async-trait", + "capacity_builder 0.5.0", "data-url", "deno_ast", + "deno_path_util 0.3.0", "deno_semver", "deno_unsync", "encoding_rs", @@ -1727,6 +1774,7 @@ dependencies = [ "serde", "serde_json", "sha2", + "sys_traits", "thiserror 2.0.3", "twox-hash", "url", @@ -1735,7 +1783,7 @@ dependencies = [ [[package]] name = "deno_http" -version = "0.180.0" +version = "0.182.0" dependencies = [ "async-compression", "async-trait", @@ -1774,7 +1822,7 @@ dependencies = [ [[package]] name = "deno_io" -version = "0.92.0" +version = "0.94.0" dependencies = [ "async-trait", "deno_core", @@ 
-1795,7 +1843,7 @@ dependencies = [ [[package]] name = "deno_kv" -version = "0.90.0" +version = "0.92.0" dependencies = [ "anyhow", "async-trait", @@ -1805,7 +1853,7 @@ dependencies = [ "chrono", "deno_core", "deno_fetch", - "deno_path_util", + "deno_path_util 0.3.0", "deno_permissions", "deno_tls", "denokv_proto", @@ -1845,9 +1893,9 @@ dependencies = [ [[package]] name = "deno_lockfile" -version = "0.23.2" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "559c19feb00af0c34f0bd4a20e56e12463fafd5c5069d6005f3ce33008027eea" +checksum = "632e835a53ed667d62fdd766c5780fe8361c831d3e3fbf1a760a0b7896657587" dependencies = [ "deno_semver", "serde", @@ -1857,9 +1905,9 @@ dependencies = [ [[package]] name = "deno_media_type" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcf552fbdedbe81c89705349d7d2485c7051382b000dfddbdbf7fc25931cf83" +checksum = "eaa135b8a9febc9a51c16258e294e268a1276750780d69e46edb31cced2826e4" dependencies = [ "data-url", "serde", @@ -1868,7 +1916,7 @@ dependencies = [ [[package]] name = "deno_napi" -version = "0.113.0" +version = "0.115.0" dependencies = [ "deno_core", "deno_permissions", @@ -1896,7 +1944,7 @@ dependencies = [ [[package]] name = "deno_net" -version = "0.174.0" +version = "0.176.0" dependencies = [ "deno_core", "deno_permissions", @@ -1904,6 +1952,7 @@ dependencies = [ "hickory-proto", "hickory-resolver", "pin-project", + "quinn", "rustls-tokio-stream", "serde", "socket2", @@ -1913,7 +1962,7 @@ dependencies = [ [[package]] name = "deno_node" -version = "0.119.0" +version = "0.122.0" dependencies = [ "aead-gcm-stream", "aes", @@ -1933,7 +1982,7 @@ dependencies = [ "deno_media_type", "deno_net", "deno_package_json", - "deno_path_util", + "deno_path_util 0.3.0", "deno_permissions", "deno_whoami", "der", @@ -1993,6 +2042,7 @@ dependencies = [ "sm3", "spki", "stable_deref_trait", + "sys_traits", "thiserror 2.0.3", "tokio", "tokio-eld", @@ -2007,12 +2057,13 @@ dependencies = [ [[package]] name = "deno_npm" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f125a5dba7839c46394a0a9c835da9fe60f5f412587ab4956a76492a1cc6a8" +checksum = "5f818ad5dc4c206b50b5cfa6f10b4b94b127e15c8342c152768eba40c225ca23" dependencies = [ - "anyhow", "async-trait", + "capacity_builder 0.5.0", + "deno_error", "deno_lockfile", "deno_semver", "futures", @@ -2026,7 +2077,7 @@ dependencies = [ [[package]] name = "deno_npm_cache" -version = "0.2.0" +version = "0.3.0" dependencies = [ "anyhow", "async-trait", @@ -2034,7 +2085,9 @@ dependencies = [ "boxed_error", "deno_cache_dir", "deno_core", + "deno_error", "deno_npm", + "deno_path_util 0.3.0", "deno_semver", "deno_unsync", "faster-hex", @@ -2047,6 +2100,7 @@ dependencies = [ "rand", "ring", "serde_json", + "sys_traits", "tar", "tempfile", "thiserror 2.0.3", @@ -2055,9 +2109,9 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.202.0" +version = "0.203.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dd8ac1af251e292388e516dd339b9a3b982a6d1e7f8644c08e34671ca39003c" +checksum = "b146ca74cac431843486ade58e2accc16c11315fb2c6934590a52a73c56b7ec3" dependencies = [ "proc-macro-rules", "proc-macro2", @@ -2071,38 +2125,54 @@ dependencies = [ [[package]] name = "deno_package_json" -version = "0.2.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"80b0a3d81c592624a1ae15332a04b4dc2b7c163ef1dfc7c60171f736d1babdf5" +checksum = "e1d3c0f699ba2040669204ce24ab73720499fc290af843e4ce0fc8a9b3d67735" dependencies = [ + "boxed_error", "deno_error", - "deno_path_util", + "deno_path_util 0.3.0", "deno_semver", "indexmap 2.3.0", "serde", "serde_json", + "sys_traits", "thiserror 2.0.3", "url", ] [[package]] name = "deno_path_util" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff25f6e08e7a0214bbacdd6f7195c7f1ebcd850c87a624e4ff06326b68b42d99" +checksum = "b02c7d341e1b2cf089daff0f4fb2b4be8f3b5511b1d96040b3f7ed63a66c737b" dependencies = [ + "deno_error", "percent-encoding", - "thiserror 1.0.64", + "thiserror 2.0.3", + "url", +] + +[[package]] +name = "deno_path_util" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "420e8211aaba7fde83ccaa9a5dad855c3b940ed988d70c95159acd600a70dc87" +dependencies = [ + "deno_error", + "percent-encoding", + "sys_traits", + "thiserror 2.0.3", "url", ] [[package]] name = "deno_permissions" -version = "0.42.0" +version = "0.43.0" dependencies = [ - "capacity_builder", + "capacity_builder 0.5.0", "deno_core", - "deno_path_util", + "deno_path_util 0.3.0", "deno_terminal 0.2.0", "fqdn", "libc", @@ -2117,7 +2187,7 @@ dependencies = [ [[package]] name = "deno_resolver" -version = "0.14.0" +version = "0.15.0" dependencies = [ "anyhow", "base32", @@ -2126,9 +2196,10 @@ dependencies = [ "deno_config", "deno_media_type", "deno_package_json", - "deno_path_util", + "deno_path_util 0.3.0", "deno_semver", "node_resolver", + "sys_traits", "test_server", "thiserror 2.0.3", "url", @@ -2136,7 +2207,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.191.0" +version = "0.192.0" dependencies = [ "color-print", "deno_ast", @@ -2156,7 +2227,7 @@ dependencies = [ "deno_napi", "deno_net", "deno_node", - "deno_path_util", + "deno_path_util 0.3.0", "deno_permissions", "deno_telemetry", "deno_terminal 0.2.0", @@ -2191,6 +2262,7 @@ dependencies = [ "serde", "signal-hook", "signal-hook-registry", + "sys_traits", "tempfile", "test_server", "thiserror 2.0.3", @@ -2205,11 +2277,14 @@ dependencies = [ [[package]] name = "deno_semver" -version = "0.6.1" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d1259270d66a5e6d29bb75c9289656541874f79ae9ff6c9f1c790846d5c07ba" +checksum = "4775271f9b5602482698f76d24ea9ed8ba27af7f587a7e9a876916300c542435" dependencies = [ + "capacity_builder 0.5.0", "deno_error", + "ecow", + "hipstr", "monch", "once_cell", "serde", @@ -2237,7 +2312,7 @@ dependencies = [ [[package]] name = "deno_telemetry" -version = "0.4.0" +version = "0.6.0" dependencies = [ "async-trait", "deno_core", @@ -2278,7 +2353,7 @@ dependencies = [ [[package]] name = "deno_tls" -version = "0.169.0" +version = "0.171.0" dependencies = [ "deno_core", "deno_native_certs", @@ -2311,7 +2386,7 @@ dependencies = [ "serde_json", "tokio", "tokio-util", - "tower", + "tower 0.4.13", "tracing", ] @@ -2328,7 +2403,7 @@ dependencies = [ [[package]] name = "deno_url" -version = "0.182.0" +version = "0.184.0" dependencies = [ "deno_bench_util", "deno_console", @@ -2340,7 +2415,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.213.0" +version = "0.215.0" dependencies = [ "async-trait", "base64-simd 0.8.0", @@ -2362,7 +2437,7 @@ dependencies = [ [[package]] name = "deno_webgpu" -version = "0.149.0" +version = "0.151.0" dependencies = [ "deno_core", "raw-window-handle", @@ -2375,7 
+2450,7 @@ dependencies = [ [[package]] name = "deno_webidl" -version = "0.182.0" +version = "0.184.0" dependencies = [ "deno_bench_util", "deno_core", @@ -2383,7 +2458,7 @@ dependencies = [ [[package]] name = "deno_websocket" -version = "0.187.0" +version = "0.189.0" dependencies = [ "bytes", "deno_core", @@ -2405,7 +2480,7 @@ dependencies = [ [[package]] name = "deno_webstorage" -version = "0.177.0" +version = "0.179.0" dependencies = [ "deno_core", "deno_web", @@ -2876,6 +2951,15 @@ dependencies = [ "spki", ] +[[package]] +name = "ecow" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e42fc0a93992b20c58b99e59d61eaf1635a25bfbe49e4275c34ba0aee98119ba" +dependencies = [ + "serde", +] + [[package]] name = "ed25519" version = "2.2.3" @@ -3812,6 +3896,17 @@ dependencies = [ "tracing", ] +[[package]] +name = "hipstr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97971ffc85d4c98de12e2608e992a43f5294ebb625fdb045b27c731b64c4c6d6" +dependencies = [ + "serde", + "serde_bytes", + "sptr", +] + [[package]] name = "hkdf" version = "0.12.4" @@ -4012,9 +4107,9 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ "hyper 1.4.1", "hyper-util", @@ -4541,9 +4636,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.153" +version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "libffi" @@ -4919,7 +5014,7 @@ dependencies = [ [[package]] name = "napi_sym" -version = "0.112.0" +version = "0.114.0" dependencies = [ "quote", "serde", @@ -4974,20 +5069,21 @@ dependencies = [ [[package]] name = "node_resolver" -version = "0.21.0" +version = "0.22.0" dependencies = [ "anyhow", "async-trait", "boxed_error", "deno_media_type", "deno_package_json", - "deno_path_util", + "deno_path_util 0.3.0", "futures", "lazy-regex", "once_cell", "path-clean", "regex", "serde_json", + "sys_traits", "thiserror 2.0.3", "tokio", "url", @@ -5895,49 +5991,54 @@ dependencies = [ [[package]] name = "quinn" -version = "0.11.2" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad" +checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" dependencies = [ "bytes", "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 1.1.0", + "rustc-hash 2.0.0", "rustls", - "thiserror 1.0.64", + "socket2", + "thiserror 2.0.3", "tokio", "tracing", ] [[package]] name = "quinn-proto" -version = "0.11.8" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" +checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" dependencies = [ "bytes", + "getrandom", "rand", "ring", "rustc-hash 2.0.0", "rustls", + "rustls-pki-types", "slab", - "thiserror 1.0.64", + "thiserror 2.0.3", "tinyvec", "tracing", + "web-time", ] [[package]] name = "quinn-udp" -version = "0.5.2" +version = "0.5.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46" +checksum = "52cd4b1eff68bf27940dd39811292c49e007f4d0b4c357358dc9b0197be6b527" dependencies = [ + "cfg_aliases 0.2.1", "libc", "once_cell", "socket2", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -6414,6 +6515,9 @@ name = "rustls-pki-types" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" +dependencies = [ + "web-time", +] [[package]] name = "rustls-tokio-stream" @@ -6697,9 +6801,9 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.235.0" +version = "0.236.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07afd8b67b4a442ecc2823038473ac0e9e5682de93c213323b60661afdd7eb4" +checksum = "e23b3abce64010612f88f4ff689a959736f99eb3dc0dbf1c7903434b8bd8cda5" dependencies = [ "num-bigint", "serde", @@ -6975,6 +7079,12 @@ dependencies = [ "der", ] +[[package]] +name = "sptr" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" + [[package]] name = "sqlformat" version = "0.3.2" @@ -7570,6 +7680,19 @@ dependencies = [ "syn 2.0.87", ] +[[package]] +name = "sys_traits" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6683465f4e1d8fd75069cbc36c646258c05b7d8d6676bcb5d71968b99b7d5ae2" +dependencies = [ + "filetime", + "getrandom", + "libc", + "parking_lot", + "windows-sys 0.59.0", +] + [[package]] name = "tagptr" version = "0.2.0" @@ -7965,7 +8088,7 @@ dependencies = [ "socket2", "tokio", "tokio-stream", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", "tracing", @@ -7991,6 +8114,21 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-http" version = "0.6.1" @@ -8019,9 +8157,9 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" @@ -8495,6 +8633,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webpki-root-certs" version = "0.26.6" @@ -8522,7 +8670,7 @@ dependencies = [ "arrayvec", "bit-vec", "bitflags 2.6.0", - "cfg_aliases", + "cfg_aliases 0.1.1", "codespan-reporting", "document-features", "indexmap 2.3.0", @@ -8554,7 +8702,7 @@ dependencies = [ "bit-set", "bitflags 2.6.0", "block", - "cfg_aliases", + "cfg_aliases 0.1.1", "core-graphics-types", "d3d12", "glow", diff --git a/Cargo.toml b/Cargo.toml index 984cb187ef..bfd7437441 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,19 +48,19 
@@ repository = "https://github.com/denoland/deno" [workspace.dependencies] deno_ast = { version = "=0.44.0", features = ["transpiling"] } -deno_core = { version = "0.326.0" } +deno_core = { version = "0.327.0" } -deno_bench_util = { version = "0.176.0", path = "./bench_util" } -deno_config = { version = "=0.39.3", features = ["workspace", "sync"] } -deno_lockfile = "=0.23.2" +deno_bench_util = { version = "0.178.0", path = "./bench_util" } +deno_config = { version = "=0.42.0", features = ["workspace", "sync"] } +deno_lockfile = "=0.24.0" deno_media_type = { version = "0.2.0", features = ["module_specifier"] } -deno_npm = "=0.26.0" -deno_path_util = "=0.2.1" -deno_permissions = { version = "0.42.0", path = "./runtime/permissions" } -deno_runtime = { version = "0.191.0", path = "./runtime" } -deno_semver = "=0.6.1" +deno_npm = "=0.27.0" +deno_path_util = "=0.3.0" +deno_permissions = { version = "0.43.0", path = "./runtime/permissions" } +deno_runtime = { version = "0.192.0", path = "./runtime" } +deno_semver = "=0.7.1" deno_terminal = "0.2.0" -napi_sym = { version = "0.112.0", path = "./ext/napi/sym" } +napi_sym = { version = "0.114.0", path = "./ext/napi/sym" } test_util = { package = "test_server", path = "./tests/util/server" } denokv_proto = "0.8.4" @@ -69,34 +69,34 @@ denokv_remote = "0.8.4" denokv_sqlite = { default-features = false, version = "0.8.4" } # exts -deno_broadcast_channel = { version = "0.176.0", path = "./ext/broadcast_channel" } -deno_cache = { version = "0.114.0", path = "./ext/cache" } -deno_canvas = { version = "0.51.0", path = "./ext/canvas" } -deno_console = { version = "0.182.0", path = "./ext/console" } -deno_cron = { version = "0.62.0", path = "./ext/cron" } -deno_crypto = { version = "0.196.0", path = "./ext/crypto" } -deno_fetch = { version = "0.206.0", path = "./ext/fetch" } -deno_ffi = { version = "0.169.0", path = "./ext/ffi" } -deno_fs = { version = "0.92.0", path = "./ext/fs" } -deno_http = { version = "0.180.0", path = "./ext/http" } -deno_io = { version = "0.92.0", path = "./ext/io" } -deno_kv = { version = "0.90.0", path = "./ext/kv" } -deno_napi = { version = "0.113.0", path = "./ext/napi" } -deno_net = { version = "0.174.0", path = "./ext/net" } -deno_node = { version = "0.119.0", path = "./ext/node" } -deno_telemetry = { version = "0.4.0", path = "./ext/telemetry" } -deno_tls = { version = "0.169.0", path = "./ext/tls" } -deno_url = { version = "0.182.0", path = "./ext/url" } -deno_web = { version = "0.213.0", path = "./ext/web" } -deno_webgpu = { version = "0.149.0", path = "./ext/webgpu" } -deno_webidl = { version = "0.182.0", path = "./ext/webidl" } -deno_websocket = { version = "0.187.0", path = "./ext/websocket" } -deno_webstorage = { version = "0.177.0", path = "./ext/webstorage" } +deno_broadcast_channel = { version = "0.178.0", path = "./ext/broadcast_channel" } +deno_cache = { version = "0.116.0", path = "./ext/cache" } +deno_canvas = { version = "0.53.0", path = "./ext/canvas" } +deno_console = { version = "0.184.0", path = "./ext/console" } +deno_cron = { version = "0.64.0", path = "./ext/cron" } +deno_crypto = { version = "0.198.0", path = "./ext/crypto" } +deno_fetch = { version = "0.208.0", path = "./ext/fetch" } +deno_ffi = { version = "0.171.0", path = "./ext/ffi" } +deno_fs = { version = "0.94.0", path = "./ext/fs" } +deno_http = { version = "0.182.0", path = "./ext/http" } +deno_io = { version = "0.94.0", path = "./ext/io" } +deno_kv = { version = "0.92.0", path = "./ext/kv" } +deno_napi = { version = "0.115.0", path = "./ext/napi" 
} +deno_net = { version = "0.176.0", path = "./ext/net" } +deno_node = { version = "0.122.0", path = "./ext/node" } +deno_telemetry = { version = "0.6.0", path = "./ext/telemetry" } +deno_tls = { version = "0.171.0", path = "./ext/tls" } +deno_url = { version = "0.184.0", path = "./ext/url" } +deno_web = { version = "0.215.0", path = "./ext/web" } +deno_webgpu = { version = "0.151.0", path = "./ext/webgpu" } +deno_webidl = { version = "0.184.0", path = "./ext/webidl" } +deno_websocket = { version = "0.189.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.179.0", path = "./ext/webstorage" } # resolvers -deno_npm_cache = { version = "0.2.0", path = "./resolvers/npm_cache" } -deno_resolver = { version = "0.14.0", path = "./resolvers/deno" } -node_resolver = { version = "0.21.0", path = "./resolvers/node" } +deno_npm_cache = { version = "0.3.0", path = "./resolvers/npm_cache" } +deno_resolver = { version = "0.15.0", path = "./resolvers/deno" } +node_resolver = { version = "0.22.0", path = "./resolvers/node" } aes = "=0.8.3" anyhow = "1.0.57" @@ -104,11 +104,11 @@ async-trait = "0.1.73" base32 = "=0.5.1" base64 = "0.21.7" bencher = "0.1" -boxed_error = "0.2.2" +boxed_error = "0.2.3" brotli = "6.0.0" bytes = "1.4.0" cache_control = "=0.2.0" -capacity_builder = "0.1.0" +capacity_builder = "0.5.0" cbc = { version = "=0.1.2", features = ["alloc"] } # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS. # Instead use util::time::utc_now() @@ -117,9 +117,10 @@ color-print = "0.3.5" console_static_text = "=0.8.1" dashmap = "5.5.3" data-encoding = "2.3.3" -data-url = "=0.3.0" -deno_cache_dir = "=0.14.0" -deno_package_json = { version = "0.2.1", default-features = false } +data-url = "=0.3.1" +deno_cache_dir = "=0.16.0" +deno_error = "=0.5.2" +deno_package_json = { version = "0.4.0", default-features = false } deno_unsync = "0.4.2" dlopen2 = "0.6.1" ecb = "=0.1.2" @@ -148,7 +149,7 @@ indexmap = { version = "2", features = ["serde"] } ipnet = "2.3" jsonc-parser = { version = "=0.26.2", features = ["serde"] } lazy-regex = "3" -libc = "0.2.126" +libc = "0.2.168" libz-sys = { version = "1.1.20", default-features = false } log = { version = "0.4.20", features = ["kv"] } lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases @@ -192,6 +193,7 @@ slab = "0.4" smallvec = "1.8" socket2 = { version = "0.5.3", features = ["all"] } spki = "0.7.2" +sys_traits = "=0.1.4" tar = "=0.4.40" tempfile = "3.4.0" termcolor = "1.1.3" @@ -201,7 +203,7 @@ tokio-metrics = { version = "0.3.0", features = ["rt"] } tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] } tokio-socks = "0.5.1" tokio-util = "0.7.4" -tower = { version = "0.4.13", default-features = false, features = ["util"] } +tower = { version = "0.5.2", default-features = false, features = ["retry", "util"] } tower-http = { version = "0.6.1", features = ["decompression-br", "decompression-gzip"] } tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] } tower-service = "0.3.2" diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index 9833996fd3..014b74f264 100644 --- a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.176.0" +version = "0.178.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 253ea80f19..d05c3fb3e3 100644 --- a/cli/Cargo.toml +++ 
b/cli/Cargo.toml @@ -73,7 +73,8 @@ deno_cache_dir.workspace = true deno_config.workspace = true deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] } -deno_graph = { version = "=0.86.3" } +deno_error.workspace = true +deno_graph = { version = "=0.86.6" } deno_lint = { version = "=0.68.2", features = ["docs"] } deno_lockfile.workspace = true deno_npm.workspace = true @@ -93,8 +94,10 @@ anstream = "0.6.14" async-trait.workspace = true base64.workspace = true bincode = "=1.3.3" +boxed_error.workspace = true bytes.workspace = true cache_control.workspace = true +capacity_builder.workspace = true chrono = { workspace = true, features = ["now"] } clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] } clap_complete = "=4.5.24" @@ -155,6 +158,7 @@ shell-escape = "=0.1.5" spki = { version = "0.7", features = ["pem"] } sqlformat = "=0.3.2" strsim = "0.11.1" +sys_traits = { workspace = true, features = ["getrandom", "filetime", "libc", "real", "strip_unc", "winapi"] } tar.workspace = true tempfile.workspace = true text-size = "=1.1.0" @@ -183,6 +187,7 @@ nix.workspace = true [dev-dependencies] deno_bench_util.workspace = true pretty_assertions.workspace = true +sys_traits = { workspace = true, features = ["memory"] } test_util.workspace = true [package.metadata.winres] diff --git a/cli/args/deno_json.rs b/cli/args/deno_json.rs index 8853107eef..47dcbb91ea 100644 --- a/cli/args/deno_json.rs +++ b/cli/args/deno_json.rs @@ -8,62 +8,6 @@ use deno_semver::jsr::JsrDepPackageReq; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; -#[cfg(test)] // happens to only be used by the tests at the moment -pub struct DenoConfigFsAdapter<'a>( - pub &'a dyn deno_runtime::deno_fs::FileSystem, -); - -#[cfg(test)] -impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> { - fn read_to_string_lossy( - &self, - path: &std::path::Path, - ) -> Result, std::io::Error> { - self - .0 - .read_text_file_lossy_sync(path, None) - .map_err(|err| err.into_io_error()) - } - - fn stat_sync( - &self, - path: &std::path::Path, - ) -> Result { - self - .0 - .stat_sync(path) - .map(|stat| deno_config::fs::FsMetadata { - is_file: stat.is_file, - is_directory: stat.is_directory, - is_symlink: stat.is_symlink, - }) - .map_err(|err| err.into_io_error()) - } - - fn read_dir( - &self, - path: &std::path::Path, - ) -> Result, std::io::Error> { - self - .0 - .read_dir_sync(path) - .map_err(|err| err.into_io_error()) - .map(|entries| { - entries - .into_iter() - .map(|e| deno_config::fs::FsDirEntry { - path: path.join(e.name), - metadata: deno_config::fs::FsMetadata { - is_file: e.is_file, - is_directory: e.is_directory, - is_symlink: e.is_symlink, - }, - }) - .collect() - }) - } -} - pub fn import_map_deps( import_map: &serde_json::Value, ) -> HashSet { diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 418edcf34b..2b0b9a2908 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -1006,6 +1006,8 @@ impl Flags { OtelConfig { tracing_enabled: !disabled && otel_var("OTEL_DENO_TRACING").unwrap_or(default), + metrics_enabled: !disabled + && otel_var("OTEL_DENO_METRICS").unwrap_or(default), console: match std::env::var("OTEL_DENO_CONSOLE").as_deref() { Ok(_) if disabled => OtelConsoleConfig::Ignore, Ok("ignore") => OtelConsoleConfig::Ignore, diff --git a/cli/args/import_map.rs b/cli/args/import_map.rs index ff2f158715..d6434ed46a 100644 --- 
a/cli/args/import_map.rs +++ b/cli/args/import_map.rs @@ -4,21 +4,21 @@ use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::url::Url; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; +use crate::file_fetcher::TextDecodedFile; pub async fn resolve_import_map_value_from_specifier( specifier: &Url, - file_fetcher: &FileFetcher, + file_fetcher: &CliFileFetcher, ) -> Result { if specifier.scheme() == "data" { let data_url_text = deno_graph::source::RawDataUrl::parse(specifier)?.decode()?; Ok(serde_json::from_str(&data_url_text)?) } else { - let file = file_fetcher - .fetch_bypass_permissions(specifier) - .await? - .into_text_decoded()?; + let file = TextDecodedFile::decode( + file_fetcher.fetch_bypass_permissions(specifier).await?, + )?; Ok(serde_json::from_str(&file.source)?) } } diff --git a/cli/args/lockfile.rs b/cli/args/lockfile.rs index 1075f93a6f..7d5fe57bc3 100644 --- a/cli/args/lockfile.rs +++ b/cli/args/lockfile.rs @@ -12,12 +12,13 @@ use deno_core::parking_lot::MutexGuard; use deno_core::serde_json; use deno_lockfile::WorkspaceMemberConfig; use deno_package_json::PackageJsonDepValue; +use deno_path_util::fs::atomic_write_file_with_retries; use deno_runtime::deno_node::PackageJson; use deno_semver::jsr::JsrDepPackageReq; use crate::args::deno_json::import_map_deps; use crate::cache; -use crate::util::fs::atomic_write_file_with_retries; +use crate::sys::CliSys; use crate::Flags; use crate::args::DenoSubcommand; @@ -35,6 +36,7 @@ pub struct CliLockfileReadFromPathOptions { #[derive(Debug)] pub struct CliLockfile { + sys: CliSys, lockfile: Mutex, pub filename: PathBuf, frozen: bool, @@ -91,8 +93,9 @@ impl CliLockfile { // do an atomic write to reduce the chance of multiple deno // processes corrupting the file atomic_write_file_with_retries( + &self.sys, &lockfile.filename, - bytes, + &bytes, cache::CACHE_PERM, ) .context("Failed writing lockfile.")?; @@ -101,6 +104,7 @@ impl CliLockfile { } pub fn discover( + sys: &CliSys, flags: &Flags, workspace: &Workspace, maybe_external_import_map: Option<&serde_json::Value>, @@ -163,11 +167,14 @@ impl CliLockfile { .unwrap_or(false) }); - let lockfile = Self::read_from_path(CliLockfileReadFromPathOptions { - file_path, - frozen, - skip_write: flags.internal.lockfile_skip_write, - })?; + let lockfile = Self::read_from_path( + sys, + CliLockfileReadFromPathOptions { + file_path, + frozen, + skip_write: flags.internal.lockfile_skip_write, + }, + )?; // initialize the lockfile with the workspace's configuration let root_url = workspace.root_dir(); @@ -223,6 +230,7 @@ impl CliLockfile { } pub fn read_from_path( + sys: &CliSys, opts: CliLockfileReadFromPathOptions, ) -> Result { let lockfile = match std::fs::read_to_string(&opts.file_path) { @@ -241,6 +249,7 @@ impl CliLockfile { } }; Ok(CliLockfile { + sys: sys.clone(), filename: lockfile.filename.clone(), lockfile: Mutex::new(lockfile), frozen: opts.frozen, diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 450aa11652..a059b07757 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -9,6 +9,7 @@ mod package_json; use deno_ast::MediaType; use deno_ast::SourceMapOption; +use deno_cache_dir::file_fetcher::CacheSetting; use deno_config::deno_json::NodeModulesDirMode; use deno_config::workspace::CreateResolverOptions; use deno_config::workspace::FolderConfigs; @@ -23,13 +24,14 @@ use deno_config::workspace::WorkspaceLintConfig; use deno_config::workspace::WorkspaceResolver; use deno_core::resolve_url_or_path; use deno_graph::GraphKind; +use 
deno_lint::linter::LintConfig as DenoLintConfig; use deno_npm::npm_rc::NpmRc; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmSystemInfo; -use deno_npm_cache::NpmCacheSetting; use deno_path_util::normalize_path; use deno_semver::npm::NpmPackageReqReference; +use deno_semver::StackString; use deno_telemetry::OtelConfig; use deno_telemetry::OtelRuntimeConfig; use import_map::resolve_import_map_value_from_specifier; @@ -81,11 +83,12 @@ use std::num::NonZeroUsize; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use sys_traits::EnvHomeDir; use thiserror::Error; -use crate::cache; use crate::cache::DenoDirProvider; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; +use crate::sys::CliSys; use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::version; @@ -217,52 +220,6 @@ pub fn ts_config_to_transpile_and_emit_options( )) } -/// Indicates how cached source files should be handled. -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum CacheSetting { - /// Only the cached files should be used. Any files not in the cache will - /// error. This is the equivalent of `--cached-only` in the CLI. - Only, - /// No cached source files should be used, and all files should be reloaded. - /// This is the equivalent of `--reload` in the CLI. - ReloadAll, - /// Only some cached resources should be used. This is the equivalent of - /// `--reload=jsr:@std/http/file-server` or - /// `--reload=jsr:@std/http/file-server,jsr:@std/assert/assert-equals`. - ReloadSome(Vec), - /// The usability of a cached value is determined by analyzing the cached - /// headers and other metadata associated with a cached response, reloading - /// any cached "non-fresh" cached responses. - RespectHeaders, - /// The cached source files should be used for local modules. This is the - /// default behavior of the CLI. - Use, -} - -impl CacheSetting { - pub fn as_npm_cache_setting(&self) -> NpmCacheSetting { - match self { - CacheSetting::Only => NpmCacheSetting::Only, - CacheSetting::ReloadAll => NpmCacheSetting::ReloadAll, - CacheSetting::ReloadSome(values) => { - if values.iter().any(|v| v == "npm:") { - NpmCacheSetting::ReloadAll - } else { - NpmCacheSetting::ReloadSome { - npm_package_names: values - .iter() - .filter_map(|v| v.strip_prefix("npm:")) - .map(|n| n.to_string()) - .collect(), - } - } - } - CacheSetting::RespectHeaders => unreachable!(), // not supported - CacheSetting::Use => NpmCacheSetting::Use, - } - } -} - pub struct WorkspaceBenchOptions { pub filter: Option, pub json: bool, @@ -616,7 +573,7 @@ fn discover_npmrc( // TODO(bartlomieju): update to read both files - one in the project root and one and // home dir and then merge them. // 3. 
Try `.npmrc` in the user's home directory - if let Some(home_dir) = cache::home_dir() { + if let Some(home_dir) = crate::sys::CliSys::default().env_home_dir() { match try_to_read_npmrc(&home_dir) { Ok(Some((source, path))) => { return try_to_parse_npmrc(source, &path).map(|r| (r, Some(path))); @@ -815,7 +772,9 @@ pub struct CliOptions { } impl CliOptions { + #[allow(clippy::too_many_arguments)] pub fn new( + sys: &CliSys, flags: Arc, initial_cwd: PathBuf, maybe_lockfile: Option>, @@ -840,8 +799,10 @@ impl CliOptions { } let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache); - let deno_dir_provider = - Arc::new(DenoDirProvider::new(flags.internal.cache_path.clone())); + let deno_dir_provider = Arc::new(DenoDirProvider::new( + sys.clone(), + flags.internal.cache_path.clone(), + )); let maybe_node_modules_folder = resolve_node_modules_folder( &initial_cwd, &flags, @@ -866,7 +827,7 @@ impl CliOptions { }) } - pub fn from_flags(flags: Arc) -> Result { + pub fn from_flags(sys: &CliSys, flags: Arc) -> Result { let initial_cwd = std::env::current_dir().with_context(|| "Failed getting cwd.")?; let maybe_vendor_override = flags.vendor.map(|v| match v { @@ -889,7 +850,6 @@ impl CliOptions { log::debug!("package.json auto-discovery is disabled"); } WorkspaceDiscoverOptions { - fs: Default::default(), // use real fs deno_json_cache: None, pkg_json_cache: Some(&node_resolver::PackageJsonThreadLocalCache), workspace_cache: None, @@ -911,6 +871,7 @@ impl CliOptions { ConfigFlag::Discover => { if let Some(start_paths) = flags.config_path_args(&initial_cwd) { WorkspaceDirectory::discover( + sys, WorkspaceDiscoverStart::Paths(&start_paths), &resolve_workspace_discover_options(), )? @@ -921,6 +882,7 @@ impl CliOptions { ConfigFlag::Path(path) => { let config_path = normalize_path(initial_cwd.join(path)); WorkspaceDirectory::discover( + sys, WorkspaceDiscoverStart::ConfigFile(&config_path), &resolve_workspace_discover_options(), )? 
@@ -959,6 +921,7 @@ impl CliOptions { }; let maybe_lock_file = CliLockfile::discover( + sys, &flags, &start_dir.workspace, external_import_map.as_ref().map(|(_, v)| v), @@ -967,6 +930,7 @@ impl CliOptions { log::debug!("Finished config loading."); Self::new( + sys, flags, initial_cwd, maybe_lock_file.map(Arc::new), @@ -1037,24 +1001,24 @@ impl CliOptions { // https://nodejs.org/api/process.html match target.as_str() { "aarch64-apple-darwin" => NpmSystemInfo { - os: "darwin".to_string(), - cpu: "arm64".to_string(), + os: "darwin".into(), + cpu: "arm64".into(), }, "aarch64-unknown-linux-gnu" => NpmSystemInfo { - os: "linux".to_string(), - cpu: "arm64".to_string(), + os: "linux".into(), + cpu: "arm64".into(), }, "x86_64-apple-darwin" => NpmSystemInfo { - os: "darwin".to_string(), - cpu: "x64".to_string(), + os: "darwin".into(), + cpu: "x64".into(), }, "x86_64-unknown-linux-gnu" => NpmSystemInfo { - os: "linux".to_string(), - cpu: "x64".to_string(), + os: "linux".into(), + cpu: "x64".into(), }, "x86_64-pc-windows-msvc" => NpmSystemInfo { - os: "win32".to_string(), - cpu: "x64".to_string(), + os: "win32".into(), + cpu: "x64".into(), }, value => { log::warn!( @@ -1091,7 +1055,7 @@ impl CliOptions { pub async fn create_workspace_resolver( &self, - file_fetcher: &FileFetcher, + file_fetcher: &CliFileFetcher, pkg_json_dep_resolution: PackageJsonDepResolution, ) -> Result { let overrode_no_import_map: bool = self @@ -1397,9 +1361,7 @@ impl CliOptions { Ok(result) } - pub fn resolve_deno_lint_config( - &self, - ) -> Result { + pub fn resolve_deno_lint_config(&self) -> Result { let ts_config_result = self.resolve_ts_config_for_emit(TsConfigType::Emit)?; @@ -1408,11 +1370,11 @@ impl CliOptions { ts_config_result.ts_config, )?; - Ok(deno_lint::linter::LintConfig { + Ok(DenoLintConfig { default_jsx_factory: (!transpile_options.jsx_automatic) - .then(|| transpile_options.jsx_factory.clone()), + .then_some(transpile_options.jsx_factory), default_jsx_fragment_factory: (!transpile_options.jsx_automatic) - .then(|| transpile_options.jsx_fragment_factory.clone()), + .then_some(transpile_options.jsx_fragment_factory), }) } @@ -1993,15 +1955,17 @@ pub fn has_flag_env_var(name: &str) -> bool { pub fn npm_pkg_req_ref_to_binary_command( req_ref: &NpmPackageReqReference, ) -> String { - let binary_name = req_ref.sub_path().unwrap_or(req_ref.req().name.as_str()); - binary_name.to_string() + req_ref + .sub_path() + .map(|s| s.to_string()) + .unwrap_or_else(|| req_ref.req().name.to_string()) } pub fn config_to_deno_graph_workspace_member( config: &ConfigFile, ) -> Result { - let name = match &config.json.name { - Some(name) => name.clone(), + let name: StackString = match &config.json.name { + Some(name) => name.as_str().into(), None => bail!("Missing 'name' field in config file."), }; let version = match &config.json.version { diff --git a/cli/args/package_json.rs b/cli/args/package_json.rs index b0f0a2f9ba..50d1c04799 100644 --- a/cli/args/package_json.rs +++ b/cli/args/package_json.rs @@ -11,19 +11,20 @@ use deno_package_json::PackageJsonDepValueParseError; use deno_package_json::PackageJsonDepWorkspaceReq; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; +use deno_semver::StackString; use deno_semver::VersionReq; use thiserror::Error; #[derive(Debug)] pub struct InstallNpmRemotePkg { - pub alias: Option, + pub alias: Option, pub base_dir: PathBuf, pub req: PackageReq, } #[derive(Debug)] pub struct InstallNpmWorkspacePkg { - pub alias: Option, + pub alias: Option, pub target_dir: 
PathBuf, } @@ -31,7 +32,7 @@ pub struct InstallNpmWorkspacePkg { #[error("Failed to install '{}'\n at {}", alias, location)] pub struct PackageJsonDepValueParseWithLocationError { pub location: Url, - pub alias: String, + pub alias: StackString, #[source] pub source: PackageJsonDepValueParseError, } @@ -100,10 +101,8 @@ impl NpmInstallDepsProvider { let mut pkg_pkgs = Vec::with_capacity( deps.dependencies.len() + deps.dev_dependencies.len(), ); - for (alias, dep) in deps - .dependencies - .into_iter() - .chain(deps.dev_dependencies.into_iter()) + for (alias, dep) in + deps.dependencies.iter().chain(deps.dev_dependencies.iter()) { let dep = match dep { Ok(dep) => dep, @@ -111,8 +110,8 @@ impl NpmInstallDepsProvider { pkg_json_dep_errors.push( PackageJsonDepValueParseWithLocationError { location: pkg_json.specifier(), - alias, - source: err, + alias: alias.clone(), + source: err.clone(), }, ); continue; @@ -121,28 +120,28 @@ impl NpmInstallDepsProvider { match dep { PackageJsonDepValue::Req(pkg_req) => { let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| { - pkg.matches_req(&pkg_req) + pkg.matches_req(pkg_req) // do not resolve to the current package && pkg.pkg_json.path != pkg_json.path }); if let Some(pkg) = workspace_pkg { workspace_pkgs.push(InstallNpmWorkspacePkg { - alias: Some(alias), + alias: Some(alias.clone()), target_dir: pkg.pkg_json.dir_path().to_path_buf(), }); } else { pkg_pkgs.push(InstallNpmRemotePkg { - alias: Some(alias), + alias: Some(alias.clone()), base_dir: pkg_json.dir_path().to_path_buf(), - req: pkg_req, + req: pkg_req.clone(), }); } } PackageJsonDepValue::Workspace(workspace_version_req) => { let version_req = match workspace_version_req { PackageJsonDepWorkspaceReq::VersionReq(version_req) => { - version_req + version_req.clone() } PackageJsonDepWorkspaceReq::Tilde | PackageJsonDepWorkspaceReq::Caret => { @@ -150,10 +149,10 @@ impl NpmInstallDepsProvider { } }; if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| { - pkg.matches_name_and_version_req(&alias, &version_req) + pkg.matches_name_and_version_req(alias, &version_req) }) { workspace_pkgs.push(InstallNpmWorkspacePkg { - alias: Some(alias), + alias: Some(alias.clone()), target_dir: pkg.pkg_json.dir_path().to_path_buf(), }); } diff --git a/cli/auth_tokens.rs b/cli/auth_tokens.rs deleted file mode 100644 index ef9f9d0746..0000000000 --- a/cli/auth_tokens.rs +++ /dev/null @@ -1,369 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
- -use base64::prelude::BASE64_STANDARD; -use base64::Engine; -use deno_core::ModuleSpecifier; -use log::debug; -use log::error; -use std::borrow::Cow; -use std::fmt; -use std::net::IpAddr; -use std::net::Ipv4Addr; -use std::net::Ipv6Addr; -use std::net::SocketAddr; -use std::str::FromStr; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum AuthTokenData { - Bearer(String), - Basic { username: String, password: String }, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct AuthToken { - host: AuthDomain, - token: AuthTokenData, -} - -impl fmt::Display for AuthToken { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match &self.token { - AuthTokenData::Bearer(token) => write!(f, "Bearer {token}"), - AuthTokenData::Basic { username, password } => { - let credentials = format!("{username}:{password}"); - write!(f, "Basic {}", BASE64_STANDARD.encode(credentials)) - } - } - } -} - -/// A structure which contains bearer tokens that can be used when sending -/// requests to websites, intended to authorize access to private resources -/// such as remote modules. -#[derive(Debug, Clone)] -pub struct AuthTokens(Vec); - -/// An authorization domain, either an exact or suffix match. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum AuthDomain { - Ip(IpAddr), - IpPort(SocketAddr), - /// Suffix match, no dot. May include a port. - Suffix(Cow<'static, str>), -} - -impl From for AuthDomain { - fn from(value: T) -> Self { - let s = value.to_string().to_lowercase(); - if let Ok(ip) = SocketAddr::from_str(&s) { - return AuthDomain::IpPort(ip); - }; - if s.starts_with('[') && s.ends_with(']') { - if let Ok(ip) = Ipv6Addr::from_str(&s[1..s.len() - 1]) { - return AuthDomain::Ip(ip.into()); - } - } else if let Ok(ip) = Ipv4Addr::from_str(&s) { - return AuthDomain::Ip(ip.into()); - } - if let Some(s) = s.strip_prefix('.') { - AuthDomain::Suffix(Cow::Owned(s.to_owned())) - } else { - AuthDomain::Suffix(Cow::Owned(s)) - } - } -} - -impl AuthDomain { - pub fn matches(&self, specifier: &ModuleSpecifier) -> bool { - let Some(host) = specifier.host_str() else { - return false; - }; - match *self { - Self::Ip(ip) => { - let AuthDomain::Ip(parsed) = AuthDomain::from(host) else { - return false; - }; - ip == parsed && specifier.port().is_none() - } - Self::IpPort(ip) => { - let AuthDomain::Ip(parsed) = AuthDomain::from(host) else { - return false; - }; - ip.ip() == parsed && specifier.port() == Some(ip.port()) - } - Self::Suffix(ref suffix) => { - let hostname = if let Some(port) = specifier.port() { - Cow::Owned(format!("{}:{}", host, port)) - } else { - Cow::Borrowed(host) - }; - - if suffix.len() == hostname.len() { - return suffix == &hostname; - } - - // If it's a suffix match, ensure a dot - if hostname.ends_with(suffix.as_ref()) - && hostname.ends_with(&format!(".{suffix}")) - { - return true; - } - - false - } - } - } -} - -impl AuthTokens { - /// Create a new set of tokens based on the provided string. It is intended - /// that the string be the value of an environment variable and the string is - /// parsed for token values. The string is expected to be a semi-colon - /// separated string, where each value is `{token}@{hostname}`. 
- pub fn new(maybe_tokens_str: Option) -> Self { - let mut tokens = Vec::new(); - if let Some(tokens_str) = maybe_tokens_str { - for token_str in tokens_str.trim().split(';') { - if token_str.contains('@') { - let mut iter = token_str.rsplitn(2, '@'); - let host = AuthDomain::from(iter.next().unwrap()); - let token = iter.next().unwrap(); - if token.contains(':') { - let mut iter = token.rsplitn(2, ':'); - let password = iter.next().unwrap().to_owned(); - let username = iter.next().unwrap().to_owned(); - tokens.push(AuthToken { - host, - token: AuthTokenData::Basic { username, password }, - }); - } else { - tokens.push(AuthToken { - host, - token: AuthTokenData::Bearer(token.to_string()), - }); - } - } else { - error!("Badly formed auth token discarded."); - } - } - debug!("Parsed {} auth token(s).", tokens.len()); - } - - Self(tokens) - } - - /// Attempt to match the provided specifier to the tokens in the set. The - /// matching occurs from the right of the hostname plus port, irrespective of - /// scheme. For example `https://www.deno.land:8080/` would match a token - /// with a host value of `deno.land:8080` but not match `www.deno.land`. The - /// matching is case insensitive. - pub fn get(&self, specifier: &ModuleSpecifier) -> Option { - self.0.iter().find_map(|t| { - if t.host.matches(specifier) { - Some(t.clone()) - } else { - None - } - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use deno_core::resolve_url; - - #[test] - fn test_auth_token() { - let auth_tokens = AuthTokens::new(Some("abc123@deno.land".to_string())); - let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer abc123" - ); - let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer abc123".to_string() - ); - let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap(); - assert_eq!(auth_tokens.get(&fixture), None); - let fixture = - resolve_url("https://deno.land.example.com/x/mod.ts").unwrap(); - assert_eq!(auth_tokens.get(&fixture), None); - let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap(); - assert_eq!(auth_tokens.get(&fixture), None); - } - - #[test] - fn test_auth_tokens_multiple() { - let auth_tokens = - AuthTokens::new(Some("abc123@deno.land;def456@example.com".to_string())); - let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer abc123".to_string() - ); - let fixture = resolve_url("http://example.com/a/file.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer def456".to_string() - ); - } - - #[test] - fn test_auth_tokens_space() { - let auth_tokens = AuthTokens::new(Some( - " abc123@deno.land;def456@example.com\t".to_string(), - )); - let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer abc123".to_string() - ); - let fixture = resolve_url("http://example.com/a/file.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer def456".to_string() - ); - } - - #[test] - fn test_auth_tokens_newline() { - let auth_tokens = AuthTokens::new(Some( - "\nabc123@deno.land;def456@example.com\n".to_string(), - )); - let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer abc123".to_string() - ); - 
let fixture = resolve_url("http://example.com/a/file.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer def456".to_string() - ); - } - - #[test] - fn test_auth_tokens_port() { - let auth_tokens = - AuthTokens::new(Some("abc123@deno.land:8080".to_string())); - let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap(); - assert_eq!(auth_tokens.get(&fixture), None); - let fixture = resolve_url("http://deno.land:8080/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer abc123".to_string() - ); - } - - #[test] - fn test_auth_tokens_contain_at() { - let auth_tokens = AuthTokens::new(Some("abc@123@deno.land".to_string())); - let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Bearer abc@123".to_string() - ); - } - - #[test] - fn test_auth_token_basic() { - let auth_tokens = AuthTokens::new(Some("abc:123@deno.land".to_string())); - let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Basic YWJjOjEyMw==" - ); - let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap(); - assert_eq!( - auth_tokens.get(&fixture).unwrap().to_string(), - "Basic YWJjOjEyMw==".to_string() - ); - let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap(); - assert_eq!(auth_tokens.get(&fixture), None); - let fixture = - resolve_url("https://deno.land.example.com/x/mod.ts").unwrap(); - assert_eq!(auth_tokens.get(&fixture), None); - let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap(); - assert_eq!(auth_tokens.get(&fixture), None); - } - - #[test] - fn test_parse_ip() { - let ip = AuthDomain::from("[2001:db8:a::123]"); - assert_eq!("Ip(2001:db8:a::123)", format!("{ip:?}")); - let ip = AuthDomain::from("[2001:db8:a::123]:8080"); - assert_eq!("IpPort([2001:db8:a::123]:8080)", format!("{ip:?}")); - let ip = AuthDomain::from("1.1.1.1"); - assert_eq!("Ip(1.1.1.1)", format!("{ip:?}")); - } - - #[test] - fn test_case_insensitive() { - let domain = AuthDomain::from("EXAMPLE.com"); - assert!( - domain.matches(&ModuleSpecifier::parse("http://example.com").unwrap()) - ); - assert!( - domain.matches(&ModuleSpecifier::parse("http://example.COM").unwrap()) - ); - } - - #[test] - fn test_matches() { - let candidates = [ - "example.com", - "www.example.com", - "1.1.1.1", - "[2001:db8:a::123]", - // These will never match - "example.com.evil.com", - "1.1.1.1.evil.com", - "notexample.com", - "www.notexample.com", - ]; - let domains = [ - ("example.com", vec!["example.com", "www.example.com"]), - (".example.com", vec!["example.com", "www.example.com"]), - ("www.example.com", vec!["www.example.com"]), - ("1.1.1.1", vec!["1.1.1.1"]), - ("[2001:db8:a::123]", vec!["[2001:db8:a::123]"]), - ]; - let url = |c: &str| ModuleSpecifier::parse(&format!("http://{c}")).unwrap(); - let url_port = - |c: &str| ModuleSpecifier::parse(&format!("http://{c}:8080")).unwrap(); - - // Generate each candidate with and without a port - let candidates = candidates - .into_iter() - .flat_map(|c| [url(c), url_port(c)]) - .collect::>(); - - for (domain, expected_domain) in domains { - // Test without a port -- all candidates return without a port - let auth_domain = AuthDomain::from(domain); - let actual = candidates - .iter() - .filter(|c| auth_domain.matches(c)) - .cloned() - .collect::>(); - let expected = expected_domain.iter().map(|u| url(u)).collect::>(); - assert_eq!(actual, expected); 
-
-      // Test with a port, all candidates return with a port
-      let auth_domain = AuthDomain::from(&format!("{domain}:8080"));
-      let actual = candidates
-        .iter()
-        .filter(|c| auth_domain.matches(c))
-        .cloned()
-        .collect::<Vec<_>>();
-      let expected = expected_domain
-        .iter()
-        .map(|u| url_port(u))
-        .collect::<Vec<_>>();
-      assert_eq!(actual, expected);
-    }
-  }
-}
diff --git a/cli/cache/deno_dir.rs b/cli/cache/deno_dir.rs
index 7b7059c224..90a3add54e 100644
--- a/cli/cache/deno_dir.rs
+++ b/cli/cache/deno_dir.rs
@@ -1,7 +1,10 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+use deno_cache_dir::DenoDirResolutionError;
 use once_cell::sync::OnceCell;
 
+use crate::sys::CliSys;
+
 use super::DiskCache;
 use std::env;
@@ -10,24 +13,37 @@ use std::path::PathBuf;
 
 /// Lazily creates the deno dir which might be useful in scenarios
 /// where functionality wants to continue if the DENO_DIR can't be created.
 pub struct DenoDirProvider {
+  sys: CliSys,
   maybe_custom_root: Option<PathBuf>,
-  deno_dir: OnceCell<Result<DenoDir, std::io::Error>>,
+  deno_dir: OnceCell<Result<DenoDir, DenoDirResolutionError>>,
 }
 
 impl DenoDirProvider {
-  pub fn new(maybe_custom_root: Option<PathBuf>) -> Self {
+  pub fn new(sys: CliSys, maybe_custom_root: Option<PathBuf>) -> Self {
     Self {
+      sys,
       maybe_custom_root,
       deno_dir: Default::default(),
     }
   }
 
-  pub fn get_or_create(&self) -> Result<&DenoDir, std::io::Error> {
+  pub fn get_or_create(&self) -> Result<&DenoDir, DenoDirResolutionError> {
     self
       .deno_dir
-      .get_or_init(|| DenoDir::new(self.maybe_custom_root.clone()))
+      .get_or_init(|| {
+        DenoDir::new(self.sys.clone(), self.maybe_custom_root.clone())
+      })
       .as_ref()
-      .map_err(|err| std::io::Error::new(err.kind(), err.to_string()))
+      .map_err(|err| match err {
+        DenoDirResolutionError::NoCacheOrHomeDir => {
+          DenoDirResolutionError::NoCacheOrHomeDir
+        }
+        DenoDirResolutionError::FailedCwd { source } => {
+          DenoDirResolutionError::FailedCwd {
+            source: std::io::Error::new(source.kind(), source.to_string()),
+          }
+        }
+      })
   }
 }
 
@@ -42,33 +58,20 @@ pub struct DenoDir {
 }
 
 impl DenoDir {
-  pub fn new(maybe_custom_root: Option<PathBuf>) -> std::io::Result<Self> {
-    let maybe_custom_root =
-      maybe_custom_root.or_else(|| env::var("DENO_DIR").map(String::into).ok());
-    let root: PathBuf = if let Some(root) = maybe_custom_root {
-      root
-    } else if let Some(cache_dir) = dirs::cache_dir() {
-      // We use the OS cache dir because all files deno writes are cache files
-      // Once that changes we need to start using different roots if DENO_DIR
-      // is not set, and keep a single one if it is.
- cache_dir.join("deno") - } else if let Some(home_dir) = dirs::home_dir() { - // fallback path - home_dir.join(".deno") - } else { - panic!("Could not set the Deno root directory") - }; - let root = if root.is_absolute() { - root - } else { - std::env::current_dir()?.join(root) - }; + pub fn new( + sys: CliSys, + maybe_custom_root: Option, + ) -> Result { + let root = deno_cache_dir::resolve_deno_dir( + &sys_traits::impls::RealSys, + maybe_custom_root, + )?; assert!(root.is_absolute()); let gen_path = root.join("gen"); let deno_dir = Self { root, - gen_cache: DiskCache::new(&gen_path), + gen_cache: DiskCache::new(sys, &gen_path), }; Ok(deno_dir) @@ -166,112 +169,3 @@ impl DenoDir { self.root.join("dl") } } - -/// To avoid the poorly managed dirs crate -#[cfg(not(windows))] -pub mod dirs { - use std::path::PathBuf; - - pub fn cache_dir() -> Option { - if cfg!(target_os = "macos") { - home_dir().map(|h| h.join("Library/Caches")) - } else { - std::env::var_os("XDG_CACHE_HOME") - .map(PathBuf::from) - .or_else(|| home_dir().map(|h| h.join(".cache"))) - } - } - - pub fn home_dir() -> Option { - std::env::var_os("HOME") - .and_then(|h| if h.is_empty() { None } else { Some(h) }) - .or_else(|| { - // TODO(bartlomieju): - #[allow(clippy::undocumented_unsafe_blocks)] - unsafe { - fallback() - } - }) - .map(PathBuf::from) - } - - // This piece of code is taken from the deprecated home_dir() function in Rust's standard library: https://github.com/rust-lang/rust/blob/master/src/libstd/sys/unix/os.rs#L579 - // The same code is used by the dirs crate - unsafe fn fallback() -> Option { - let amt = match libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) { - n if n < 0 => 512_usize, - n => n as usize, - }; - let mut buf = Vec::with_capacity(amt); - let mut passwd: libc::passwd = std::mem::zeroed(); - let mut result = std::ptr::null_mut(); - match libc::getpwuid_r( - libc::getuid(), - &mut passwd, - buf.as_mut_ptr(), - buf.capacity(), - &mut result, - ) { - 0 if !result.is_null() => { - let ptr = passwd.pw_dir as *const _; - let bytes = std::ffi::CStr::from_ptr(ptr).to_bytes().to_vec(); - Some(std::os::unix::ffi::OsStringExt::from_vec(bytes)) - } - _ => None, - } - } -} - -/// To avoid the poorly managed dirs crate -// Copied from -// https://github.com/dirs-dev/dirs-sys-rs/blob/ec7cee0b3e8685573d847f0a0f60aae3d9e07fa2/src/lib.rs#L140-L164 -// MIT license. 
Copyright (c) 2018-2019 dirs-rs contributors -#[cfg(windows)] -pub mod dirs { - use std::ffi::OsString; - use std::os::windows::ffi::OsStringExt; - use std::path::PathBuf; - use winapi::shared::winerror; - use winapi::um::combaseapi; - use winapi::um::knownfolders; - use winapi::um::shlobj; - use winapi::um::shtypes; - use winapi::um::winbase; - use winapi::um::winnt; - - fn known_folder(folder_id: shtypes::REFKNOWNFOLDERID) -> Option { - // SAFETY: winapi calls - unsafe { - let mut path_ptr: winnt::PWSTR = std::ptr::null_mut(); - let result = shlobj::SHGetKnownFolderPath( - folder_id, - 0, - std::ptr::null_mut(), - &mut path_ptr, - ); - if result == winerror::S_OK { - let len = winbase::lstrlenW(path_ptr) as usize; - let path = std::slice::from_raw_parts(path_ptr, len); - let ostr: OsString = OsStringExt::from_wide(path); - combaseapi::CoTaskMemFree(path_ptr as *mut winapi::ctypes::c_void); - Some(PathBuf::from(ostr)) - } else { - None - } - } - } - - pub fn cache_dir() -> Option { - known_folder(&knownfolders::FOLDERID_LocalAppData) - } - - pub fn home_dir() -> Option { - if let Some(userprofile) = std::env::var_os("USERPROFILE") { - if !userprofile.is_empty() { - return Some(PathBuf::from(userprofile)); - } - } - - known_folder(&knownfolders::FOLDERID_Profile) - } -} diff --git a/cli/cache/disk_cache.rs b/cli/cache/disk_cache.rs index 2fee1efe09..c96a3943c0 100644 --- a/cli/cache/disk_cache.rs +++ b/cli/cache/disk_cache.rs @@ -1,11 +1,13 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use crate::sys::CliSys; + use super::CACHE_PERM; -use crate::util::fs::atomic_write_file_with_retries; use deno_cache_dir::url_to_filename; use deno_core::url::Host; use deno_core::url::Url; +use deno_path_util::fs::atomic_write_file_with_retries; use std::ffi::OsStr; use std::fs; use std::path::Component; @@ -16,14 +18,16 @@ use std::str; #[derive(Debug, Clone)] pub struct DiskCache { + sys: CliSys, pub location: PathBuf, } impl DiskCache { /// `location` must be an absolute path. 
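// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the lookup order the removed
// `dirs` module implemented and that `deno_cache_dir::resolve_deno_dir` now
// performs for it: explicit root, then $DENO_DIR, then the OS cache dir,
// then ~/.deno. Uses only std; the function name is hypothetical and the
// cache-dir branch is simplified to the XDG case.
// use std::path::PathBuf;
//
// fn resolve_root_sketch(maybe_custom_root: Option<PathBuf>) -> Option<PathBuf> {
//   maybe_custom_root
//     .or_else(|| std::env::var_os("DENO_DIR").map(PathBuf::from))
//     .or_else(|| {
//       // Simplified stand-in for dirs::cache_dir(); deno appends "deno".
//       std::env::var_os("XDG_CACHE_HOME").map(|c| PathBuf::from(c).join("deno"))
//     })
//     .or_else(|| std::env::var_os("HOME").map(|h| PathBuf::from(h).join(".deno")))
// }
// ---------------------------------------------------------------------------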
- pub fn new(location: &Path) -> Self { + pub fn new(sys: CliSys, location: &Path) -> Self { assert!(location.is_absolute()); Self { + sys, location: location.to_owned(), } } @@ -120,7 +124,7 @@ impl DiskCache { pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> { let path = self.location.join(filename); - atomic_write_file_with_retries(&path, data, CACHE_PERM) + atomic_write_file_with_retries(&self.sys, &path, data, CACHE_PERM) } } @@ -133,7 +137,7 @@ mod tests { fn test_set_get_cache_file() { let temp_dir = TempDir::new(); let sub_dir = temp_dir.path().join("sub_dir"); - let cache = DiskCache::new(&sub_dir.to_path_buf()); + let cache = DiskCache::new(CliSys::default(), &sub_dir.to_path_buf()); let path = PathBuf::from("foo/bar.txt"); cache.set(&path, b"hello").unwrap(); assert_eq!(cache.get(&path).unwrap(), b"hello"); @@ -147,7 +151,7 @@ mod tests { PathBuf::from("/deno_dir/") }; - let cache = DiskCache::new(&cache_location); + let cache = DiskCache::new(CliSys::default(), &cache_location); let mut test_cases = vec![ ( @@ -203,7 +207,7 @@ mod tests { } else { "/foo" }; - let cache = DiskCache::new(&PathBuf::from(p)); + let cache = DiskCache::new(CliSys::default(), &PathBuf::from(p)); let mut test_cases = vec![ ( @@ -251,7 +255,7 @@ mod tests { PathBuf::from("/deno_dir/") }; - let cache = DiskCache::new(&cache_location); + let cache = DiskCache::new(CliSys::default(), &cache_location); let mut test_cases = vec!["unknown://localhost/test.ts"]; diff --git a/cli/cache/emit.rs b/cli/cache/emit.rs index 3c9eecfcbd..b239cc93ba 100644 --- a/cli/cache/emit.rs +++ b/cli/cache/emit.rs @@ -159,12 +159,15 @@ impl EmitFileSerializer { mod test { use test_util::TempDir; + use crate::sys::CliSys; + use super::*; #[test] pub fn emit_cache_general_use() { let temp_dir = TempDir::new(); - let disk_cache = DiskCache::new(temp_dir.path().as_path()); + let disk_cache = + DiskCache::new(CliSys::default(), temp_dir.path().as_path()); let cache = EmitCache { disk_cache: disk_cache.clone(), file_serializer: EmitFileSerializer { diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index e3e242e975..bc6f792667 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -1,18 +1,17 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
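// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the DiskCache round-trip the
// updated test above exercises. The only signature change in this patch is
// the leading CliSys argument; `absolute_dir` must be absolute because
// DiskCache::new asserts that.
// use std::path::{Path, PathBuf};
//
// fn disk_cache_round_trip_sketch(sys: CliSys, absolute_dir: &Path) {
//   let cache = DiskCache::new(sys, absolute_dir);
//   let path = PathBuf::from("foo/bar.txt");
//   cache.set(&path, b"hello").unwrap();
//   assert_eq!(cache.get(&path).unwrap(), b"hello");
// }
// ---------------------------------------------------------------------------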
use crate::args::jsr_url; -use crate::args::CacheSetting; -use crate::errors::get_error_class_name; +use crate::file_fetcher::CliFetchNoFollowErrorKind; +use crate::file_fetcher::CliFileFetcher; use crate::file_fetcher::FetchNoFollowOptions; -use crate::file_fetcher::FetchOptions; use crate::file_fetcher::FetchPermissionsOptionRef; -use crate::file_fetcher::FileFetcher; -use crate::file_fetcher::FileOrRedirect; -use crate::util::fs::atomic_write_file_with_retries; -use crate::util::fs::atomic_write_file_with_retries_and_fs; -use crate::util::fs::AtomicWriteFileFsAdapter; +use crate::sys::CliSys; use deno_ast::MediaType; +use deno_cache_dir::file_fetcher::CacheSetting; +use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind; +use deno_cache_dir::file_fetcher::FileOrRedirect; +use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::FutureExt; use deno_core::ModuleSpecifier; @@ -20,15 +19,11 @@ use deno_graph::source::CacheInfo; use deno_graph::source::LoadFuture; use deno_graph::source::LoadResponse; use deno_graph::source::Loader; -use deno_runtime::deno_fs; use deno_runtime::deno_permissions::PermissionsContainer; use node_resolver::InNpmPackageChecker; -use std::borrow::Cow; use std::collections::HashMap; -use std::path::Path; use std::path::PathBuf; use std::sync::Arc; -use std::time::SystemTime; mod cache_db; mod caches; @@ -49,7 +44,6 @@ pub use caches::Caches; pub use check::TypeCheckCache; pub use code_cache::CodeCache; pub use common::FastInsecureHasher; -pub use deno_dir::dirs::home_dir; pub use deno_dir::DenoDir; pub use deno_dir::DenoDirProvider; pub use disk_cache::DiskCache; @@ -62,121 +56,11 @@ pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::ParsedSourceCache; /// Permissions used to save a file in the disk caches. 
-pub const CACHE_PERM: u32 = 0o644;
+pub use deno_cache_dir::CACHE_PERM;
 
-#[derive(Debug, Clone)]
-pub struct RealDenoCacheEnv;
-
-impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
-  fn read_file_bytes(
-    &self,
-    path: &Path,
-  ) -> std::io::Result<Cow<'static, [u8]>> {
-    std::fs::read(path).map(Cow::Owned)
-  }
-
-  fn atomic_write_file(
-    &self,
-    path: &Path,
-    bytes: &[u8],
-  ) -> std::io::Result<()> {
-    atomic_write_file_with_retries(path, bytes, CACHE_PERM)
-  }
-
-  fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
-    crate::util::fs::canonicalize_path(path)
-  }
-
-  fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
-    std::fs::create_dir_all(path)
-  }
-
-  fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
-    match std::fs::metadata(path) {
-      Ok(metadata) => Ok(Some(
-        metadata.modified().unwrap_or_else(|_| SystemTime::now()),
-      )),
-      Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
-      Err(err) => Err(err),
-    }
-  }
-
-  fn is_file(&self, path: &Path) -> bool {
-    path.is_file()
-  }
-
-  fn time_now(&self) -> SystemTime {
-    SystemTime::now()
-  }
-}
-
-#[derive(Debug, Clone)]
-pub struct DenoCacheEnvFsAdapter<'a>(
-  pub &'a dyn deno_runtime::deno_fs::FileSystem,
-);
-
-impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
-  fn read_file_bytes(
-    &self,
-    path: &Path,
-  ) -> std::io::Result<Cow<'static, [u8]>> {
-    self
-      .0
-      .read_file_sync(path, None)
-      .map_err(|err| err.into_io_error())
-  }
-
-  fn atomic_write_file(
-    &self,
-    path: &Path,
-    bytes: &[u8],
-  ) -> std::io::Result<()> {
-    atomic_write_file_with_retries_and_fs(
-      &AtomicWriteFileFsAdapter {
-        fs: self.0,
-        write_mode: CACHE_PERM,
-      },
-      path,
-      bytes,
-    )
-  }
-
-  fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
-    self.0.realpath_sync(path).map_err(|e| e.into_io_error())
-  }
-
-  fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
-    self
-      .0
-      .mkdir_sync(path, true, None)
-      .map_err(|e| e.into_io_error())
-  }
-
-  fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
-    self
-      .0
-      .stat_sync(path)
-      .map(|stat| {
-        stat
-          .mtime
-          .map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
-      })
-      .map_err(|e| e.into_io_error())
-  }
-
-  fn is_file(&self, path: &Path) -> bool {
-    self.0.is_file_sync(path)
-  }
-
-  fn time_now(&self) -> SystemTime {
-    SystemTime::now()
-  }
-}
-
-pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
-pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
-pub type LocalLspHttpCache =
-  deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
+pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<CliSys>;
+pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<CliSys>;
+pub type LocalLspHttpCache = deno_cache_dir::LocalLspHttpCache<CliSys>;
 
 pub use deno_cache_dir::HttpCache;
 
 pub struct FetchCacherOptions {
@@ -190,31 +74,31 @@
 /// a concise interface to the DENO_DIR when building module graphs.
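// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: how the type-alias change
// above reads at a call site. Before this patch the cache env was passed
// second (GlobalHttpCache::new(remote_folder, RealDenoCacheEnv)); afterwards
// a CliSys handle is passed first, mirroring CliFactory::global_http_cache
// later in this diff. `remote_folder` stands in for
// deno_dir.remote_folder_path(); the function name is hypothetical.
// fn build_global_http_cache_sketch(remote_folder: PathBuf) -> GlobalHttpCache {
//   GlobalHttpCache::new(CliSys::default(), remote_folder)
// }
// ---------------------------------------------------------------------------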
 pub struct FetchCacher {
   pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
-  file_fetcher: Arc<FileFetcher>,
-  fs: Arc<dyn deno_fs::FileSystem>,
+  file_fetcher: Arc<CliFileFetcher>,
   global_http_cache: Arc<GlobalHttpCache>,
   in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
   module_info_cache: Arc<ModuleInfoCache>,
   permissions: PermissionsContainer,
+  sys: CliSys,
   is_deno_publish: bool,
   cache_info_enabled: bool,
 }
 
 impl FetchCacher {
   pub fn new(
-    file_fetcher: Arc<FileFetcher>,
-    fs: Arc<dyn deno_fs::FileSystem>,
+    file_fetcher: Arc<CliFileFetcher>,
     global_http_cache: Arc<GlobalHttpCache>,
     in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
     module_info_cache: Arc<ModuleInfoCache>,
+    sys: CliSys,
     options: FetchCacherOptions,
   ) -> Self {
     Self {
       file_fetcher,
-      fs,
       global_http_cache,
       in_npm_pkg_checker,
       module_info_cache,
+      sys,
       file_header_overrides: options.file_header_overrides,
       permissions: options.permissions,
       is_deno_publish: options.is_deno_publish,
@@ -276,9 +160,8 @@ impl Loader for FetchCacher {
     // symlinked to `/my-project-2/node_modules`), so first we checked if the path
     // is in a node_modules dir to avoid needlessly canonicalizing, then now compare
     // against the canonicalized specifier.
-    let specifier = crate::node::resolve_specifier_into_node_modules(
-      specifier,
-      self.fs.as_ref(),
+    let specifier = node_resolver::resolve_specifier_into_node_modules(
+      &self.sys, specifier,
     );
     if self.in_npm_pkg_checker.in_npm_package(&specifier) {
       return Box::pin(futures::future::ready(Ok(Some(
@@ -320,18 +203,18 @@
         LoaderCacheSetting::Only => Some(CacheSetting::Only),
       };
       file_fetcher
-        .fetch_no_follow_with_options(FetchNoFollowOptions {
-          fetch_options: FetchOptions {
-            specifier: &specifier,
-            permissions: if is_statically_analyzable {
-              FetchPermissionsOptionRef::StaticContainer(&permissions)
-            } else {
-              FetchPermissionsOptionRef::DynamicContainer(&permissions)
-            },
-            maybe_auth: None,
-            maybe_accept: None,
-            maybe_cache_setting: maybe_cache_setting.as_ref(),
-          },
+        .fetch_no_follow(
+          &specifier,
+          FetchPermissionsOptionRef::Restricted(&permissions,
+            if is_statically_analyzable {
+              deno_runtime::deno_permissions::CheckSpecifierKind::Static
+            } else {
+              deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic
+            }),
+          FetchNoFollowOptions {
+            maybe_auth: None,
+            maybe_accept: None,
+            maybe_cache_setting: maybe_cache_setting.as_ref(),
             maybe_checksum: options.maybe_checksum.as_ref(),
           })
           .await
@@ -348,7 +231,7 @@
           (None, None) => None,
         };
         Ok(Some(LoadResponse::Module {
-          specifier: file.specifier,
+          specifier: file.url,
           maybe_headers,
           content: file.source,
         }))
@@ -361,18 +244,46 @@
         }
       })
       .unwrap_or_else(|err| {
-        if let Some(io_err) = err.downcast_ref::<std::io::Error>() {
-          if io_err.kind() == std::io::ErrorKind::NotFound {
-            return Ok(None);
-          } else {
-            return Err(err);
-          }
-        }
-        let error_class_name = get_error_class_name(&err);
-        match error_class_name {
-          "NotFound" => Ok(None),
-          "NotCached" if options.cache_setting == LoaderCacheSetting::Only => Ok(None),
-          _ => Err(err),
+        let err = err.into_kind();
+        match err {
+          CliFetchNoFollowErrorKind::FetchNoFollow(err) => {
+            let err = err.into_kind();
+            match err {
+              FetchNoFollowErrorKind::NotFound(_) => Ok(None),
+              FetchNoFollowErrorKind::UrlToFilePath { .. } |
+              FetchNoFollowErrorKind::ReadingBlobUrl { .. } |
+              FetchNoFollowErrorKind::ReadingFile { .. } |
+              FetchNoFollowErrorKind::FetchingRemote { .. } |
+              FetchNoFollowErrorKind::ClientError { .. } |
+              FetchNoFollowErrorKind::NoRemote { .. } |
+              FetchNoFollowErrorKind::DataUrlDecode { .. } |
+              FetchNoFollowErrorKind::RedirectResolution { .. } |
+              FetchNoFollowErrorKind::CacheRead { .. } |
+              FetchNoFollowErrorKind::CacheSave { ..
} | + FetchNoFollowErrorKind::UnsupportedScheme { .. } | + FetchNoFollowErrorKind::RedirectHeaderParse { .. } | + FetchNoFollowErrorKind::InvalidHeader { .. } => Err(AnyError::from(err)), + FetchNoFollowErrorKind::NotCached { .. } => { + if options.cache_setting == LoaderCacheSetting::Only { + Ok(None) + } else { + Err(AnyError::from(err)) + } + }, + FetchNoFollowErrorKind::ChecksumIntegrity(err) => { + // convert to the equivalent deno_graph error so that it + // enhances it if this is passed to deno_graph + Err( + deno_graph::source::ChecksumIntegrityError { + actual: err.actual, + expected: err.expected, + } + .into(), + ) + } + } + }, + CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(AnyError::from(permission_check_error)), } }) } diff --git a/cli/clippy.toml b/cli/clippy.toml index f1c25acfb8..21a544aebd 100644 --- a/cli/clippy.toml +++ b/cli/clippy.toml @@ -4,6 +4,7 @@ disallowed-methods = [ ] disallowed-types = [ { path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" }, + { path = "sys_traits::impls::RealSys", reason = "use crate::sys::CliSys instead" }, ] ignore-interior-mutability = [ "lsp_types::Uri", diff --git a/cli/emit.rs b/cli/emit.rs index 3cd23b7abb..733a89d832 100644 --- a/cli/emit.rs +++ b/cli/emit.rs @@ -5,6 +5,7 @@ use crate::cache::FastInsecureHasher; use crate::cache::ParsedSourceCache; use crate::resolver::CjsTracker; +use deno_ast::EmittedSourceText; use deno_ast::ModuleKind; use deno_ast::SourceMapOption; use deno_ast::SourceRange; @@ -132,6 +133,7 @@ impl Emitter { &transpile_and_emit_options.0, &transpile_and_emit_options.1, ) + .map(|r| r.text) } }) .await @@ -166,7 +168,8 @@ impl Emitter { source.clone(), &self.transpile_and_emit_options.0, &self.transpile_and_emit_options.1, - )?; + )? + .text; helper.post_emit_parsed_source( specifier, &transpiled_source, @@ -177,6 +180,31 @@ impl Emitter { } } + pub fn emit_parsed_source_for_deno_compile( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + module_kind: deno_ast::ModuleKind, + source: &Arc, + ) -> Result<(String, String), AnyError> { + let mut emit_options = self.transpile_and_emit_options.1.clone(); + emit_options.inline_sources = false; + emit_options.source_map = SourceMapOption::Separate; + // strip off the path to have more deterministic builds as we don't care + // about the source name because we manually provide the source map to v8 + emit_options.source_map_base = Some(deno_path_util::url_parent(specifier)); + let source = EmitParsedSourceHelper::transpile( + &self.parsed_source_cache, + specifier, + media_type, + module_kind, + source.clone(), + &self.transpile_and_emit_options.0, + &emit_options, + )?; + Ok((source.text, source.source_map.unwrap())) + } + /// Expects a file URL, panics otherwise. 
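// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the emit-option adjustments
// emit_parsed_source_for_deno_compile makes above, starting from some base
// EmitOptions. All field names come from the diff itself; the function name
// and `base_options` parameter are hypothetical.
// fn compile_emit_options_sketch(
//   base_options: &deno_ast::EmitOptions,
//   specifier: &ModuleSpecifier,
// ) -> deno_ast::EmitOptions {
//   let mut emit_options = base_options.clone();
//   // Keep sources out of the map and emit the map separately so the
//   // compiled binary can hand it to v8 itself.
//   emit_options.inline_sources = false;
//   emit_options.source_map = SourceMapOption::Separate;
//   // A relative source-map base keeps builds deterministic across machines.
//   emit_options.source_map_base = Some(deno_path_util::url_parent(specifier));
//   emit_options
// }
// ---------------------------------------------------------------------------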
pub async fn load_and_emit_for_hmr( &self, @@ -282,7 +310,7 @@ impl<'a> EmitParsedSourceHelper<'a> { source: Arc, transpile_options: &deno_ast::TranspileOptions, emit_options: &deno_ast::EmitOptions, - ) -> Result { + ) -> Result { // nothing else needs the parsed source at this point, so remove from // the cache in order to not transpile owned let parsed_source = parsed_source_cache @@ -302,8 +330,7 @@ impl<'a> EmitParsedSourceHelper<'a> { source } }; - debug_assert!(transpiled_source.source_map.is_none()); - Ok(transpiled_source.text) + Ok(transpiled_source) } pub fn post_emit_parsed_source( diff --git a/cli/factory.rs b/cli/factory.rs index f08bf7e4b1..e33b95d235 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -11,7 +11,6 @@ use crate::args::StorageKeyResolver; use crate::args::TsConfigType; use crate::cache::Caches; use crate::cache::CodeCache; -use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::DenoDir; use crate::cache::DenoDirProvider; use crate::cache::EmitCache; @@ -22,7 +21,7 @@ use crate::cache::ModuleInfoCache; use crate::cache::NodeAnalysisCache; use crate::cache::ParsedSourceCache; use crate::emit::Emitter; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; use crate::graph_container::MainModuleGraphContainer; use crate::graph_util::FileWatcherReporter; use crate::graph_util::ModuleGraphBuilder; @@ -32,6 +31,8 @@ use crate::module_loader::CliModuleLoaderFactory; use crate::module_loader::ModuleLoadPreparer; use crate::node::CliCjsCodeAnalyzer; use crate::node::CliNodeCodeTranslator; +use crate::node::CliNodeResolver; +use crate::node::CliPackageJsonResolver; use crate::npm::create_cli_npm_resolver; use crate::npm::create_in_npm_pkg_checker; use crate::npm::CliByonmNpmResolverCreateOptions; @@ -43,14 +44,14 @@ use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CreateInNpmPkgCheckerOptions; use crate::resolver::CjsTracker; use crate::resolver::CliDenoResolver; -use crate::resolver::CliDenoResolverFs; use crate::resolver::CliNpmReqResolver; use crate::resolver::CliResolver; use crate::resolver::CliResolverOptions; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::NpmModuleLoader; use crate::resolver::SloppyImportsCachedFs; -use crate::standalone::DenoCompileBinaryWriter; +use crate::standalone::binary::DenoCompileBinaryWriter; +use crate::sys::CliSys; use crate::tools::check::TypeChecker; use crate::tools::coverage::CoverageCollector; use crate::tools::lint::LintRuleProvider; @@ -76,9 +77,8 @@ use deno_resolver::npm::NpmReqResolverOptions; use deno_resolver::DenoResolverOptions; use deno_resolver::NodeAndNpmReqResolver; use deno_runtime::deno_fs; -use deno_runtime::deno_node::DenoFsNodeResolverEnv; -use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_node::PackageJsonResolver; +use deno_runtime::deno_fs::RealFs; +use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -185,7 +185,7 @@ struct CliFactoryServices { emit_cache: Deferred>, emitter: Deferred>, feature_checker: Deferred>, - file_fetcher: Deferred>, + file_fetcher: Deferred>, fs: Deferred>, global_http_cache: Deferred>, http_cache: Deferred>, @@ -199,13 +199,14 @@ struct CliFactoryServices { module_info_cache: Deferred>, module_load_preparer: Deferred>, node_code_translator: Deferred>, - node_resolver: Deferred>, + node_resolver: Deferred>, npm_cache_dir: Deferred>, 
npm_req_resolver: Deferred>, npm_resolver: Deferred>, parsed_source_cache: Deferred>, - permission_desc_parser: Deferred>, - pkg_json_resolver: Deferred>, + permission_desc_parser: + Deferred>>, + pkg_json_resolver: Deferred>, resolver: Deferred>, root_cert_store_provider: Deferred>, root_permissions_container: Deferred, @@ -255,7 +256,7 @@ impl CliFactory { pub fn cli_options(&self) -> Result<&Arc, AnyError> { self.services.cli_options.get_or_try_init(|| { - CliOptions::from_flags(self.flags.clone()).map(Arc::new) + CliOptions::from_flags(&self.sys(), self.flags.clone()).map(Arc::new) }) } @@ -318,8 +319,8 @@ impl CliFactory { pub fn global_http_cache(&self) -> Result<&Arc, AnyError> { self.services.global_http_cache.get_or_try_init(|| { Ok(Arc::new(GlobalHttpCache::new( + self.sys(), self.deno_dir()?.remote_folder_path(), - crate::cache::RealDenoCacheEnv, ))) }) } @@ -350,22 +351,28 @@ impl CliFactory { }) } - pub fn file_fetcher(&self) -> Result<&Arc, AnyError> { + pub fn file_fetcher(&self) -> Result<&Arc, AnyError> { self.services.file_fetcher.get_or_try_init(|| { let cli_options = self.cli_options()?; - Ok(Arc::new(FileFetcher::new( + Ok(Arc::new(CliFileFetcher::new( self.http_cache()?.clone(), - cli_options.cache_setting(), - !cli_options.no_remote(), self.http_client_provider().clone(), + self.sys(), self.blob_store().clone(), Some(self.text_only_progress_bar().clone()), + !cli_options.no_remote(), + cli_options.cache_setting(), + log::Level::Info, ))) }) } pub fn fs(&self) -> &Arc { - self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs)) + self.services.fs.get_or_init(|| Arc::new(RealFs)) + } + + pub fn sys(&self) -> CliSys { + CliSys::default() // very cheap to make } pub fn in_npm_pkg_checker( @@ -391,11 +398,10 @@ impl CliFactory { pub fn npm_cache_dir(&self) -> Result<&Arc, AnyError> { self.services.npm_cache_dir.get_or_try_init(|| { - let fs = self.fs(); let global_path = self.deno_dir()?.npm_folder_path(); let cli_options = self.cli_options()?; Ok(Arc::new(NpmCacheDir::new( - &DenoCacheEnvFsAdapter(fs.as_ref()), + &self.sys(), global_path, cli_options.npmrc().get_all_known_registries_urls(), ))) @@ -410,12 +416,11 @@ impl CliFactory { .npm_resolver .get_or_try_init_async( async { - let fs = self.fs(); let cli_options = self.cli_options()?; create_cli_npm_resolver(if cli_options.use_byonm() { CliNpmResolverCreateOptions::Byonm( CliByonmNpmResolverCreateOptions { - fs: CliDenoResolverFs(fs.clone()), + sys: self.sys(), pkg_json_resolver: self.pkg_json_resolver().clone(), root_node_modules_dir: Some( match cli_options.node_modules_dir_path() { @@ -433,6 +438,13 @@ impl CliFactory { } else { CliNpmResolverCreateOptions::Managed( CliManagedNpmResolverCreateOptions { + http_client_provider: self.http_client_provider().clone(), + npm_install_deps_provider: Arc::new( + NpmInstallDepsProvider::from_workspace( + cli_options.workspace(), + ), + ), + sys: self.sys(), snapshot: match cli_options.resolve_npm_resolution_snapshot()? 
{ Some(snapshot) => { CliNpmResolverManagedSnapshotOption::Specified(Some( @@ -451,19 +463,12 @@ impl CliFactory { }, }, maybe_lockfile: cli_options.maybe_lockfile().cloned(), - fs: fs.clone(), - http_client_provider: self.http_client_provider().clone(), npm_cache_dir: self.npm_cache_dir()?.clone(), cache_setting: cli_options.cache_setting(), text_only_progress_bar: self.text_only_progress_bar().clone(), maybe_node_modules_path: cli_options .node_modules_dir_path() .cloned(), - npm_install_deps_provider: Arc::new( - NpmInstallDepsProvider::from_workspace( - cli_options.workspace(), - ), - ), npm_system_info: cli_options.npm_system_info(), npmrc: cli_options.npmrc().clone(), lifecycle_scripts: cli_options.lifecycle_scripts_config(), @@ -486,7 +491,7 @@ impl CliFactory { .get_or_try_init(|| { Ok(self.cli_options()?.unstable_sloppy_imports().then(|| { Arc::new(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new( - self.fs().clone(), + self.sys(), ))) })) }) @@ -647,21 +652,22 @@ impl CliFactory { )) } - pub async fn node_resolver(&self) -> Result<&Arc, AnyError> { + pub async fn node_resolver(&self) -> Result<&Arc, AnyError> { self .services .node_resolver .get_or_try_init_async( async { - Ok(Arc::new(NodeResolver::new( - DenoFsNodeResolverEnv::new(self.fs().clone()), + Ok(Arc::new(CliNodeResolver::new( self.in_npm_pkg_checker()?.clone(), + RealIsBuiltInNodeModuleChecker, self .npm_resolver() .await? .clone() .into_npm_pkg_folder_resolver(), self.pkg_json_resolver().clone(), + self.sys(), ))) } .boxed_local(), @@ -689,7 +695,6 @@ impl CliFactory { Ok(Arc::new(NodeCodeTranslator::new( cjs_esm_analyzer, - DenoFsNodeResolverEnv::new(self.fs().clone()), self.in_npm_pkg_checker()?.clone(), node_resolver, self @@ -698,6 +703,7 @@ impl CliFactory { .clone() .into_npm_pkg_folder_resolver(), self.pkg_json_resolver().clone(), + self.sys(), ))) }) .await @@ -713,7 +719,7 @@ impl CliFactory { let npm_resolver = self.npm_resolver().await?; Ok(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(), - fs: CliDenoResolverFs(self.fs().clone()), + sys: self.sys(), in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(), node_resolver: self.node_resolver().await?.clone(), npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(), @@ -722,12 +728,11 @@ impl CliFactory { .await } - pub fn pkg_json_resolver(&self) -> &Arc { - self.services.pkg_json_resolver.get_or_init(|| { - Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new( - self.fs().clone(), - ))) - }) + pub fn pkg_json_resolver(&self) -> &Arc { + self + .services + .pkg_json_resolver + .get_or_init(|| Arc::new(CliPackageJsonResolver::new(self.sys()))) } pub async fn type_checker(&self) -> Result<&Arc, AnyError> { @@ -764,7 +769,6 @@ impl CliFactory { self.cjs_tracker()?.clone(), cli_options.clone(), self.file_fetcher()?.clone(), - self.fs().clone(), self.global_http_cache()?.clone(), self.in_npm_pkg_checker()?.clone(), cli_options.maybe_lockfile().cloned(), @@ -774,6 +778,7 @@ impl CliFactory { self.parsed_source_cache().clone(), self.resolver().await?.clone(), self.root_permissions_container()?.clone(), + self.sys(), ))) }) .await @@ -863,10 +868,9 @@ impl CliFactory { pub fn permission_desc_parser( &self, - ) -> Result<&Arc, AnyError> { + ) -> Result<&Arc>, AnyError> { self.services.permission_desc_parser.get_or_try_init(|| { - let fs = self.fs().clone(); - Ok(Arc::new(RuntimePermissionDescriptorParser::new(fs))) + Ok(Arc::new(RuntimePermissionDescriptorParser::new(self.sys()))) 
}) } @@ -959,7 +963,6 @@ impl CliFactory { None }, self.emitter()?.clone(), - fs.clone(), in_npm_pkg_checker.clone(), self.main_module_graph_container().await?.clone(), self.module_load_preparer().await?.clone(), @@ -974,6 +977,7 @@ impl CliFactory { ), self.parsed_source_cache().clone(), self.resolver().await?.clone(), + self.sys(), )), node_resolver.clone(), npm_resolver.clone(), @@ -981,6 +985,7 @@ impl CliFactory { self.root_cert_store_provider().clone(), self.root_permissions_container()?.clone(), StorageKeyResolver::from_options(cli_options), + self.sys(), cli_options.sub_command().clone(), self.create_cli_main_worker_options()?, self.cli_options()?.otel_config(), diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 29f9c6ba3f..7e8438d639 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -1,41 +1,44 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use crate::args::CacheSetting; -use crate::auth_tokens::AuthTokens; -use crate::cache::HttpCache; -use crate::colors; -use crate::http_util::CacheSemantics; -use crate::http_util::FetchOnceArgs; -use crate::http_util::FetchOnceResult; -use crate::http_util::HttpClientProvider; -use crate::util::progress_bar::ProgressBar; +use std::borrow::Cow; +use std::collections::HashMap; +use std::sync::Arc; +use boxed_error::Boxed; use deno_ast::MediaType; +use deno_cache_dir::file_fetcher::AuthTokens; +use deno_cache_dir::file_fetcher::BlobData; +use deno_cache_dir::file_fetcher::CacheSetting; +use deno_cache_dir::file_fetcher::FetchNoFollowError; +use deno_cache_dir::file_fetcher::File; +use deno_cache_dir::file_fetcher::FileFetcherOptions; +use deno_cache_dir::file_fetcher::FileOrRedirect; +use deno_cache_dir::file_fetcher::SendError; +use deno_cache_dir::file_fetcher::SendResponse; +use deno_cache_dir::file_fetcher::TooManyRedirectsError; +use deno_cache_dir::file_fetcher::UnsupportedSchemeError; use deno_core::anyhow::Context; -use deno_core::error::custom_error; -use deno_core::error::generic_error; -use deno_core::error::uri_error; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::url::Url; use deno_core::ModuleSpecifier; +use deno_error::JsError; use deno_graph::source::LoaderChecksum; - -use deno_path_util::url_to_file_path; +use deno_runtime::deno_permissions::CheckSpecifierKind; +use deno_runtime::deno_permissions::PermissionCheckError; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_web::BlobStore; use http::header; -use log::debug; -use std::borrow::Cow; -use std::collections::HashMap; -use std::env; -use std::fs; -use std::path::PathBuf; -use std::sync::Arc; -use std::time::SystemTime; +use http::HeaderMap; +use http::StatusCode; +use thiserror::Error; -pub const SUPPORTED_SCHEMES: [&str; 5] = - ["data", "blob", "file", "http", "https"]; +use crate::cache::HttpCache; +use crate::colors; +use crate::http_util::get_response_body_with_progress; +use crate::http_util::HttpClientProvider; +use crate::sys::CliSys; +use crate::util::progress_bar::ProgressBar; #[derive(Debug, Clone, Eq, PartialEq)] pub struct TextDecodedFile { @@ -47,62 +50,19 @@ pub struct TextDecodedFile { pub source: Arc, } -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum FileOrRedirect { - File(File), - Redirect(ModuleSpecifier), -} - -impl FileOrRedirect { - fn from_deno_cache_entry( - specifier: &ModuleSpecifier, - cache_entry: deno_cache_dir::CacheEntry, - ) -> Result { - if let Some(redirect_to) = cache_entry.metadata.headers.get("location") { - let redirect 
= specifier.join(redirect_to)?; - Ok(FileOrRedirect::Redirect(redirect)) - } else { - Ok(FileOrRedirect::File(File { - specifier: specifier.clone(), - maybe_headers: Some(cache_entry.metadata.headers), - source: Arc::from(cache_entry.content), - })) - } - } -} - -/// A structure representing a source file. -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct File { - /// The _final_ specifier for the file. The requested specifier and the final - /// specifier maybe different for remote files that have been redirected. - pub specifier: ModuleSpecifier, - pub maybe_headers: Option>, - /// The source of the file. - pub source: Arc<[u8]>, -} - -impl File { - pub fn resolve_media_type_and_charset(&self) -> (MediaType, Option<&str>) { - deno_graph::source::resolve_media_type_and_charset_from_headers( - &self.specifier, - self.maybe_headers.as_ref(), - ) - } - +impl TextDecodedFile { /// Decodes the source bytes into a string handling any encoding rules /// for local vs remote files and dealing with the charset. - pub fn into_text_decoded(self) -> Result { - // lots of borrow checker fighting here + pub fn decode(file: File) -> Result { let (media_type, maybe_charset) = deno_graph::source::resolve_media_type_and_charset_from_headers( - &self.specifier, - self.maybe_headers.as_ref(), + &file.url, + file.maybe_headers.as_ref(), ); - let specifier = self.specifier; + let specifier = file.url; match deno_graph::source::decode_source( &specifier, - self.source, + file.source, maybe_charset, ) { Ok(source) => Ok(TextDecodedFile { @@ -117,14 +77,146 @@ impl File { } } -#[derive(Debug, Clone, Default)] -struct MemoryFiles(Arc>>); +#[derive(Debug)] +struct BlobStoreAdapter(Arc); + +#[async_trait::async_trait(?Send)] +impl deno_cache_dir::file_fetcher::BlobStore for BlobStoreAdapter { + async fn get(&self, specifier: &Url) -> std::io::Result> { + let Some(blob) = self.0.get_object_url(specifier.clone()) else { + return Ok(None); + }; + Ok(Some(BlobData { + media_type: blob.media_type.clone(), + bytes: blob.read_all().await, + })) + } +} + +#[derive(Debug)] +struct HttpClientAdapter { + http_client_provider: Arc, + download_log_level: log::Level, + progress_bar: Option, +} + +#[async_trait::async_trait(?Send)] +impl deno_cache_dir::file_fetcher::HttpClient for HttpClientAdapter { + async fn send_no_follow( + &self, + url: &Url, + headers: HeaderMap, + ) -> Result { + async fn handle_request_or_server_error( + retried: &mut bool, + specifier: &Url, + err_str: String, + ) -> Result<(), ()> { + // Retry once, and bail otherwise. + if !*retried { + *retried = true; + log::debug!("Import '{}' failed: {}. Retrying...", specifier, err_str); + tokio::time::sleep(std::time::Duration::from_millis(50)).await; + Ok(()) + } else { + Err(()) + } + } + + let mut maybe_progress_guard = None; + if let Some(pb) = self.progress_bar.as_ref() { + maybe_progress_guard = Some(pb.update(url.as_str())); + } else { + log::log!( + self.download_log_level, + "{} {}", + colors::green("Download"), + url + ); + } + + let mut retried = false; // retry intermittent failures + loop { + let response = match self + .http_client_provider + .get_or_create() + .map_err(|err| SendError::Failed(err.into()))? 
+ .send(url, headers.clone()) + .await + { + Ok(response) => response, + Err(crate::http_util::SendError::Send(err)) => { + if err.is_connect_error() { + handle_request_or_server_error(&mut retried, url, err.to_string()) + .await + .map_err(|()| SendError::Failed(err.into()))?; + continue; + } else { + return Err(SendError::Failed(err.into())); + } + } + Err(crate::http_util::SendError::InvalidUri(err)) => { + return Err(SendError::Failed(err.into())); + } + }; + if response.status() == StatusCode::NOT_MODIFIED { + return Ok(SendResponse::NotModified); + } + + if let Some(warning) = response.headers().get("X-Deno-Warning") { + log::warn!( + "{} {}", + crate::colors::yellow("Warning"), + warning.to_str().unwrap() + ); + } + + if response.status().is_redirection() { + return Ok(SendResponse::Redirect(response.into_parts().0.headers)); + } + + if response.status().is_server_error() { + handle_request_or_server_error( + &mut retried, + url, + response.status().to_string(), + ) + .await + .map_err(|()| SendError::StatusCode(response.status()))?; + } else if response.status().is_client_error() { + let err = if response.status() == StatusCode::NOT_FOUND { + SendError::NotFound + } else { + SendError::StatusCode(response.status()) + }; + return Err(err); + } else { + let body_result = get_response_body_with_progress( + response, + maybe_progress_guard.as_ref(), + ) + .await; + + match body_result { + Ok((headers, body)) => { + return Ok(SendResponse::Success(headers, body)); + } + Err(err) => { + handle_request_or_server_error(&mut retried, url, err.to_string()) + .await + .map_err(|()| SendError::Failed(err.into()))?; + continue; + } + } + } + } + } +} + +#[derive(Debug, Default)] +struct MemoryFiles(Mutex>); impl MemoryFiles { - pub fn get(&self, specifier: &ModuleSpecifier) -> Option { - self.0.lock().get(specifier).cloned() - } - pub fn insert(&self, specifier: ModuleSpecifier, file: File) -> Option { self.0.lock().insert(specifier, file) } @@ -134,416 +226,96 @@ impl MemoryFiles { } } -/// Fetch a source file from the local file system. -fn fetch_local(specifier: &ModuleSpecifier) -> Result { - let local = url_to_file_path(specifier).map_err(|_| { - uri_error(format!("Invalid file path.\n Specifier: {specifier}")) - })?; - // If it doesnt have a extension, we want to treat it as typescript by default - let headers = if local.extension().is_none() { - Some(HashMap::from([( - "content-type".to_string(), - "application/typescript".to_string(), - )])) - } else { - None - }; - let bytes = fs::read(local)?; - - Ok(File { - specifier: specifier.clone(), - maybe_headers: headers, - source: bytes.into(), - }) +impl deno_cache_dir::file_fetcher::MemoryFiles for MemoryFiles { + fn get(&self, specifier: &ModuleSpecifier) -> Option { + self.0.lock().get(specifier).cloned() + } } -/// Return a validated scheme for a given module specifier. -fn get_validated_scheme( - specifier: &ModuleSpecifier, -) -> Result { - let scheme = specifier.scheme(); - if !SUPPORTED_SCHEMES.contains(&scheme) { - // NOTE(bartlomieju): this message list additional `npm` and `jsr` schemes, but they should actually be handled - // before `file_fetcher.rs` APIs are even hit. 
-    let mut all_supported_schemes = SUPPORTED_SCHEMES.to_vec();
-    all_supported_schemes.extend_from_slice(&["npm", "jsr"]);
-    all_supported_schemes.sort();
-    let scheme_list = all_supported_schemes
-      .iter()
-      .map(|scheme| format!("  - \"{}\"", scheme))
-      .collect::<Vec<_>>()
-      .join("\n");
-    Err(generic_error(format!(
-      "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes:\n{}",
-      scheme_list
-    )))
-  } else {
-    Ok(scheme.to_string())
-  }
+#[derive(Debug, Boxed, JsError)]
+pub struct CliFetchNoFollowError(pub Box<CliFetchNoFollowErrorKind>);
+
+#[derive(Debug, Error, JsError)]
+pub enum CliFetchNoFollowErrorKind {
+  #[error(transparent)]
+  #[class(inherit)]
+  FetchNoFollow(#[from] FetchNoFollowError),
+  #[error(transparent)]
+  #[class(generic)]
+  PermissionCheck(#[from] PermissionCheckError),
 }
 
 #[derive(Debug, Copy, Clone)]
 pub enum FetchPermissionsOptionRef<'a> {
   AllowAll,
-  DynamicContainer(&'a PermissionsContainer),
-  StaticContainer(&'a PermissionsContainer),
+  Restricted(&'a PermissionsContainer, CheckSpecifierKind),
 }
 
+#[derive(Debug, Default)]
 pub struct FetchOptions<'a> {
-  pub specifier: &'a ModuleSpecifier,
-  pub permissions: FetchPermissionsOptionRef<'a>,
   pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
   pub maybe_accept: Option<&'a str>,
   pub maybe_cache_setting: Option<&'a CacheSetting>,
 }
 
 pub struct FetchNoFollowOptions<'a> {
-  pub fetch_options: FetchOptions<'a>,
-  /// This setting doesn't make sense to provide for `FetchOptions`
-  /// since the required checksum may change for a redirect.
+  pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
+  pub maybe_accept: Option<&'a str>,
+  pub maybe_cache_setting: Option<&'a CacheSetting>,
   pub maybe_checksum: Option<&'a LoaderChecksum>,
 }
 
+type DenoCacheDirFileFetcher = deno_cache_dir::file_fetcher::FileFetcher<
+  BlobStoreAdapter,
+  CliSys,
+  HttpClientAdapter,
+>;
+
 /// A structure for resolving, fetching and caching source files.
 #[derive(Debug)]
-pub struct FileFetcher {
-  auth_tokens: AuthTokens,
-  allow_remote: bool,
-  memory_files: MemoryFiles,
-  cache_setting: CacheSetting,
-  http_cache: Arc<dyn HttpCache>,
-  http_client_provider: Arc<HttpClientProvider>,
-  blob_store: Arc<BlobStore>,
-  download_log_level: log::Level,
-  progress_bar: Option<ProgressBar>,
+pub struct CliFileFetcher {
+  file_fetcher: DenoCacheDirFileFetcher,
+  memory_files: Arc<MemoryFiles>,
 }
 
-impl FileFetcher {
+impl CliFileFetcher {
+  #[allow(clippy::too_many_arguments)]
   pub fn new(
     http_cache: Arc<dyn HttpCache>,
-    cache_setting: CacheSetting,
-    allow_remote: bool,
     http_client_provider: Arc<HttpClientProvider>,
+    sys: CliSys,
     blob_store: Arc<BlobStore>,
     progress_bar: Option<ProgressBar>,
+    allow_remote: bool,
+    cache_setting: CacheSetting,
+    download_log_level: log::Level,
   ) -> Self {
-    Self {
-      auth_tokens: AuthTokens::new(env::var("DENO_AUTH_TOKENS").ok()),
-      allow_remote,
-      memory_files: Default::default(),
-      cache_setting,
+    let memory_files = Arc::new(MemoryFiles::default());
+    let auth_tokens = AuthTokens::new_from_sys(&sys);
+    let file_fetcher = DenoCacheDirFileFetcher::new(
+      BlobStoreAdapter(blob_store),
+      sys,
       http_cache,
-      http_client_provider,
-      blob_store,
-      download_log_level: log::Level::Info,
-      progress_bar,
+      HttpClientAdapter {
+        http_client_provider: http_client_provider.clone(),
+        download_log_level,
+        progress_bar,
+      },
+      memory_files.clone(),
+      FileFetcherOptions {
+        allow_remote,
+        cache_setting,
+        auth_tokens,
+      },
+    );
+    Self {
+      file_fetcher,
+      memory_files,
     }
   }
 
   pub fn cache_setting(&self) -> &CacheSetting {
-    &self.cache_setting
-  }
-
-  /// Sets the log level to use when outputting the download message.
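// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the shape of error the `Boxed`
// derive on CliFetchNoFollowError produces, written out by hand. The exact
// code generated by the boxed_error crate may differ; `into_kind` matches
// how FetchCacher consumes the error earlier in this diff, and boxing keeps
// the Result's Err variant a single pointer wide.
// #[derive(Debug)]
// pub struct BoxedErrorSketch(pub Box<SketchErrorKind>);
//
// #[derive(Debug)]
// pub enum SketchErrorKind {
//   NotFound,
//   Io(std::io::Error),
// }
//
// impl BoxedErrorSketch {
//   pub fn into_kind(self) -> SketchErrorKind {
//     *self.0
//   }
// }
//
// impl From<SketchErrorKind> for BoxedErrorSketch {
//   fn from(kind: SketchErrorKind) -> Self {
//     Self(Box::new(kind))
//   }
// }
// ---------------------------------------------------------------------------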
- pub fn set_download_log_level(&mut self, level: log::Level) { - self.download_log_level = level; - } - - /// Fetch cached remote file. - /// - /// This is a recursive operation if source file has redirections. - pub fn fetch_cached( - &self, - specifier: &ModuleSpecifier, - redirect_limit: i64, - ) -> Result, AnyError> { - let mut specifier = Cow::Borrowed(specifier); - for _ in 0..=redirect_limit { - match self.fetch_cached_no_follow(&specifier, None)? { - Some(FileOrRedirect::File(file)) => { - return Ok(Some(file)); - } - Some(FileOrRedirect::Redirect(redirect_specifier)) => { - specifier = Cow::Owned(redirect_specifier); - } - None => { - return Ok(None); - } - } - } - Err(custom_error("Http", "Too many redirects.")) - } - - fn fetch_cached_no_follow( - &self, - specifier: &ModuleSpecifier, - maybe_checksum: Option<&LoaderChecksum>, - ) -> Result, AnyError> { - debug!( - "FileFetcher::fetch_cached_no_follow - specifier: {}", - specifier - ); - - let cache_key = self.http_cache.cache_item_key(specifier)?; // compute this once - let result = self.http_cache.get( - &cache_key, - maybe_checksum - .as_ref() - .map(|c| deno_cache_dir::Checksum::new(c.as_str())), - ); - match result { - Ok(Some(cache_data)) => Ok(Some(FileOrRedirect::from_deno_cache_entry( - specifier, cache_data, - )?)), - Ok(None) => Ok(None), - Err(err) => match err { - deno_cache_dir::CacheReadFileError::Io(err) => Err(err.into()), - deno_cache_dir::CacheReadFileError::ChecksumIntegrity(err) => { - // convert to the equivalent deno_graph error so that it - // enhances it if this is passed to deno_graph - Err( - deno_graph::source::ChecksumIntegrityError { - actual: err.actual, - expected: err.expected, - } - .into(), - ) - } - }, - } - } - - /// Convert a data URL into a file, resulting in an error if the URL is - /// invalid. - fn fetch_data_url( - &self, - specifier: &ModuleSpecifier, - ) -> Result { - debug!("FileFetcher::fetch_data_url() - specifier: {}", specifier); - let data_url = deno_graph::source::RawDataUrl::parse(specifier)?; - let (bytes, headers) = data_url.into_bytes_and_headers(); - Ok(File { - specifier: specifier.clone(), - maybe_headers: Some(headers), - source: Arc::from(bytes), - }) - } - - /// Get a blob URL. - async fn fetch_blob_url( - &self, - specifier: &ModuleSpecifier, - ) -> Result { - debug!("FileFetcher::fetch_blob_url() - specifier: {}", specifier); - let blob = self - .blob_store - .get_object_url(specifier.clone()) - .ok_or_else(|| { - custom_error( - "NotFound", - format!("Blob URL not found: \"{specifier}\"."), - ) - })?; - - let bytes = blob.read_all().await; - let headers = - HashMap::from([("content-type".to_string(), blob.media_type.clone())]); - - Ok(File { - specifier: specifier.clone(), - maybe_headers: Some(headers), - source: Arc::from(bytes), - }) - } - - async fn fetch_remote_no_follow( - &self, - specifier: &ModuleSpecifier, - maybe_accept: Option<&str>, - cache_setting: &CacheSetting, - maybe_checksum: Option<&LoaderChecksum>, - maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, - ) -> Result { - debug!( - "FileFetcher::fetch_remote_no_follow - specifier: {}", - specifier - ); - - if self.should_use_cache(specifier, cache_setting) { - if let Some(file_or_redirect) = - self.fetch_cached_no_follow(specifier, maybe_checksum)? - { - return Ok(file_or_redirect); - } - } - - if *cache_setting == CacheSetting::Only { - return Err(custom_error( - "NotCached", - format!( - "Specifier not found in cache: \"{specifier}\", --cached-only is specified." 
- ), - )); - } - - let mut maybe_progress_guard = None; - if let Some(pb) = self.progress_bar.as_ref() { - maybe_progress_guard = Some(pb.update(specifier.as_str())); - } else { - log::log!( - self.download_log_level, - "{} {}", - colors::green("Download"), - specifier - ); - } - - let maybe_etag_cache_entry = self - .http_cache - .cache_item_key(specifier) - .ok() - .and_then(|key| { - self - .http_cache - .get( - &key, - maybe_checksum - .as_ref() - .map(|c| deno_cache_dir::Checksum::new(c.as_str())), - ) - .ok() - .flatten() - }) - .and_then(|cache_entry| { - cache_entry - .metadata - .headers - .get("etag") - .cloned() - .map(|etag| (cache_entry, etag)) - }); - let maybe_auth_token = self.auth_tokens.get(specifier); - - async fn handle_request_or_server_error( - retried: &mut bool, - specifier: &Url, - err_str: String, - ) -> Result<(), AnyError> { - // Retry once, and bail otherwise. - if !*retried { - *retried = true; - log::debug!("Import '{}' failed: {}. Retrying...", specifier, err_str); - tokio::time::sleep(std::time::Duration::from_millis(50)).await; - Ok(()) - } else { - Err(generic_error(format!( - "Import '{}' failed: {}", - specifier, err_str - ))) - } - } - - let mut retried = false; // retry intermittent failures - let result = loop { - let result = match self - .http_client_provider - .get_or_create()? - .fetch_no_follow(FetchOnceArgs { - url: specifier.clone(), - maybe_accept: maybe_accept.map(ToOwned::to_owned), - maybe_etag: maybe_etag_cache_entry - .as_ref() - .map(|(_, etag)| etag.clone()), - maybe_auth_token: maybe_auth_token.clone(), - maybe_auth: maybe_auth.clone(), - maybe_progress_guard: maybe_progress_guard.as_ref(), - }) - .await? - { - FetchOnceResult::NotModified => { - let (cache_entry, _) = maybe_etag_cache_entry.unwrap(); - FileOrRedirect::from_deno_cache_entry(specifier, cache_entry) - } - FetchOnceResult::Redirect(redirect_url, headers) => { - self.http_cache.set(specifier, headers, &[])?; - Ok(FileOrRedirect::Redirect(redirect_url)) - } - FetchOnceResult::Code(bytes, headers) => { - self.http_cache.set(specifier, headers.clone(), &bytes)?; - if let Some(checksum) = &maybe_checksum { - checksum.check_source(&bytes)?; - } - Ok(FileOrRedirect::File(File { - specifier: specifier.clone(), - maybe_headers: Some(headers), - source: Arc::from(bytes), - })) - } - FetchOnceResult::RequestError(err) => { - handle_request_or_server_error(&mut retried, specifier, err).await?; - continue; - } - FetchOnceResult::ServerError(status) => { - handle_request_or_server_error( - &mut retried, - specifier, - status.to_string(), - ) - .await?; - continue; - } - }; - break result; - }; - - drop(maybe_progress_guard); - result - } - - /// Returns if the cache should be used for a given specifier. 
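// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the retry-once policy that
// handle_request_or_server_error implements above, written as a generic
// helper. Assumes tokio for the sleep; all names are hypothetical.
// async fn retry_once_sketch<T, E, F, Fut>(mut op: F) -> Result<T, E>
// where
//   F: FnMut() -> Fut,
//   Fut: std::future::Future<Output = Result<T, E>>,
// {
//   match op().await {
//     Ok(value) => Ok(value),
//     Err(_first_err) => {
//       // Mirrors the 50ms back-off before the single retry.
//       tokio::time::sleep(std::time::Duration::from_millis(50)).await;
//       op().await
//     }
//   }
// }
// ---------------------------------------------------------------------------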
- fn should_use_cache( - &self, - specifier: &ModuleSpecifier, - cache_setting: &CacheSetting, - ) -> bool { - match cache_setting { - CacheSetting::ReloadAll => false, - CacheSetting::Use | CacheSetting::Only => true, - CacheSetting::RespectHeaders => { - let Ok(cache_key) = self.http_cache.cache_item_key(specifier) else { - return false; - }; - let Ok(Some(headers)) = self.http_cache.read_headers(&cache_key) else { - return false; - }; - let Ok(Some(download_time)) = - self.http_cache.read_download_time(&cache_key) - else { - return false; - }; - let cache_semantics = - CacheSemantics::new(headers, download_time, SystemTime::now()); - cache_semantics.should_use() - } - CacheSetting::ReloadSome(list) => { - let mut url = specifier.clone(); - url.set_fragment(None); - if list.iter().any(|x| x == url.as_str()) { - return false; - } - url.set_query(None); - let mut path = PathBuf::from(url.as_str()); - loop { - if list.contains(&path.to_str().unwrap().to_string()) { - return false; - } - if !path.pop() { - break; - } - } - true - } - } + self.file_fetcher.cache_setting() } #[inline(always)] @@ -578,7 +350,10 @@ impl FileFetcher { .fetch_inner( specifier, None, - FetchPermissionsOptionRef::StaticContainer(permissions), + FetchPermissionsOptionRef::Restricted( + permissions, + CheckSpecifierKind::Static, + ), ) .await } @@ -590,42 +365,50 @@ impl FileFetcher { permissions: FetchPermissionsOptionRef<'_>, ) -> Result { self - .fetch_with_options(FetchOptions { + .fetch_with_options( specifier, permissions, - maybe_auth, - maybe_accept: None, - maybe_cache_setting: None, - }) + FetchOptions { + maybe_auth, + maybe_accept: None, + maybe_cache_setting: None, + }, + ) .await } pub async fn fetch_with_options( &self, + specifier: &ModuleSpecifier, + permissions: FetchPermissionsOptionRef<'_>, options: FetchOptions<'_>, ) -> Result { - self.fetch_with_options_and_max_redirect(options, 10).await + self + .fetch_with_options_and_max_redirect(specifier, permissions, options, 10) + .await } async fn fetch_with_options_and_max_redirect( &self, + specifier: &ModuleSpecifier, + permissions: FetchPermissionsOptionRef<'_>, options: FetchOptions<'_>, max_redirect: usize, ) -> Result { - let mut specifier = Cow::Borrowed(options.specifier); - let mut maybe_auth = options.maybe_auth.clone(); + let mut specifier = Cow::Borrowed(specifier); + let mut maybe_auth = options.maybe_auth; for _ in 0..=max_redirect { match self - .fetch_no_follow_with_options(FetchNoFollowOptions { - fetch_options: FetchOptions { - specifier: &specifier, - permissions: options.permissions, + .fetch_no_follow( + &specifier, + permissions, + FetchNoFollowOptions { maybe_auth: maybe_auth.clone(), maybe_accept: options.maybe_accept, maybe_cache_setting: options.maybe_cache_setting, + maybe_checksum: None, }, - maybe_checksum: None, - }) + ) .await? { FileOrRedirect::File(file) => { @@ -641,92 +424,61 @@ impl FileFetcher { } } - Err(custom_error("Http", "Too many redirects.")) + Err(TooManyRedirectsError(specifier.into_owned()).into()) } /// Fetches without following redirects. 
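// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the CacheSetting::ReloadSome
// ancestor-prefix walk that the removed should_use_cache above performs,
// inverted to answer "was a reload requested for this URL?". Mirrors the
// removed arm; the function name is hypothetical.
// use std::path::PathBuf;
//
// fn is_reload_requested_sketch(list: &[String], mut url: Url) -> bool {
//   url.set_fragment(None);
//   if list.iter().any(|x| x == url.as_str()) {
//     return true;
//   }
//   url.set_query(None);
//   // Walk each ancestor prefix of the URL, treating it as a path.
//   let mut path = PathBuf::from(url.as_str());
//   loop {
//     if list.contains(&path.to_str().unwrap().to_string()) {
//       return true;
//     }
//     if !path.pop() {
//       return false;
//     }
//   }
// }
// ---------------------------------------------------------------------------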
- pub async fn fetch_no_follow_with_options( + pub async fn fetch_no_follow( &self, + specifier: &ModuleSpecifier, + permissions: FetchPermissionsOptionRef<'_>, options: FetchNoFollowOptions<'_>, - ) -> Result { - let maybe_checksum = options.maybe_checksum; - let options = options.fetch_options; - let specifier = options.specifier; - // note: this debug output is used by the tests - debug!( - "FileFetcher::fetch_no_follow_with_options - specifier: {}", - specifier - ); - let scheme = get_validated_scheme(specifier)?; - match options.permissions { + ) -> Result { + validate_scheme(specifier).map_err(|err| { + CliFetchNoFollowErrorKind::FetchNoFollow(err.into()).into_box() + })?; + match permissions { FetchPermissionsOptionRef::AllowAll => { // allow } - FetchPermissionsOptionRef::StaticContainer(permissions) => { - permissions.check_specifier( - specifier, - deno_runtime::deno_permissions::CheckSpecifierKind::Static, - )?; - } - FetchPermissionsOptionRef::DynamicContainer(permissions) => { - permissions.check_specifier( - specifier, - deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic, - )?; + FetchPermissionsOptionRef::Restricted(permissions, kind) => { + permissions.check_specifier(specifier, kind)?; } } - if let Some(file) = self.memory_files.get(specifier) { - Ok(FileOrRedirect::File(file)) - } else if scheme == "file" { - // we do not in memory cache files, as this would prevent files on the - // disk changing effecting things like workers and dynamic imports. - fetch_local(specifier).map(FileOrRedirect::File) - } else if scheme == "data" { - self.fetch_data_url(specifier).map(FileOrRedirect::File) - } else if scheme == "blob" { - self - .fetch_blob_url(specifier) - .await - .map(FileOrRedirect::File) - } else if !self.allow_remote { - Err(custom_error( - "NoRemote", - format!("A remote specifier was requested: \"{specifier}\", but --no-remote is specified."), - )) - } else { - self - .fetch_remote_no_follow( - specifier, - options.maybe_accept, - options.maybe_cache_setting.unwrap_or(&self.cache_setting), - maybe_checksum, - options.maybe_auth, - ) - .await - } + self + .file_fetcher + .fetch_no_follow( + specifier, + deno_cache_dir::file_fetcher::FetchNoFollowOptions { + maybe_auth: options.maybe_auth, + maybe_checksum: options + .maybe_checksum + .map(|c| deno_cache_dir::Checksum::new(c.as_str())), + maybe_accept: options.maybe_accept, + maybe_cache_setting: options.maybe_cache_setting, + }, + ) + .await + .map_err(|err| CliFetchNoFollowErrorKind::FetchNoFollow(err).into_box()) } /// A synchronous way to retrieve a source file, where if the file has already /// been cached in memory it will be returned, otherwise for local files will /// be read from disk. - pub fn get_source(&self, specifier: &ModuleSpecifier) -> Option { - let maybe_file = self.memory_files.get(specifier); - if maybe_file.is_none() { - let is_local = specifier.scheme() == "file"; - if is_local { - if let Ok(file) = fetch_local(specifier) { - return Some(file); - } - } - None + pub fn get_cached_source_or_local( + &self, + specifier: &ModuleSpecifier, + ) -> Result, AnyError> { + if specifier.scheme() == "file" { + Ok(self.file_fetcher.fetch_local(specifier)?) } else { - maybe_file + Ok(self.file_fetcher.fetch_cached(specifier, 10)?) } } /// Insert a temporary module for the file fetcher. 
pub fn insert_memory_files(&self, file: File) -> Option { - self.memory_files.insert(file.specifier.clone(), file) + self.memory_files.insert(file.url.clone(), file) } pub fn clear_memory_files(&self) { @@ -734,14 +486,24 @@ impl FileFetcher { } } +fn validate_scheme(specifier: &Url) -> Result<(), UnsupportedSchemeError> { + match deno_cache_dir::file_fetcher::is_valid_scheme(specifier.scheme()) { + true => Ok(()), + false => Err(UnsupportedSchemeError { + scheme: specifier.scheme().to_string(), + url: specifier.clone(), + }), + } +} + #[cfg(test)] mod tests { use crate::cache::GlobalHttpCache; - use crate::cache::RealDenoCacheEnv; use crate::http_util::HttpClientProvider; use super::*; - use deno_core::error::get_custom_error_class; + use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind; + use deno_cache_dir::file_fetcher::HttpClient; use deno_core::resolve_url; use deno_runtime::deno_web::Blob; use deno_runtime::deno_web::InMemoryBlobPart; @@ -750,7 +512,7 @@ mod tests { fn setup( cache_setting: CacheSetting, maybe_temp_dir: Option, - ) -> (FileFetcher, TempDir) { + ) -> (CliFileFetcher, TempDir) { let (file_fetcher, temp_dir, _) = setup_with_blob_store(cache_setting, maybe_temp_dir); (file_fetcher, temp_dir) @@ -759,22 +521,39 @@ mod tests { fn setup_with_blob_store( cache_setting: CacheSetting, maybe_temp_dir: Option, - ) -> (FileFetcher, TempDir, Arc) { - let temp_dir = maybe_temp_dir.unwrap_or_default(); - let location = temp_dir.path().join("remote").to_path_buf(); - let blob_store: Arc = Default::default(); - let file_fetcher = FileFetcher::new( - Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)), - cache_setting, - true, - Arc::new(HttpClientProvider::new(None, None)), - blob_store.clone(), - None, - ); + ) -> (CliFileFetcher, TempDir, Arc) { + let (file_fetcher, temp_dir, blob_store, _) = + setup_with_blob_store_and_cache(cache_setting, maybe_temp_dir); (file_fetcher, temp_dir, blob_store) } - async fn test_fetch(specifier: &ModuleSpecifier) -> (File, FileFetcher) { + fn setup_with_blob_store_and_cache( + cache_setting: CacheSetting, + maybe_temp_dir: Option, + ) -> ( + CliFileFetcher, + TempDir, + Arc, + Arc, + ) { + let temp_dir = maybe_temp_dir.unwrap_or_default(); + let location = temp_dir.path().join("remote").to_path_buf(); + let blob_store: Arc = Default::default(); + let cache = Arc::new(GlobalHttpCache::new(CliSys::default(), location)); + let file_fetcher = CliFileFetcher::new( + cache.clone(), + Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), + blob_store.clone(), + None, + true, + cache_setting, + log::Level::Info, + ); + (file_fetcher, temp_dir, blob_store, cache) + } + + async fn test_fetch(specifier: &ModuleSpecifier) -> (File, CliFileFetcher) { let (file_fetcher, _) = setup(CacheSetting::ReloadAll, None); let result = file_fetcher.fetch_bypass_permissions(specifier).await; assert!(result.is_ok()); @@ -785,27 +564,20 @@ mod tests { specifier: &ModuleSpecifier, ) -> (File, HashMap) { let _http_server_guard = test_util::http_server(); - let (file_fetcher, _) = setup(CacheSetting::ReloadAll, None); + let (file_fetcher, _, _, http_cache) = + setup_with_blob_store_and_cache(CacheSetting::ReloadAll, None); let result: Result = file_fetcher .fetch_with_options_and_max_redirect( - FetchOptions { - specifier, - permissions: FetchPermissionsOptionRef::AllowAll, - maybe_auth: None, - maybe_accept: None, - maybe_cache_setting: Some(&file_fetcher.cache_setting), - }, + specifier, + FetchPermissionsOptionRef::AllowAll, + Default::default(), 1, ) 
.await; - let cache_key = file_fetcher.http_cache.cache_item_key(specifier).unwrap(); + let cache_key = http_cache.cache_item_key(specifier).unwrap(); ( result.unwrap(), - file_fetcher - .http_cache - .read_headers(&cache_key) - .unwrap() - .unwrap(), + http_cache.read_headers(&cache_key).unwrap().unwrap(), ) } @@ -850,28 +622,6 @@ mod tests { ); } - #[test] - fn test_get_validated_scheme() { - let fixtures = vec![ - ("https://deno.land/x/mod.ts", true, "https"), - ("http://deno.land/x/mod.ts", true, "http"), - ("file:///a/b/c.ts", true, "file"), - ("file:///C:/a/b/c.ts", true, "file"), - ("data:,some%20text", true, "data"), - ("ftp://a/b/c.ts", false, ""), - ("mailto:dino@deno.land", false, ""), - ]; - - for (specifier, is_ok, expected) in fixtures { - let specifier = ModuleSpecifier::parse(specifier).unwrap(); - let actual = get_validated_scheme(&specifier); - assert_eq!(actual.is_ok(), is_ok); - if is_ok { - assert_eq!(actual.unwrap(), expected); - } - } - } - #[tokio::test] async fn test_insert_cached() { let (file_fetcher, temp_dir) = setup(CacheSetting::Use, None); @@ -879,7 +629,7 @@ mod tests { let specifier = ModuleSpecifier::from_file_path(&local).unwrap(); let file = File { source: Arc::from("some source code".as_bytes()), - specifier: specifier.clone(), + url: specifier.clone(), maybe_headers: Some(HashMap::from([( "content-type".to_string(), "application/javascript".to_string(), @@ -900,7 +650,7 @@ mod tests { let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let file = result.unwrap().into_text_decoded().unwrap(); + let file = TextDecodedFile::decode(result.unwrap()).unwrap(); assert_eq!( &*file.source, "export const a = \"a\";\n\nexport enum A {\n A,\n B,\n C,\n}\n" @@ -929,7 +679,7 @@ mod tests { let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let file = result.unwrap().into_text_decoded().unwrap(); + let file = TextDecodedFile::decode(result.unwrap()).unwrap(); assert_eq!( &*file.source, "export const a = \"a\";\n\nexport enum A {\n A,\n B,\n C,\n}\n" @@ -941,33 +691,36 @@ mod tests { #[tokio::test] async fn test_fetch_complex() { let _http_server_guard = test_util::http_server(); - let (file_fetcher, temp_dir) = setup(CacheSetting::Use, None); + let (file_fetcher, temp_dir, _, http_cache) = + setup_with_blob_store_and_cache(CacheSetting::Use, None); let (file_fetcher_01, _) = setup(CacheSetting::Use, Some(temp_dir.clone())); - let (file_fetcher_02, _) = setup(CacheSetting::Use, Some(temp_dir.clone())); + let (file_fetcher_02, _, _, http_cache_02) = + setup_with_blob_store_and_cache( + CacheSetting::Use, + Some(temp_dir.clone()), + ); let specifier = ModuleSpecifier::parse("http://localhost:4545/subdir/mod2.ts").unwrap(); let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let file = result.unwrap().into_text_decoded().unwrap(); + let file = TextDecodedFile::decode(result.unwrap()).unwrap(); assert_eq!( &*file.source, "export { printHello } from \"./print_hello.ts\";\n" ); assert_eq!(file.media_type, MediaType::TypeScript); - let cache_item_key = - file_fetcher.http_cache.cache_item_key(&specifier).unwrap(); + let cache_item_key = http_cache.cache_item_key(&specifier).unwrap(); let mut headers = HashMap::new(); headers.insert("content-type".to_string(), "text/javascript".to_string()); - file_fetcher - .http_cache + http_cache .set(&specifier, headers.clone(), file.source.as_bytes()) .unwrap(); let result = 
file_fetcher_01.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let file = result.unwrap().into_text_decoded().unwrap(); + let file = TextDecodedFile::decode(result.unwrap()).unwrap(); assert_eq!( &*file.source, "export { printHello } from \"./print_hello.ts\";\n" @@ -976,22 +729,20 @@ mod tests { // the value above. assert_eq!(file.media_type, MediaType::JavaScript); - let headers2 = file_fetcher_02 - .http_cache + let headers2 = http_cache_02 .read_headers(&cache_item_key) .unwrap() .unwrap(); assert_eq!(headers2.get("content-type").unwrap(), "text/javascript"); headers = HashMap::new(); headers.insert("content-type".to_string(), "application/json".to_string()); - file_fetcher_02 - .http_cache + http_cache_02 .set(&specifier, headers.clone(), file.source.as_bytes()) .unwrap(); let result = file_fetcher_02.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let file = result.unwrap().into_text_decoded().unwrap(); + let file = TextDecodedFile::decode(result.unwrap()).unwrap(); assert_eq!( &*file.source, "export { printHello } from \"./print_hello.ts\";\n" @@ -1001,20 +752,19 @@ mod tests { // This creates a totally new instance, simulating another Deno process // invocation and indicates to "cache bust". let location = temp_dir.path().join("remote").to_path_buf(); - let file_fetcher = FileFetcher::new( - Arc::new(GlobalHttpCache::new( - location, - crate::cache::RealDenoCacheEnv, - )), - CacheSetting::ReloadAll, - true, + let file_fetcher = CliFileFetcher::new( + Arc::new(GlobalHttpCache::new(CliSys::default(), location)), Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), Default::default(), None, + true, + CacheSetting::ReloadAll, + log::Level::Info, ); let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let file = result.unwrap().into_text_decoded().unwrap(); + let file = TextDecodedFile::decode(result.unwrap()).unwrap(); assert_eq!( &*file.source, "export { printHello } from \"./print_hello.ts\";\n" @@ -1030,73 +780,49 @@ mod tests { let specifier = resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap(); + let http_cache = + Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone())); let file_modified_01 = { - let file_fetcher = FileFetcher::new( - Arc::new(GlobalHttpCache::new( - location.clone(), - crate::cache::RealDenoCacheEnv, - )), - CacheSetting::Use, - true, + let file_fetcher = CliFileFetcher::new( + http_cache.clone(), Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), Default::default(), None, + true, + CacheSetting::Use, + log::Level::Info, ); let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let cache_key = - file_fetcher.http_cache.cache_item_key(&specifier).unwrap(); + let cache_key = http_cache.cache_item_key(&specifier).unwrap(); ( - file_fetcher - .http_cache - .read_modified_time(&cache_key) - .unwrap(), - file_fetcher - .http_cache - .read_headers(&cache_key) - .unwrap() - .unwrap(), - file_fetcher - .http_cache - .read_download_time(&cache_key) - .unwrap() - .unwrap(), + http_cache.read_modified_time(&cache_key).unwrap(), + http_cache.read_headers(&cache_key).unwrap().unwrap(), + http_cache.read_download_time(&cache_key).unwrap().unwrap(), ) }; let file_modified_02 = { - let file_fetcher = FileFetcher::new( - Arc::new(GlobalHttpCache::new( - location, - crate::cache::RealDenoCacheEnv, - )), - CacheSetting::Use, - true, + let file_fetcher = CliFileFetcher::new( + 
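// Editorial note, hedged: the CliFileFetcher::new argument order used
// consistently throughout this diff appears to be: http cache, the
// HttpClientProvider, CliSys, blob store, an Option that is None everywhere
// here (plausibly a progress bar), allow_remote, the CacheSetting, and the
// download log level.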
Arc::new(GlobalHttpCache::new(CliSys::default(), location)), Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), Default::default(), None, + true, + CacheSetting::Use, + log::Level::Info, ); let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let cache_key = - file_fetcher.http_cache.cache_item_key(&specifier).unwrap(); + let cache_key = http_cache.cache_item_key(&specifier).unwrap(); ( - file_fetcher - .http_cache - .read_modified_time(&cache_key) - .unwrap(), - file_fetcher - .http_cache - .read_headers(&cache_key) - .unwrap() - .unwrap(), - file_fetcher - .http_cache - .read_download_time(&cache_key) - .unwrap() - .unwrap(), + http_cache.read_modified_time(&cache_key).unwrap(), + http_cache.read_headers(&cache_key).unwrap().unwrap(), + http_cache.read_download_time(&cache_key).unwrap().unwrap(), ) }; @@ -1106,7 +832,8 @@ mod tests { #[tokio::test] async fn test_fetch_redirected() { let _http_server_guard = test_util::http_server(); - let (file_fetcher, _) = setup(CacheSetting::Use, None); + let (file_fetcher, _, _, http_cache) = + setup_with_blob_store_and_cache(CacheSetting::Use, None); let specifier = resolve_url("http://localhost:4546/subdir/redirects/redirect1.js") .unwrap(); @@ -1117,24 +844,27 @@ mod tests { let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); let file = result.unwrap(); - assert_eq!(file.specifier, redirected_specifier); + assert_eq!(file.url, redirected_specifier); assert_eq!( - get_text_from_cache(&file_fetcher, &specifier), + get_text_from_cache(http_cache.as_ref(), &specifier), "", "redirected files should have empty cached contents" ); assert_eq!( - get_location_header_from_cache(&file_fetcher, &specifier), + get_location_header_from_cache(http_cache.as_ref(), &specifier), Some("http://localhost:4545/subdir/redirects/redirect1.js".to_string()), ); assert_eq!( - get_text_from_cache(&file_fetcher, &redirected_specifier), + get_text_from_cache(http_cache.as_ref(), &redirected_specifier), "export const redirect = 1;\n" ); assert_eq!( - get_location_header_from_cache(&file_fetcher, &redirected_specifier), + get_location_header_from_cache( + http_cache.as_ref(), + &redirected_specifier + ), None, ); } @@ -1142,7 +872,8 @@ mod tests { #[tokio::test] async fn test_fetch_multiple_redirects() { let _http_server_guard = test_util::http_server(); - let (file_fetcher, _) = setup(CacheSetting::Use, None); + let (file_fetcher, _, _, http_cache) = + setup_with_blob_store_and_cache(CacheSetting::Use, None); let specifier = resolve_url("http://localhost:4548/subdir/redirects/redirect1.js") .unwrap(); @@ -1156,34 +887,40 @@ mod tests { let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); let file = result.unwrap(); - assert_eq!(file.specifier, redirected_02_specifier); + assert_eq!(file.url, redirected_02_specifier); assert_eq!( - get_text_from_cache(&file_fetcher, &specifier), + get_text_from_cache(http_cache.as_ref(), &specifier), "", "redirected files should have empty cached contents" ); assert_eq!( - get_location_header_from_cache(&file_fetcher, &specifier), + get_location_header_from_cache(http_cache.as_ref(), &specifier), Some("http://localhost:4546/subdir/redirects/redirect1.js".to_string()), ); assert_eq!( - get_text_from_cache(&file_fetcher, &redirected_01_specifier), + get_text_from_cache(http_cache.as_ref(), &redirected_01_specifier), "", "redirected files should have empty cached contents" ); assert_eq!( - 
get_location_header_from_cache(&file_fetcher, &redirected_01_specifier), + get_location_header_from_cache( + http_cache.as_ref(), + &redirected_01_specifier + ), Some("http://localhost:4545/subdir/redirects/redirect1.js".to_string()), ); assert_eq!( - get_text_from_cache(&file_fetcher, &redirected_02_specifier), + get_text_from_cache(http_cache.as_ref(), &redirected_02_specifier), "export const redirect = 1;\n" ); assert_eq!( - get_location_header_from_cache(&file_fetcher, &redirected_02_specifier), + get_location_header_from_cache( + http_cache.as_ref(), + &redirected_02_specifier + ), None, ); } @@ -1197,81 +934,53 @@ mod tests { resolve_url("http://localhost:4548/subdir/mismatch_ext.ts").unwrap(); let redirected_specifier = resolve_url("http://localhost:4546/subdir/mismatch_ext.ts").unwrap(); + let http_cache = + Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone())); let metadata_file_modified_01 = { - let file_fetcher = FileFetcher::new( - Arc::new(GlobalHttpCache::new( - location.clone(), - crate::cache::RealDenoCacheEnv, - )), - CacheSetting::Use, - true, + let file_fetcher = CliFileFetcher::new( + http_cache.clone(), Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), Default::default(), None, + true, + CacheSetting::Use, + log::Level::Info, ); let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let cache_key = file_fetcher - .http_cache - .cache_item_key(&redirected_specifier) - .unwrap(); + let cache_key = http_cache.cache_item_key(&redirected_specifier).unwrap(); ( - file_fetcher - .http_cache - .read_modified_time(&cache_key) - .unwrap(), - file_fetcher - .http_cache - .read_headers(&cache_key) - .unwrap() - .unwrap(), - file_fetcher - .http_cache - .read_download_time(&cache_key) - .unwrap() - .unwrap(), + http_cache.read_modified_time(&cache_key).unwrap(), + http_cache.read_headers(&cache_key).unwrap().unwrap(), + http_cache.read_download_time(&cache_key).unwrap().unwrap(), ) }; let metadata_file_modified_02 = { - let file_fetcher = FileFetcher::new( - Arc::new(GlobalHttpCache::new( - location, - crate::cache::RealDenoCacheEnv, - )), - CacheSetting::Use, - true, + let file_fetcher = CliFileFetcher::new( + http_cache.clone(), Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), Default::default(), None, + true, + CacheSetting::Use, + log::Level::Info, ); let result = file_fetcher .fetch_bypass_permissions(&redirected_specifier) .await; assert!(result.is_ok()); - let cache_key = file_fetcher - .http_cache - .cache_item_key(&redirected_specifier) - .unwrap(); + let cache_key = http_cache.cache_item_key(&redirected_specifier).unwrap(); ( - file_fetcher - .http_cache - .read_modified_time(&cache_key) - .unwrap(), - file_fetcher - .http_cache - .read_headers(&cache_key) - .unwrap() - .unwrap(), - file_fetcher - .http_cache - .read_download_time(&cache_key) - .unwrap() - .unwrap(), + http_cache.read_modified_time(&cache_key).unwrap(), + http_cache.read_headers(&cache_key).unwrap().unwrap(), + http_cache.read_download_time(&cache_key).unwrap().unwrap(), ) }; @@ -1288,13 +997,9 @@ mod tests { let result = file_fetcher .fetch_with_options_and_max_redirect( - FetchOptions { - specifier: &specifier, - permissions: FetchPermissionsOptionRef::AllowAll, - maybe_auth: None, - maybe_accept: None, - maybe_cache_setting: Some(&file_fetcher.cache_setting), - }, + &specifier, + FetchPermissionsOptionRef::AllowAll, + Default::default(), 2, ) .await; @@ -1302,29 +1007,26 @@ mod tests { let result = file_fetcher 
.fetch_with_options_and_max_redirect( - FetchOptions { - specifier: &specifier, - permissions: FetchPermissionsOptionRef::AllowAll, - maybe_auth: None, - maybe_accept: None, - maybe_cache_setting: Some(&file_fetcher.cache_setting), - }, + &specifier, + FetchPermissionsOptionRef::AllowAll, + Default::default(), 1, ) .await; assert!(result.is_err()); - let result = file_fetcher.fetch_cached(&specifier, 2); + let result = file_fetcher.file_fetcher.fetch_cached(&specifier, 2); assert!(result.is_ok()); - let result = file_fetcher.fetch_cached(&specifier, 1); + let result = file_fetcher.file_fetcher.fetch_cached(&specifier, 1); assert!(result.is_err()); } #[tokio::test] async fn test_fetch_same_host_redirect() { let _http_server_guard = test_util::http_server(); - let (file_fetcher, _) = setup(CacheSetting::Use, None); + let (file_fetcher, _, _, http_cache) = + setup_with_blob_store_and_cache(CacheSetting::Use, None); let specifier = resolve_url( "http://localhost:4550/REDIRECT/subdir/redirects/redirect1.js", ) @@ -1336,24 +1038,27 @@ mod tests { let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); let file = result.unwrap(); - assert_eq!(file.specifier, redirected_specifier); + assert_eq!(file.url, redirected_specifier); assert_eq!( - get_text_from_cache(&file_fetcher, &specifier), + get_text_from_cache(http_cache.as_ref(), &specifier), "", "redirected files should have empty cached contents" ); assert_eq!( - get_location_header_from_cache(&file_fetcher, &specifier), + get_location_header_from_cache(http_cache.as_ref(), &specifier), Some("/subdir/redirects/redirect1.js".to_string()), ); assert_eq!( - get_text_from_cache(&file_fetcher, &redirected_specifier), + get_text_from_cache(http_cache.as_ref(), &redirected_specifier), "export const redirect = 1;\n" ); assert_eq!( - get_location_header_from_cache(&file_fetcher, &redirected_specifier), + get_location_header_from_cache( + http_cache.as_ref(), + &redirected_specifier + ), None ); } @@ -1363,16 +1068,15 @@ mod tests { let _http_server_guard = test_util::http_server(); let temp_dir = TempDir::new(); let location = temp_dir.path().join("remote").to_path_buf(); - let file_fetcher = FileFetcher::new( - Arc::new(GlobalHttpCache::new( - location, - crate::cache::RealDenoCacheEnv, - )), - CacheSetting::Use, - false, + let file_fetcher = CliFileFetcher::new( + Arc::new(GlobalHttpCache::new(CliSys::default(), location)), Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), Default::default(), None, + false, + CacheSetting::Use, + log::Level::Info, ); let specifier = resolve_url("http://localhost:4545/run/002_hello.ts").unwrap(); @@ -1380,8 +1084,19 @@ mod tests { let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_err()); let err = result.unwrap_err(); - assert_eq!(get_custom_error_class(&err), Some("NoRemote")); - assert_eq!(err.to_string(), "A remote specifier was requested: \"http://localhost:4545/run/002_hello.ts\", but --no-remote is specified."); + let err = err.downcast::<CliFetchNoFollowError>().unwrap().into_kind(); + match err { + CliFetchNoFollowErrorKind::FetchNoFollow(err) => { + let err = err.into_kind(); + match &err { + FetchNoFollowErrorKind::NoRemote { ..
} => { + assert_eq!(err.to_string(), "A remote specifier was requested: \"http://localhost:4545/run/002_hello.ts\", but --no-remote is specified."); + } + _ => unreachable!(), + } + } + _ => unreachable!(), + } } #[tokio::test] @@ -1389,21 +1104,25 @@ mod tests { let _http_server_guard = test_util::http_server(); let temp_dir = TempDir::new(); let location = temp_dir.path().join("remote").to_path_buf(); - let file_fetcher_01 = FileFetcher::new( - Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)), + let file_fetcher_01 = CliFileFetcher::new( + Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone())), + Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), + Default::default(), + None, + true, CacheSetting::Only, - true, - Arc::new(HttpClientProvider::new(None, None)), - Default::default(), - None, + log::Level::Info, ); - let file_fetcher_02 = FileFetcher::new( - Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)), - CacheSetting::Use, - true, + let file_fetcher_02 = CliFileFetcher::new( + Arc::new(GlobalHttpCache::new(CliSys::default(), location)), Arc::new(HttpClientProvider::new(None, None)), + CliSys::default(), Default::default(), None, + true, + CacheSetting::Use, + log::Level::Info, ); let specifier = resolve_url("http://localhost:4545/run/002_hello.ts").unwrap(); @@ -1411,8 +1130,19 @@ mod tests { let result = file_fetcher_01.fetch_bypass_permissions(&specifier).await; assert!(result.is_err()); let err = result.unwrap_err(); - assert_eq!(err.to_string(), "Specifier not found in cache: \"http://localhost:4545/run/002_hello.ts\", --cached-only is specified."); - assert_eq!(get_custom_error_class(&err), Some("NotCached")); + let err = err.downcast::<CliFetchNoFollowError>().unwrap().into_kind(); + match err { + CliFetchNoFollowErrorKind::FetchNoFollow(err) => { + let err = err.into_kind(); + match &err { + FetchNoFollowErrorKind::NotCached { ..
} => { + assert_eq!(err.to_string(), "Specifier not found in cache: \"http://localhost:4545/run/002_hello.ts\", --cached-only is specified."); + } + _ => unreachable!(), + } + } + _ => unreachable!(), + } let result = file_fetcher_02.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); @@ -1426,16 +1156,16 @@ mod tests { let (file_fetcher, temp_dir) = setup(CacheSetting::Use, None); let fixture_path = temp_dir.path().join("mod.ts"); let specifier = ModuleSpecifier::from_file_path(&fixture_path).unwrap(); - fs::write(fixture_path.clone(), r#"console.log("hello deno");"#).unwrap(); + fixture_path.write(r#"console.log("hello deno");"#); let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let file = result.unwrap().into_text_decoded().unwrap(); + let file = TextDecodedFile::decode(result.unwrap()).unwrap(); assert_eq!(&*file.source, r#"console.log("hello deno");"#); - fs::write(fixture_path, r#"console.log("goodbye deno");"#).unwrap(); + fixture_path.write(r#"console.log("goodbye deno");"#); let result = file_fetcher.fetch_bypass_permissions(&specifier).await; assert!(result.is_ok()); - let file = result.unwrap().into_text_decoded().unwrap(); + let file = TextDecodedFile::decode(result.unwrap()).unwrap(); assert_eq!(&*file.source, r#"console.log("goodbye deno");"#); } @@ -1527,29 +1257,169 @@ mod tests { test_fetch_remote_encoded("windows-1255", "windows-1255", expected).await; } + fn create_http_client_adapter() -> HttpClientAdapter { + HttpClientAdapter { + http_client_provider: Arc::new(HttpClientProvider::new(None, None)), + download_log_level: log::Level::Info, + progress_bar: None, + } + } + + #[tokio::test] + async fn test_fetch_string() { + let _http_server_guard = test_util::http_server(); + let url = Url::parse("http://127.0.0.1:4545/assets/fixture.json").unwrap(); + let client = create_http_client_adapter(); + let result = client.send_no_follow(&url, HeaderMap::new()).await; + if let Ok(SendResponse::Success(headers, body)) = result { + assert!(!body.is_empty()); + assert_eq!(headers.get("content-type").unwrap(), "application/json"); + assert_eq!(headers.get("etag"), None); + assert_eq!(headers.get("x-typescript-types"), None); + } else { + panic!(); + } + } + + #[tokio::test] + async fn test_fetch_gzip() { + let _http_server_guard = test_util::http_server(); + let url = Url::parse("http://127.0.0.1:4545/run/import_compression/gziped") + .unwrap(); + let client = create_http_client_adapter(); + let result = client.send_no_follow(&url, HeaderMap::new()).await; + if let Ok(SendResponse::Success(headers, body)) = result { + assert_eq!(String::from_utf8(body).unwrap(), "console.log('gzip')"); + assert_eq!( + headers.get("content-type").unwrap(), + "application/javascript" + ); + assert_eq!(headers.get("etag"), None); + assert_eq!(headers.get("x-typescript-types"), None); + } else { + panic!(); + } + } + + #[tokio::test] + async fn test_fetch_with_etag() { + let _http_server_guard = test_util::http_server(); + let url = Url::parse("http://127.0.0.1:4545/etag_script.ts").unwrap(); + let client = create_http_client_adapter(); + let result = client.send_no_follow(&url, HeaderMap::new()).await; + if let Ok(SendResponse::Success(headers, body)) = result { + assert!(!body.is_empty()); + assert_eq!(String::from_utf8(body).unwrap(), "console.log('etag')"); + assert_eq!( + headers.get("content-type").unwrap(), + "application/typescript" + ); + assert_eq!(headers.get("etag").unwrap(), "33a64df551425fcc55e"); + } else { + panic!(); + } + 
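// The second half of this test exercises conditional revalidation: replaying
// the request with the etag captured above in an If-None-Match header should
// make the adapter report SendResponse::NotModified instead of a fresh body.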
+ let mut headers = HeaderMap::new(); + headers.insert("if-none-match", "33a64df551425fcc55e".parse().unwrap()); + let res = client.send_no_follow(&url, headers).await; + assert_eq!(res.unwrap(), SendResponse::NotModified); + } + + #[tokio::test] + async fn test_fetch_brotli() { + let _http_server_guard = test_util::http_server(); + let url = Url::parse("http://127.0.0.1:4545/run/import_compression/brotli") + .unwrap(); + let client = create_http_client_adapter(); + let result = client.send_no_follow(&url, HeaderMap::new()).await; + if let Ok(SendResponse::Success(headers, body)) = result { + assert!(!body.is_empty()); + assert_eq!(String::from_utf8(body).unwrap(), "console.log('brotli');"); + assert_eq!( + headers.get("content-type").unwrap(), + "application/javascript" + ); + assert_eq!(headers.get("etag"), None); + assert_eq!(headers.get("x-typescript-types"), None); + } else { + panic!(); + } + } + + #[tokio::test] + async fn test_fetch_accept() { + let _http_server_guard = test_util::http_server(); + let url = Url::parse("http://127.0.0.1:4545/echo_accept").unwrap(); + let client = create_http_client_adapter(); + let mut headers = HeaderMap::new(); + headers.insert("accept", "application/json".parse().unwrap()); + let result = client.send_no_follow(&url, headers).await; + if let Ok(SendResponse::Success(_, body)) = result { + assert_eq!(body, r#"{"accept":"application/json"}"#.as_bytes()); + } else { + panic!(); + } + } + + #[tokio::test] + async fn test_fetch_no_follow_with_redirect() { + let _http_server_guard = test_util::http_server(); + let url = Url::parse("http://127.0.0.1:4546/assets/fixture.json").unwrap(); + // Dns resolver substitutes `127.0.0.1` with `localhost` + let target_url = + Url::parse("http://localhost:4545/assets/fixture.json").unwrap(); + let client = create_http_client_adapter(); + let result = client.send_no_follow(&url, Default::default()).await; + if let Ok(SendResponse::Redirect(headers)) = result { + assert_eq!(headers.get("location").unwrap(), target_url.as_str()); + } else { + panic!(); + } + } + + #[tokio::test] + async fn server_error() { + let _g = test_util::http_server(); + let url_str = "http://127.0.0.1:4545/server_error"; + let url = Url::parse(url_str).unwrap(); + let client = create_http_client_adapter(); + let result = client.send_no_follow(&url, Default::default()).await; + + if let Err(SendError::StatusCode(status)) = result { + assert_eq!(status, 500); + } else { + panic!("{:?}", result); + } + } + + #[tokio::test] + async fn request_error() { + let _g = test_util::http_server(); + let url_str = "http://127.0.0.1:9999/"; + let url = Url::parse(url_str).unwrap(); + let client = create_http_client_adapter(); + let result = client.send_no_follow(&url, Default::default()).await; + + assert!(matches!(result, Err(SendError::Failed(_)))); + } + #[track_caller] fn get_text_from_cache( - file_fetcher: &FileFetcher, + http_cache: &dyn HttpCache, url: &ModuleSpecifier, ) -> String { - let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap(); - let bytes = file_fetcher - .http_cache - .get(&cache_key, None) - .unwrap() - .unwrap() - .content; + let cache_key = http_cache.cache_item_key(url).unwrap(); + let bytes = http_cache.get(&cache_key, None).unwrap().unwrap().content; String::from_utf8(bytes.into_owned()).unwrap() } #[track_caller] fn get_location_header_from_cache( - file_fetcher: &FileFetcher, + http_cache: &dyn HttpCache, url: &ModuleSpecifier, ) -> Option<String> { - let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap(); -
file_fetcher - .http_cache + let cache_key = http_cache.cache_item_key(url).unwrap(); + http_cache .read_headers(&cache_key) .unwrap() .unwrap() diff --git a/cli/graph_util.rs b/cli/graph_util.rs index b655dda0f6..68d48d9bbc 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -1,5 +1,42 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use std::collections::HashSet; +use std::error::Error; +use std::ops::Deref; +use std::path::PathBuf; +use std::sync::Arc; + +use deno_config::deno_json::JsxImportSourceConfig; +use deno_config::workspace::JsrPackageConfig; +use deno_core::anyhow::bail; +use deno_core::error::custom_error; +use deno_core::error::AnyError; +use deno_core::parking_lot::Mutex; +use deno_core::ModuleSpecifier; +use deno_graph::source::Loader; +use deno_graph::source::LoaderChecksum; +use deno_graph::source::ResolutionKind; +use deno_graph::source::ResolveError; +use deno_graph::FillFromLockfileOptions; +use deno_graph::GraphKind; +use deno_graph::JsrLoadError; +use deno_graph::ModuleError; +use deno_graph::ModuleGraph; +use deno_graph::ModuleGraphError; +use deno_graph::ModuleLoadError; +use deno_graph::ResolutionError; +use deno_graph::SpecifierError; +use deno_graph::WorkspaceFastCheckOption; +use deno_path_util::url_to_file_path; +use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; +use deno_runtime::deno_node; +use deno_runtime::deno_permissions::PermissionsContainer; +use deno_semver::jsr::JsrDepPackageReq; +use deno_semver::package::PackageNv; +use deno_semver::SmallStackString; +use import_map::ImportMapError; +use node_resolver::InNpmPackageChecker; + use crate::args::config_to_deno_graph_workspace_member; use crate::args::jsr_url; use crate::args::CliLockfile; @@ -13,52 +50,17 @@ use crate::cache::ModuleInfoCache; use crate::cache::ParsedSourceCache; use crate::colors; use crate::errors::get_error_class_name; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; use crate::npm::CliNpmResolver; use crate::resolver::CjsTracker; use crate::resolver::CliResolver; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; +use crate::sys::CliSys; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path; -use deno_config::deno_json::JsxImportSourceConfig; -use deno_config::workspace::JsrPackageConfig; -use deno_core::anyhow::bail; -use deno_graph::source::LoaderChecksum; -use deno_graph::source::ResolutionKind; -use deno_graph::FillFromLockfileOptions; -use deno_graph::JsrLoadError; -use deno_graph::ModuleLoadError; -use deno_graph::WorkspaceFastCheckOption; - -use deno_core::error::custom_error; -use deno_core::error::AnyError; -use deno_core::parking_lot::Mutex; -use deno_core::ModuleSpecifier; -use deno_graph::source::Loader; -use deno_graph::source::ResolveError; -use deno_graph::GraphKind; -use deno_graph::ModuleError; -use deno_graph::ModuleGraph; -use deno_graph::ModuleGraphError; -use deno_graph::ResolutionError; -use deno_graph::SpecifierError; -use deno_path_util::url_to_file_path; -use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; -use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node; -use deno_runtime::deno_permissions::PermissionsContainer; -use deno_semver::jsr::JsrDepPackageReq; -use deno_semver::package::PackageNv; -use import_map::ImportMapError; -use node_resolver::InNpmPackageChecker; -use std::collections::HashSet; -use 
std::error::Error; -use std::ops::Deref; -use std::path::PathBuf; -use std::sync::Arc; #[derive(Clone)] pub struct GraphValidOptions { @@ -79,7 +81,7 @@ pub struct GraphValidOptions { /// for the CLI. pub fn graph_valid( graph: &ModuleGraph, - fs: &Arc<dyn FileSystem>, + sys: &CliSys, roots: &[ModuleSpecifier], options: GraphValidOptions, ) -> Result<(), AnyError> { @@ -89,7 +91,7 @@ pub fn graph_valid( let mut errors = graph_walk_errors( graph, - fs, + sys, roots, GraphWalkErrorsOptions { check_js: options.check_js, @@ -139,7 +141,7 @@ pub struct GraphWalkErrorsOptions { /// and enhances them with CLI information. pub fn graph_walk_errors<'a>( graph: &'a ModuleGraph, - fs: &'a Arc<dyn FileSystem>, + sys: &'a CliSys, roots: &'a [ModuleSpecifier], options: GraphWalkErrorsOptions, ) -> impl Iterator<Item = String> + 'a { @@ -174,7 +176,7 @@ pub fn graph_walk_errors<'a>( } ModuleGraphError::ModuleError(error) => { enhanced_integrity_error_message(error) - .or_else(|| enhanced_sloppy_imports_error_message(fs, error)) + .or_else(|| enhanced_sloppy_imports_error_message(sys, error)) .unwrap_or_else(|| format_deno_graph_error(error)) } }; @@ -431,8 +433,7 @@ pub struct ModuleGraphBuilder { caches: Arc<cache::Caches>, cjs_tracker: Arc<CjsTracker>, cli_options: Arc<CliOptions>, - file_fetcher: Arc<FileFetcher>, - fs: Arc<dyn FileSystem>, + file_fetcher: Arc<CliFileFetcher>, global_http_cache: Arc<GlobalHttpCache>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, lockfile: Option<Arc<CliLockfile>>, @@ -442,6 +443,7 @@ pub struct ModuleGraphBuilder { parsed_source_cache: Arc<ParsedSourceCache>, resolver: Arc<CliResolver>, root_permissions_container: PermissionsContainer, + sys: CliSys, } impl ModuleGraphBuilder { @@ -450,8 +452,7 @@ impl ModuleGraphBuilder { caches: Arc<cache::Caches>, cjs_tracker: Arc<CjsTracker>, cli_options: Arc<CliOptions>, - file_fetcher: Arc<FileFetcher>, - fs: Arc<dyn FileSystem>, + file_fetcher: Arc<CliFileFetcher>, global_http_cache: Arc<GlobalHttpCache>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, lockfile: Option<Arc<CliLockfile>>, @@ -461,13 +462,13 @@ impl ModuleGraphBuilder { parsed_source_cache: Arc<ParsedSourceCache>, resolver: Arc<CliResolver>, root_permissions_container: PermissionsContainer, + sys: CliSys, ) -> Self { Self { caches, cjs_tracker, cli_options, file_fetcher, - fs, global_http_cache, in_npm_pkg_checker, lockfile, @@ -477,6 +478,7 @@ impl ModuleGraphBuilder { parsed_source_cache, resolver, root_permissions_container, + sys, } } @@ -592,7 +594,7 @@ impl ModuleGraphBuilder { is_dynamic: options.is_dynamic, passthrough_jsr_specifiers: false, executor: Default::default(), - file_system: &DenoGraphFsAdapter(self.fs.as_ref()), + file_system: &self.sys, jsr_url_provider: &CliJsrUrlProvider, npm_resolver: Some(&graph_npm_resolver), module_analyzer: &analyzer, @@ -680,7 +682,7 @@ impl ModuleGraphBuilder { for (from, to) in graph.packages.mappings() { lockfile.insert_package_specifier( JsrDepPackageReq::jsr(from.clone()), - to.version.to_string(), + to.version.to_custom_string::<SmallStackString>(), ); } } @@ -746,10 +748,10 @@ impl ModuleGraphBuilder { ) -> cache::FetchCacher { cache::FetchCacher::new( self.file_fetcher.clone(), - self.fs.clone(), self.global_http_cache.clone(), self.in_npm_pkg_checker.clone(), self.module_info_cache.clone(), + self.sys.clone(), cache::FetchCacherOptions { file_header_overrides: self.cli_options.resolve_file_header_overrides(), permissions, @@ -778,7 +780,7 @@ impl ModuleGraphBuilder { ) -> Result<(), AnyError> { graph_valid( graph, - &self.fs, + &self.sys, roots, GraphValidOptions { kind: if self.cli_options.type_check_mode().is_true() { @@ -834,13 +836,13 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String { } fn enhanced_sloppy_imports_error_message( - fs: &Arc<dyn FileSystem>, + sys: &CliSys, error: &ModuleError, ) -> Option<String> { match error { ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex.
"Is a directory" error | ModuleError::Missing(specifier, _) => { - let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone())) + let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(sys.clone())) .resolve(specifier, SloppyImportsResolutionKind::Execution)? .as_suggestion_message(); Some(format!( @@ -1081,71 +1083,6 @@ impl deno_graph::source::Reporter for FileWatcherReporter { } } -pub struct DenoGraphFsAdapter<'a>( - pub &'a dyn deno_runtime::deno_fs::FileSystem, -); - -impl<'a> deno_graph::source::FileSystem for DenoGraphFsAdapter<'a> { - fn read_dir( - &self, - dir_url: &deno_graph::ModuleSpecifier, - ) -> Vec { - use deno_core::anyhow; - use deno_graph::source::DirEntry; - use deno_graph::source::DirEntryKind; - - let dir_path = match dir_url.to_file_path() { - Ok(path) => path, - // ignore, treat as non-analyzable - Err(()) => return vec![], - }; - let entries = match self.0.read_dir_sync(&dir_path) { - Ok(dir) => dir, - Err(err) - if matches!( - err.kind(), - std::io::ErrorKind::PermissionDenied | std::io::ErrorKind::NotFound - ) => - { - return vec![]; - } - Err(err) => { - return vec![DirEntry { - kind: DirEntryKind::Error( - anyhow::Error::from(err) - .context("Failed to read directory.".to_string()), - ), - url: dir_url.clone(), - }]; - } - }; - let mut dir_entries = Vec::with_capacity(entries.len()); - for entry in entries { - let entry_path = dir_path.join(&entry.name); - dir_entries.push(if entry.is_directory { - DirEntry { - kind: DirEntryKind::Dir, - url: ModuleSpecifier::from_directory_path(&entry_path).unwrap(), - } - } else if entry.is_file { - DirEntry { - kind: DirEntryKind::File, - url: ModuleSpecifier::from_file_path(&entry_path).unwrap(), - } - } else if entry.is_symlink { - DirEntry { - kind: DirEntryKind::Symlink, - url: ModuleSpecifier::from_file_path(&entry_path).unwrap(), - } - } else { - continue; - }); - } - - dir_entries - } -} - pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String { format!( "{}:{}:{}", diff --git a/cli/http_util.rs b/cli/http_util.rs index 4b17936d68..b24dd7bc0c 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -1,14 +1,11 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use crate::auth_tokens::AuthToken; use crate::util::progress_bar::UpdateGuard; use crate::version; -use cache_control::Cachability; -use cache_control::CacheControl; -use chrono::DateTime; +use boxed_error::Boxed; +use deno_cache_dir::file_fetcher::RedirectHeaderParseError; use deno_core::error::custom_error; -use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::StreamExt; use deno_core::parking_lot::Mutex; @@ -18,195 +15,26 @@ use deno_core::url::Url; use deno_runtime::deno_fetch; use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::CreateHttpClientOptions; +use deno_runtime::deno_fetch::ResBody; use deno_runtime::deno_tls::RootCertStoreProvider; -use http::header; use http::header::HeaderName; use http::header::HeaderValue; -use http::header::ACCEPT; -use http::header::AUTHORIZATION; use http::header::CONTENT_LENGTH; -use http::header::IF_NONE_MATCH; -use http::header::LOCATION; +use http::HeaderMap; use http::StatusCode; use http_body_util::BodyExt; use std::collections::HashMap; use std::sync::Arc; use std::thread::ThreadId; -use std::time::Duration; -use std::time::SystemTime; use thiserror::Error; -// TODO(ry) HTTP headers are not unique key, value pairs. 
There may be more than -// one header line with the same key. This should be changed to something like -// Vec<(String, String)> -pub type HeadersMap = HashMap<String, String>; - -/// A structure used to determine if a entity in the http cache can be used. -/// -/// This is heavily influenced by -/// <https://github.com/kornelski/rusty-http-cache-semantics> which is BSD -/// 2-Clause Licensed and copyright Kornel Lesiński -pub struct CacheSemantics { - cache_control: CacheControl, - cached: SystemTime, - headers: HashMap<String, String>, - now: SystemTime, -} - -impl CacheSemantics { - pub fn new( - headers: HashMap<String, String>, - cached: SystemTime, - now: SystemTime, - ) -> Self { - let cache_control = headers - .get("cache-control") - .map(|v| CacheControl::from_value(v).unwrap_or_default()) - .unwrap_or_default(); - Self { - cache_control, - cached, - headers, - now, - } - } - - fn age(&self) -> Duration { - let mut age = self.age_header_value(); - - if let Ok(resident_time) = self.now.duration_since(self.cached) { - age += resident_time; - } - - age - } - - fn age_header_value(&self) -> Duration { - Duration::from_secs( - self - .headers - .get("age") - .and_then(|v| v.parse().ok()) - .unwrap_or(0), - ) - } - - fn is_stale(&self) -> bool { - self.max_age() <= self.age() - } - - fn max_age(&self) -> Duration { - if self.cache_control.cachability == Some(Cachability::NoCache) { - return Duration::from_secs(0); - } - - if self.headers.get("vary").map(|s| s.trim()) == Some("*") { - return Duration::from_secs(0); - } - - if let Some(max_age) = self.cache_control.max_age { - return max_age; - } - - let default_min_ttl = Duration::from_secs(0); - - let server_date = self.raw_server_date(); - if let Some(expires) = self.headers.get("expires") { - return match DateTime::parse_from_rfc2822(expires) { - Err(_) => Duration::from_secs(0), - Ok(expires) => { - let expires = SystemTime::UNIX_EPOCH - + Duration::from_secs(expires.timestamp().max(0) as _); - return default_min_ttl - .max(expires.duration_since(server_date).unwrap_or_default()); - } - }; - } - - if let Some(last_modified) = self.headers.get("last-modified") { - if let Ok(last_modified) = DateTime::parse_from_rfc2822(last_modified) { - let last_modified = SystemTime::UNIX_EPOCH - + Duration::from_secs(last_modified.timestamp().max(0) as _); - if let Ok(diff) = server_date.duration_since(last_modified) { - let secs_left = diff.as_secs() as f64 * 0.1; - return default_min_ttl.max(Duration::from_secs(secs_left as _)); - } - } - } - - default_min_ttl - } - - fn raw_server_date(&self) -> SystemTime { - self - .headers - .get("date") - .and_then(|d| DateTime::parse_from_rfc2822(d).ok()) - .and_then(|d| { - SystemTime::UNIX_EPOCH - .checked_add(Duration::from_secs(d.timestamp() as _)) - }) - .unwrap_or(self.cached) - } - - /// Returns true if the cached value is "fresh" respecting cached headers, - /// otherwise returns false.
- pub fn should_use(&self) -> bool { - if self.cache_control.cachability == Some(Cachability::NoCache) { - return false; - } - - if let Some(max_age) = self.cache_control.max_age { - if self.age() > max_age { - return false; - } - } - - if let Some(min_fresh) = self.cache_control.min_fresh { - if self.time_to_live() < min_fresh { - return false; - } - } - - if self.is_stale() { - let has_max_stale = self.cache_control.max_stale.is_some(); - let allows_stale = has_max_stale - && self - .cache_control - .max_stale - .map(|val| val > self.age() - self.max_age()) - .unwrap_or(true); - if !allows_stale { - return false; - } - } - - true - } - - fn time_to_live(&self) -> Duration { - self.max_age().checked_sub(self.age()).unwrap_or_default() - } -} - -#[derive(Debug, Eq, PartialEq)] -pub enum FetchOnceResult { - Code(Vec<u8>, HeadersMap), - NotModified, - Redirect(Url, HeadersMap), - RequestError(String), - ServerError(StatusCode), -} - -#[derive(Debug)] -pub struct FetchOnceArgs<'a> { - pub url: Url, - pub maybe_accept: Option<String>, - pub maybe_etag: Option<String>, - pub maybe_auth_token: Option<AuthToken>, - pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, - pub maybe_progress_guard: Option<&'a UpdateGuard>, +#[derive(Debug, Error)] +pub enum SendError { + #[error(transparent)] + Send(#[from] deno_fetch::ClientSendError), + #[error(transparent)] + InvalidUri(#[from] http::uri::InvalidUri), } pub struct HttpClientProvider { @@ -273,8 +101,11 @@ pub struct BadResponseError { pub response_text: Option<String>, } +#[derive(Debug, Boxed)] +pub struct DownloadError(pub Box<DownloadErrorKind>); + #[derive(Debug, Error)] -pub enum DownloadError { +pub enum DownloadErrorKind { #[error(transparent)] Fetch(AnyError), #[error(transparent)] @@ -285,8 +116,8 @@ pub enum DownloadErrorKind { Json(#[from] serde_json::Error), #[error(transparent)] ToStr(#[from] http::header::ToStrError), - #[error("Redirection from '{}' did not provide location header", .request_url)] - NoRedirectHeader { request_url: Url }, + #[error(transparent)] + RedirectHeaderParse(RedirectHeaderParseError), #[error("Too many redirects.")] TooManyRedirects, #[error(transparent)] @@ -314,9 +145,7 @@ impl HttpClient { } pub fn get(&self, url: Url) -> Result<RequestBuilder, http::uri::InvalidUri> { - let body = http_body_util::Empty::new() - .map_err(|never| match never {}) - .boxed(); + let body = deno_fetch::ReqBody::empty(); let mut req = http::Request::new(body); *req.uri_mut() = url.as_str().parse()?; Ok(RequestBuilder { @@ -348,9 +177,7 @@ impl HttpClient { S: serde::Serialize, { let json = deno_core::serde_json::to_vec(ser)?; - let body = http_body_util::Full::new(json.into()) - .map_err(|never| match never {}) - .boxed(); + let body = deno_fetch::ReqBody::full(json.into()); let builder = self.post(url, body)?; Ok(builder.header( http::header::CONTENT_TYPE, @@ -358,107 +185,22 @@ impl HttpClient { )) } - /// Asynchronously fetches the given HTTP URL one pass only. - /// If no redirect is present and no error occurs, - /// yields Code(ResultPayload). - /// If redirect occurs, does not follow and - /// yields Redirect(url).
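// Editorial note, hedged: the one-pass fetch documented above is replaced by a
// minimal send() wrapper that just returns the raw http::Response; redirect,
// etag, and auth handling appear to move behind deno_cache_dir's file fetcher
// (see the HttpClientAdapter tests added to cli/file_fetcher.rs in this diff).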
- pub async fn fetch_no_follow<'a>( + pub async fn send( &self, - args: FetchOnceArgs<'a>, - ) -> Result<FetchOnceResult, AnyError> { - let body = http_body_util::Empty::new() - .map_err(|never| match never {}) - .boxed(); + url: &Url, + headers: HeaderMap, + ) -> Result<http::Response<ResBody>, SendError> { + let body = deno_fetch::ReqBody::empty(); let mut request = http::Request::new(body); - *request.uri_mut() = args.url.as_str().parse()?; + *request.uri_mut() = http::Uri::try_from(url.as_str())?; + *request.headers_mut() = headers; - if let Some(etag) = args.maybe_etag { - let if_none_match_val = HeaderValue::from_str(&etag)?; - request - .headers_mut() - .insert(IF_NONE_MATCH, if_none_match_val); - } - if let Some(auth_token) = args.maybe_auth_token { - let authorization_val = HeaderValue::from_str(&auth_token.to_string())?; - request - .headers_mut() - .insert(AUTHORIZATION, authorization_val); - } else if let Some((header, value)) = args.maybe_auth { - request.headers_mut().insert(header, value); - } - if let Some(accept) = args.maybe_accept { - let accepts_val = HeaderValue::from_str(&accept)?; - request.headers_mut().insert(ACCEPT, accepts_val); - } - let response = match self.client.clone().send(request).await { - Ok(resp) => resp, - Err(err) => { - if err.is_connect_error() { - return Ok(FetchOnceResult::RequestError(err.to_string())); - } - return Err(err.into()); - } - }; - - if response.status() == StatusCode::NOT_MODIFIED { - return Ok(FetchOnceResult::NotModified); - } - - let mut result_headers = HashMap::new(); - let response_headers = response.headers(); - - if let Some(warning) = response_headers.get("X-Deno-Warning") { - log::warn!( - "{} {}", - crate::colors::yellow("Warning"), - warning.to_str().unwrap() - ); - } - - for key in response_headers.keys() { - let key_str = key.to_string(); - let values = response_headers.get_all(key); - let values_str = values - .iter() - .map(|e| e.to_str().unwrap().to_string()) - .collect::<Vec<_>>() - .join(","); - result_headers.insert(key_str, values_str); - } - - if response.status().is_redirection() { - let new_url = resolve_redirect_from_response(&args.url, &response)?; - return Ok(FetchOnceResult::Redirect(new_url, result_headers)); - } - - let status = response.status(); - - if status.is_server_error() { - return Ok(FetchOnceResult::ServerError(status)); - } - - if status.is_client_error() { - let err = if response.status() == StatusCode::NOT_FOUND { - custom_error( - "NotFound", - format!("Import '{}' failed, not found.", args.url), - ) - } else { - generic_error(format!( - "Import '{}' failed: {}", - args.url, - response.status() - )) - }; - return Err(err); - } - - let body = - get_response_body_with_progress(response, args.maybe_progress_guard) - .await?; - - Ok(FetchOnceResult::Code(body, result_headers)) + self + .client + .clone() + .send(request) + .await + .map_err(SendError::Send) } pub async fn download_text(&self, url: Url) -> Result<String, AnyError> { @@ -488,7 +230,12 @@ impl HttpClient { Some(progress_guard), ) }, - |e| matches!(e, DownloadError::BadResponse(_) | DownloadError::Fetch(_)), + |e| { + matches!( + e.as_kind(), + DownloadErrorKind::BadResponse(_) | DownloadErrorKind::Fetch(_) + ) + }, ) .await } @@ -515,18 +262,21 @@ impl HttpClient { } else if !response.status().is_success() { let status = response.status(); let maybe_response_text = body_to_string(response).await.ok(); - return Err(DownloadError::BadResponse(BadResponseError { - status_code: status, - response_text: maybe_response_text - .map(|s| s.trim().to_string()) - .filter(|s| !s.is_empty()), - })); + return Err( +
DownloadErrorKind::BadResponse(BadResponseError { + status_code: status, + response_text: maybe_response_text + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()), + }) + .into_box(), + ); } get_response_body_with_progress(response, progress_guard) .await - .map(Some) - .map_err(DownloadError::Fetch) + .map(|(_, body)| Some(body)) + .map_err(|err| DownloadErrorKind::Fetch(err).into_box()) } async fn get_redirected_response( @@ -543,7 +293,7 @@ impl HttpClient { .clone() .send(req) .await - .map_err(|e| DownloadError::Fetch(e.into()))?; + .map_err(|e| DownloadErrorKind::Fetch(e.into()).into_box())?; let status = response.status(); if status.is_redirection() { for _ in 0..5 { @@ -563,7 +313,7 @@ impl HttpClient { .clone() .send(req) .await - .map_err(|e| DownloadError::Fetch(e.into()))?; + .map_err(|e| DownloadErrorKind::Fetch(e.into()).into_box())?; let status = new_response.status(); if status.is_redirection() { response = new_response; @@ -572,17 +322,17 @@ impl HttpClient { return Ok((new_response, new_url)); } } - Err(DownloadError::TooManyRedirects) + Err(DownloadErrorKind::TooManyRedirects.into_box()) } else { Ok((response, url)) } } } -async fn get_response_body_with_progress( +pub async fn get_response_body_with_progress( response: http::Response<deno_fetch::ResBody>, progress_guard: Option<&UpdateGuard>, -) -> Result<Vec<u8>, AnyError> { +) -> Result<(HeaderMap, Vec<u8>), AnyError> { use http_body::Body as _; if let Some(progress_guard) = progress_guard { let mut total_size = response.body().size_hint().exact(); @@ -597,45 +347,21 @@ pub async fn get_response_body_with_progress( progress_guard.set_total_size(total_size); let mut current_size = 0; let mut data = Vec::with_capacity(total_size as usize); - let mut stream = response.into_body().into_data_stream(); + let (parts, body) = response.into_parts(); + let mut stream = body.into_data_stream(); while let Some(item) = stream.next().await { let bytes = item?; current_size += bytes.len() as u64; progress_guard.set_position(current_size); data.extend(bytes.into_iter()); } - return Ok(data); + return Ok((parts.headers, data)); } } - let bytes = response.collect().await?.to_bytes(); - Ok(bytes.into()) -} -/// Construct the next uri based on base uri and location header fragment -/// See <https://tools.ietf.org/html/rfc7231#section-7.1.2> -fn resolve_url_from_location(base_url: &Url, location: &str) -> Url { - if location.starts_with("http://") || location.starts_with("https://") { - // absolute uri - Url::parse(location).expect("provided redirect url should be a valid url") - } else if location.starts_with("//") { - // "//" authority path-abempty - Url::parse(&format!("{}:{}", base_url.scheme(), location)) - .expect("provided redirect url should be a valid url") - } else if location.starts_with('/') { - // path-absolute - base_url - .join(location) - .expect("provided redirect url should be a valid url") - } else { - // assuming path-noscheme | path-empty - let base_url_path_str = base_url.path().to_owned(); - // Pop last part or url (after last slash) - let segs: Vec<&str> = base_url_path_str.rsplitn(2, '/').collect(); - let new_path = format!("{}/{}", segs.last().unwrap_or(&""), location); - base_url - .join(&new_path) - .expect("provided redirect url should be a valid url") - } + let (parts, body) = response.into_parts(); + let bytes = body.collect().await?.to_bytes(); + Ok((parts.headers, bytes.into())) } fn resolve_redirect_from_response( request_url: &Url, response: &http::Response<deno_fetch::ResBody>, ) -> Result<Url, DownloadError> { debug_assert!(response.status().is_redirection()); - if let Some(location)
= response.headers().get(LOCATION) { - let location_string = location.to_str()?; - log::debug!("Redirecting to {:?}...", &location_string); - let new_url = resolve_url_from_location(request_url, location_string); - Ok(new_url) - } else { - Err(DownloadError::NoRedirectHeader { - request_url: request_url.clone(), - }) - } + deno_cache_dir::file_fetcher::resolve_redirect_from_headers( + request_url, + response.headers(), + ) + .map_err(|err| DownloadErrorKind::RedirectHeaderParse(*err).into_box()) } pub async fn body_to_string<B>(body: B) -> Result<String, AnyError> where @@ -707,8 +428,6 @@ mod test { use deno_runtime::deno_tls::rustls::RootCertStore; - use crate::version; - use super::*; #[tokio::test] @@ -738,231 +457,9 @@ mod test { assert_eq!(err.to_string(), "Too many redirects."); } - #[test] - fn test_resolve_url_from_location_full_1() { - let url = "http://deno.land".parse::<Url>().unwrap(); - let new_uri = resolve_url_from_location(&url, "http://golang.org"); - assert_eq!(new_uri.host_str().unwrap(), "golang.org"); - } - - #[test] - fn test_resolve_url_from_location_full_2() { - let url = "https://deno.land".parse::<Url>().unwrap(); - let new_uri = resolve_url_from_location(&url, "https://golang.org"); - assert_eq!(new_uri.host_str().unwrap(), "golang.org"); - } - - #[test] - fn test_resolve_url_from_location_relative_1() { - let url = "http://deno.land/x".parse::<Url>().unwrap(); - let new_uri = resolve_url_from_location(&url, "//rust-lang.org/en-US"); - assert_eq!(new_uri.host_str().unwrap(), "rust-lang.org"); - assert_eq!(new_uri.path(), "/en-US"); - } - - #[test] - fn test_resolve_url_from_location_relative_2() { - let url = "http://deno.land/x".parse::<Url>().unwrap(); - let new_uri = resolve_url_from_location(&url, "/y"); - assert_eq!(new_uri.host_str().unwrap(), "deno.land"); - assert_eq!(new_uri.path(), "/y"); - } - - #[test] - fn test_resolve_url_from_location_relative_3() { - let url = "http://deno.land/x".parse::<Url>().unwrap(); - let new_uri = resolve_url_from_location(&url, "z"); - assert_eq!(new_uri.host_str().unwrap(), "deno.land"); - assert_eq!(new_uri.path(), "/z"); - } - - fn create_test_client() -> HttpClient { - HttpClient::new( - create_http_client("test_client", CreateHttpClientOptions::default()) - .unwrap(), - ) - } - - #[tokio::test] - async fn test_fetch_string() { - let _http_server_guard = test_util::http_server(); - // Relies on external http server. See target/debug/test_server - let url = Url::parse("http://127.0.0.1:4545/assets/fixture.json").unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, headers)) = result { - assert!(!body.is_empty()); - assert_eq!(headers.get("content-type").unwrap(), "application/json"); - assert_eq!(headers.get("etag"), None); - assert_eq!(headers.get("x-typescript-types"), None); - } else { - panic!(); - } - } - - #[tokio::test] - async fn test_fetch_gzip() { - let _http_server_guard = test_util::http_server(); - // Relies on external http server.
See target/debug/test_server - let url = Url::parse("http://127.0.0.1:4545/run/import_compression/gziped") - .unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, headers)) = result { - assert_eq!(String::from_utf8(body).unwrap(), "console.log('gzip')"); - assert_eq!( - headers.get("content-type").unwrap(), - "application/javascript" - ); - assert_eq!(headers.get("etag"), None); - assert_eq!(headers.get("x-typescript-types"), None); - } else { - panic!(); - } - } - - #[tokio::test] - async fn test_fetch_with_etag() { - let _http_server_guard = test_util::http_server(); - let url = Url::parse("http://127.0.0.1:4545/etag_script.ts").unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url: url.clone(), - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, headers)) = result { - assert!(!body.is_empty()); - assert_eq!(String::from_utf8(body).unwrap(), "console.log('etag')"); - assert_eq!( - headers.get("content-type").unwrap(), - "application/typescript" - ); - assert_eq!(headers.get("etag").unwrap(), "33a64df551425fcc55e"); - } else { - panic!(); - } - - let res = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: Some("33a64df551425fcc55e".to_string()), - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - assert_eq!(res.unwrap(), FetchOnceResult::NotModified); - } - - #[tokio::test] - async fn test_fetch_brotli() { - let _http_server_guard = test_util::http_server(); - // Relies on external http server. See target/debug/test_server - let url = Url::parse("http://127.0.0.1:4545/run/import_compression/brotli") - .unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, headers)) = result { - assert!(!body.is_empty()); - assert_eq!(String::from_utf8(body).unwrap(), "console.log('brotli');"); - assert_eq!( - headers.get("content-type").unwrap(), - "application/javascript" - ); - assert_eq!(headers.get("etag"), None); - assert_eq!(headers.get("x-typescript-types"), None); - } else { - panic!(); - } - } - - #[tokio::test] - async fn test_fetch_accept() { - let _http_server_guard = test_util::http_server(); - // Relies on external http server. See target/debug/test_server - let url = Url::parse("http://127.0.0.1:4545/echo_accept").unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: Some("application/json".to_string()), - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, _)) = result { - assert_eq!(body, r#"{"accept":"application/json"}"#.as_bytes()); - } else { - panic!(); - } - } - - #[tokio::test] - async fn test_fetch_no_follow_with_redirect() { - let _http_server_guard = test_util::http_server(); - // Relies on external http server. 
See target/debug/test_server - let url = Url::parse("http://127.0.0.1:4546/assets/fixture.json").unwrap(); - // Dns resolver substitutes `127.0.0.1` with `localhost` - let target_url = - Url::parse("http://localhost:4545/assets/fixture.json").unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Redirect(url, _)) = result { - assert_eq!(url, target_url); - } else { - panic!(); - } - } - #[tokio::test] async fn test_fetch_with_cafile_string() { let _http_server_guard = test_util::http_server(); - // Relies on external http server. See target/debug/test_server let url = Url::parse("https://localhost:5545/assets/fixture.json").unwrap(); let client = HttpClient::new( @@ -978,24 +475,15 @@ mod test { ) .unwrap(), ); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, headers)) = result { - assert!(!body.is_empty()); - assert_eq!(headers.get("content-type").unwrap(), "application/json"); - assert_eq!(headers.get("etag"), None); - assert_eq!(headers.get("x-typescript-types"), None); - } else { - panic!(); - } + let response = client.send(&url, Default::default()).await.unwrap(); + assert!(response.status().is_success()); + let (parts, body) = response.into_parts(); + let headers = parts.headers; + let body = body.collect().await.unwrap().to_bytes(); + assert!(!body.is_empty()); + assert_eq!(headers.get("content-type").unwrap(), "application/json"); + assert_eq!(headers.get("etag"), None); + assert_eq!(headers.get("x-typescript-types"), None); } static PUBLIC_HTTPS_URLS: &[&str] = &[ @@ -1026,34 +514,15 @@ mod test { .unwrap(), ); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - + let result = client.send(&url, Default::default()).await; match result { - Err(_) => { - eprintln!("Fetch error: {result:?}"); - continue; + Ok(response) if response.status().is_success() => { + return; // success } - Ok( - FetchOnceResult::Code(..) - | FetchOnceResult::NotModified - | FetchOnceResult::Redirect(..), - ) => return, - Ok( - FetchOnceResult::RequestError(_) | FetchOnceResult::ServerError(_), - ) => { - eprintln!("HTTP error: {result:?}"); - continue; + _ => { + // keep going } - }; + } } // Use 1.1.1.1 and 8.8.8.8 as our last-ditch internet check @@ -1089,42 +558,13 @@ mod test { .unwrap(), ); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - - match result { - Err(_) => { - eprintln!("Fetch error (expected): {result:?}"); - return; - } - Ok( - FetchOnceResult::Code(..) 
- | FetchOnceResult::NotModified - | FetchOnceResult::Redirect(..), - ) => { - panic!("Should not have successfully fetched a URL"); - } - Ok( - FetchOnceResult::RequestError(_) | FetchOnceResult::ServerError(_), - ) => { - eprintln!("HTTP error (expected): {result:?}"); - return; - } - }; + let result = client.send(&url, HeaderMap::new()).await; + assert!(result.is_err() || !result.unwrap().status().is_success()); } #[tokio::test] async fn test_fetch_with_cafile_gzip() { let _http_server_guard = test_util::http_server(); - // Relies on external http server. See target/debug/test_server let url = Url::parse("https://localhost:5545/run/import_compression/gziped") .unwrap(); @@ -1143,27 +583,18 @@ mod test { ) .unwrap(), ); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, headers)) = result { - assert_eq!(String::from_utf8(body).unwrap(), "console.log('gzip')"); - assert_eq!( - headers.get("content-type").unwrap(), - "application/javascript" - ); - assert_eq!(headers.get("etag"), None); - assert_eq!(headers.get("x-typescript-types"), None); - } else { - panic!(); - } + let response = client.send(&url, Default::default()).await.unwrap(); + assert!(response.status().is_success()); + let (parts, body) = response.into_parts(); + let headers = parts.headers; + let body = body.collect().await.unwrap().to_bytes().to_vec(); + assert_eq!(String::from_utf8(body).unwrap(), "console.log('gzip')"); + assert_eq!( + headers.get("content-type").unwrap(), + "application/javascript" + ); + assert_eq!(headers.get("etag"), None); + assert_eq!(headers.get("x-typescript-types"), None); } #[tokio::test] @@ -1185,46 +616,29 @@ mod test { ) .unwrap(), ); - let result = client - .fetch_no_follow(FetchOnceArgs { - url: url.clone(), - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, headers)) = result { - assert!(!body.is_empty()); - assert_eq!(String::from_utf8(body).unwrap(), "console.log('etag')"); - assert_eq!( - headers.get("content-type").unwrap(), - "application/typescript" - ); - assert_eq!(headers.get("etag").unwrap(), "33a64df551425fcc55e"); - assert_eq!(headers.get("x-typescript-types"), None); - } else { - panic!(); - } + let response = client.send(&url, Default::default()).await.unwrap(); + assert!(response.status().is_success()); + let (parts, body) = response.into_parts(); + let headers = parts.headers; + let body = body.collect().await.unwrap().to_bytes().to_vec(); + assert!(!body.is_empty()); + assert_eq!(String::from_utf8(body).unwrap(), "console.log('etag')"); + assert_eq!( + headers.get("content-type").unwrap(), + "application/typescript" + ); + assert_eq!(headers.get("etag").unwrap(), "33a64df551425fcc55e"); + assert_eq!(headers.get("x-typescript-types"), None); - let res = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: Some("33a64df551425fcc55e".to_string()), - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - assert_eq!(res.unwrap(), FetchOnceResult::NotModified); + let mut headers = HeaderMap::new(); + headers.insert("If-None-Match", "33a64df551425fcc55e".parse().unwrap()); + let res = client.send(&url, headers).await.unwrap(); + assert_eq!(res.status(), StatusCode::NOT_MODIFIED); } #[tokio::test] async fn 
test_fetch_with_cafile_brotli() { let _http_server_guard = test_util::http_server(); - // Relies on external http server. See target/debug/test_server let url = Url::parse("https://localhost:5545/run/import_compression/brotli") .unwrap(); @@ -1243,93 +657,18 @@ mod test { ) .unwrap(), ); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - if let Ok(FetchOnceResult::Code(body, headers)) = result { - assert!(!body.is_empty()); - assert_eq!(String::from_utf8(body).unwrap(), "console.log('brotli');"); - assert_eq!( - headers.get("content-type").unwrap(), - "application/javascript" - ); - assert_eq!(headers.get("etag"), None); - assert_eq!(headers.get("x-typescript-types"), None); - } else { - panic!(); - } - } - - #[tokio::test] - async fn bad_redirect() { - let _g = test_util::http_server(); - let url_str = "http://127.0.0.1:4545/bad_redirect"; - let url = Url::parse(url_str).unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - assert!(result.is_err()); - let err = result.unwrap_err(); - // Check that the error message contains the original URL - assert!(err.to_string().contains(url_str)); - } - - #[tokio::test] - async fn server_error() { - let _g = test_util::http_server(); - let url_str = "http://127.0.0.1:4545/server_error"; - let url = Url::parse(url_str).unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - - if let Ok(FetchOnceResult::ServerError(status)) = result { - assert_eq!(status, 500); - } else { - panic!(); - } - } - - #[tokio::test] - async fn request_error() { - let _g = test_util::http_server(); - let url_str = "http://127.0.0.1:9999/"; - let url = Url::parse(url_str).unwrap(); - let client = create_test_client(); - let result = client - .fetch_no_follow(FetchOnceArgs { - url, - maybe_accept: None, - maybe_etag: None, - maybe_auth_token: None, - maybe_progress_guard: None, - maybe_auth: None, - }) - .await; - - assert!(matches!(result, Ok(FetchOnceResult::RequestError(_)))); + let response = client.send(&url, Default::default()).await.unwrap(); + assert!(response.status().is_success()); + let (parts, body) = response.into_parts(); + let headers = parts.headers; + let body = body.collect().await.unwrap().to_bytes().to_vec(); + assert!(!body.is_empty()); + assert_eq!(String::from_utf8(body).unwrap(), "console.log('brotli');"); + assert_eq!( + headers.get("content-type").unwrap(), + "application/javascript" + ); + assert_eq!(headers.get("etag"), None); + assert_eq!(headers.get("x-typescript-types"), None); } } diff --git a/cli/js/40_lint.js b/cli/js/40_lint.js new file mode 100644 index 0000000000..d29dc3e850 --- /dev/null +++ b/cli/js/40_lint.js @@ -0,0 +1,1122 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +// @ts-check + +import { + compileSelector, + parseSelector, + splitSelectors, +} from "ext:cli/40_lint_selector.js"; +import { core, internals } from "ext:core/mod.js"; +const { + op_lint_create_serialized_ast, +} = core.ops; + +// Keep in sync with Rust +// These types are expected to be present on every node. 
Note that this +// isn't set in stone. We could revise this at a future point. +const AST_PROP_TYPE = 1; +const AST_PROP_PARENT = 2; +const AST_PROP_RANGE = 3; +const AST_PROP_LENGTH = 4; + +// Keep in sync with Rust +// Each node property is tagged with this enum to denote +// what kind of value it holds. +/** @enum {number} */ +const PropFlags = { + /** This is an offset to another node */ + Ref: 0, + /** This is an array of offsets to other nodes (like children of a BlockStatement) */ + RefArr: 1, + /** + * This is a string id. The actual string needs to be looked up in + * the string table that was included in the message. + */ + String: 2, + /** This value is either 0 = false, or 1 = true */ + Bool: 3, + /** No value, it's null */ + Null: 4, + /** No value, it's undefined */ + Undefined: 5, +}; + +/** @typedef {import("./40_lint_types.d.ts").AstContext} AstContext */ +/** @typedef {import("./40_lint_types.d.ts").VisitorFn} VisitorFn */ +/** @typedef {import("./40_lint_types.d.ts").CompiledVisitor} CompiledVisitor */ +/** @typedef {import("./40_lint_types.d.ts").LintState} LintState */ +/** @typedef {import("./40_lint_types.d.ts").RuleContext} RuleContext */ +/** @typedef {import("./40_lint_types.d.ts").NodeFacade} NodeFacade */ +/** @typedef {import("./40_lint_types.d.ts").LintPlugin} LintPlugin */ +/** @typedef {import("./40_lint_types.d.ts").TransformFn} TransformFn */ +/** @typedef {import("./40_lint_types.d.ts").MatchContext} MatchContext */ + +/** @type {LintState} */ +const state = { + plugins: [], + installedPlugins: new Set(), +}; + +/** + * Every rule gets its own instance of this class. This is the main + * API lint rules interact with. + * @implements {RuleContext} + */ +export class Context { + id; + + fileName; + + /** + * @param {string} id + * @param {string} fileName + */ + constructor(id, fileName) { + this.id = id; + this.fileName = fileName; + } +} + +/** + * @param {LintPlugin} plugin + */ +export function installPlugin(plugin) { + if (typeof plugin !== "object") { + throw new Error("Linter plugin must be an object"); + } + if (typeof plugin.name !== "string") { + throw new Error("Linter plugin name must be a string"); + } + if (typeof plugin.rules !== "object") { + throw new Error("Linter plugin rules must be an object"); + } + if (state.installedPlugins.has(plugin.name)) { + throw new Error(`Linter plugin ${plugin.name} has already been registered`); + } + state.plugins.push(plugin); + state.installedPlugins.add(plugin.name); +} + +/** + * @param {AstContext} ctx + * @param {number} offset + * @returns {Node | null} + */ +function getNode(ctx, offset) { + if (offset === 0) return null; + const cached = ctx.nodes.get(offset); + if (cached !== undefined) return cached; + + const node = new Node(ctx, offset); + ctx.nodes.set(offset, /** @type {*} */ (node)); + return node; +} + +/** + * Find the offset of a specific property of a specific node. This will + * be used a lot more later for selectors.
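 + * Each property is encoded as [propId: u8, propFlags: u8, payload], which is why the loop below can skip over payloads it is not interested in.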
+ * @param {Uint8Array} buf + * @param {number} offset + * @param {number} search + * @returns {number} + */ +function findPropOffset(buf, offset, search) { + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const propCount = buf[offset]; + offset += 1; + + for (let i = 0; i < propCount; i++) { + const maybe = offset; + const prop = buf[offset++]; + const kind = buf[offset++]; + if (prop === search) return maybe; + + if (kind === PropFlags.Ref) { + offset += 4; + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4 + (len * 4); + } else if (kind === PropFlags.String) { + offset += 4; + } else if (kind === PropFlags.Bool) { + offset++; + } else if (kind === PropFlags.Null || kind === PropFlags.Undefined) { + // No value + } else { + offset++; + } + } + + return -1; +} + +const INTERNAL_CTX = Symbol("ctx"); +const INTERNAL_OFFSET = Symbol("offset"); + +// This class is a facade for all materialized nodes. Instead of creating a +// unique class per AST node, we have one class with getters for every +// possible node property. This allows us to lazily materialize child nodes +// only when they are needed. +class Node { + [INTERNAL_CTX]; + [INTERNAL_OFFSET]; + + /** + * @param {AstContext} ctx + * @param {number} offset + */ + constructor(ctx, offset) { + this[INTERNAL_CTX] = ctx; + this[INTERNAL_OFFSET] = offset; + } + + /** + * Logging a class with only getters prints just the class name. This + * makes debugging difficult because you don't see any of the properties. + * For that reason we'll intercept inspection and serialize the node to + * a plain JSON structure which can be logged and allows users to see all + * properties and their values. + * + * This is only expected to be used during development of a rule. + * @param {*} _ + * @param {Deno.InspectOptions} options + * @returns {string} + */ + [Symbol.for("Deno.customInspect")](_, options) { + const json = toJsValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET]); + return Deno.inspect(json, options); + } + + [Symbol.for("Deno.lint.toJsValue")]() { + return toJsValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET]); + } +} + +/** @type {Set<number>} */ +const appliedGetters = new Set(); + +/** + * Add getters for all potential properties found in the message. + * @param {AstContext} ctx + */ +function setNodeGetters(ctx) { + if (appliedGetters.size === ctx.strByProp.length) return; + + for (let i = 0; i < ctx.strByProp.length; i++) { + const id = ctx.strByProp[i]; + if (id === 0 || appliedGetters.has(i)) continue; + appliedGetters.add(i); + + const name = getString(ctx.strTable, id); + + Object.defineProperty(Node.prototype, name, { + get() { + return readValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET], i); + }, + }); + } +} + +/** + * Serialize a node recursively to plain JSON + * @param {AstContext} ctx + * @param {number} offset + * @returns {*} + */ +function toJsValue(ctx, offset) { + const { buf } = ctx; + + /** @type {Record<string, any>} */ + const node = { + type: readValue(ctx, offset, AST_PROP_TYPE), + range: readValue(ctx, offset, AST_PROP_RANGE), + }; + + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const count = buf[offset++]; + for (let i = 0; i < count; i++) { + const prop = buf[offset++]; + const kind = buf[offset++]; + const name = getString(ctx.strTable, ctx.strByProp[prop]); + + if (kind === PropFlags.Ref) { + const v = readU32(buf, offset); + offset += 4; + node[name] = v === 0 ?
null : toJsValue(ctx, v); + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4; + const nodes = new Array(len); + for (let i = 0; i < len; i++) { + const v = readU32(buf, offset); + offset += 4; + if (v === 0) continue; + nodes[i] = toJsValue(ctx, v); + } + node[name] = nodes; + } else if (kind === PropFlags.Bool) { + const v = buf[offset++]; + node[name] = v === 1; + } else if (kind === PropFlags.String) { + const v = readU32(buf, offset); + offset += 4; + node[name] = getString(ctx.strTable, v); + } else if (kind === PropFlags.Null) { + node[name] = null; + } else if (kind === PropFlags.Undefined) { + node[name] = undefined; + } + } + + return node; +} + +/** + * Read a specific property from a node + * @param {AstContext} ctx + * @param {number} offset + * @param {number} search + * @returns {*} + */ +function readValue(ctx, offset, search) { + const { buf } = ctx; + const type = buf[offset]; + + if (search === AST_PROP_TYPE) { + return getString(ctx.strTable, ctx.strByType[type]); + } else if (search === AST_PROP_RANGE) { + const start = readU32(buf, offset + 1 + 4); + const end = readU32(buf, offset + 1 + 4 + 4); + return [start, end]; + } else if (search === AST_PROP_PARENT) { + const pos = readU32(buf, offset + 1); + return getNode(ctx, pos); + } + + offset = findPropOffset(ctx.buf, offset, search); + if (offset === -1) return undefined; + + const kind = buf[offset + 1]; + offset += 2; + + if (kind === PropFlags.Ref) { + const value = readU32(buf, offset); + return getNode(ctx, value); + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4; + + const nodes = new Array(len); + for (let i = 0; i < len; i++) { + nodes[i] = getNode(ctx, readU32(buf, offset)); + offset += 4; + } + return nodes; + } else if (kind === PropFlags.Bool) { + return buf[offset] === 1; + } else if (kind === PropFlags.String) { + const v = readU32(buf, offset); + return getString(ctx.strTable, v); + } else if (kind === PropFlags.Null) { + return null; + } else if (kind === PropFlags.Undefined) { + return undefined; + } + + throw new Error(`Unknown prop kind: ${kind}`); +} + +const DECODER = new TextDecoder(); + +/** + * TODO: Check if it's faster to use the `DataView` API instead.
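 + * Reads an unsigned 32-bit integer stored in big-endian order, e.g. the bytes `[0, 0, 1, 2]` decode to `258`.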
+ * @param {Uint8Array} buf + * @param {number} i + * @returns {number} + */ +function readU32(buf, i) { + return (buf[i] << 24) + (buf[i + 1] << 16) + (buf[i + 2] << 8) + + buf[i + 3]; +} + +/** + * Get a string by id and error if it wasn't found + * @param {AstContext["strTable"]} strTable + * @param {number} id + * @returns {string} + */ +function getString(strTable, id) { + const name = strTable.get(id); + if (name === undefined) { + throw new Error(`Missing string id: ${id}`); + } + + return name; +} + +/** + * @param {AstContext["buf"]} buf + * @param {number} child + * @returns {null | [number, number]} + */ +function findChildOffset(buf, child) { + let offset = readU32(buf, child + 1); + + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const propCount = buf[offset++]; + for (let i = 0; i < propCount; i++) { + const _prop = buf[offset++]; + const kind = buf[offset++]; + + switch (kind) { + case PropFlags.Ref: { + const start = offset; + const value = readU32(buf, offset); + offset += 4; + if (value === child) { + return [start, -1]; + } + break; + } + case PropFlags.RefArr: { + const start = offset; + + const len = readU32(buf, offset); + offset += 4; + + for (let j = 0; j < len; j++) { + const value = readU32(buf, offset); + offset += 4; + if (value === child) { + return [start, j]; + } + } + + break; + } + case PropFlags.String: + offset += 4; + break; + case PropFlags.Bool: + offset++; + break; + case PropFlags.Null: + case PropFlags.Undefined: + break; + } + } + + return null; +} + +/** @implements {MatchContext} */ +class MatchCtx { + /** + * @param {AstContext["buf"]} buf + * @param {AstContext["strTable"]} strTable + * @param {AstContext["strByType"]} strByType + */ + constructor(buf, strTable, strByType) { + this.buf = buf; + this.strTable = strTable; + this.strByType = strByType; + } + + /** + * @param {number} offset + * @returns {number} + */ + getParent(offset) { + return readU32(this.buf, offset + 1); + } + + /** + * @param {number} offset + * @returns {number} + */ + getType(offset) { + return this.buf[offset]; + } + + /** + * @param {number} offset + * @param {number[]} propIds + * @param {number} idx + * @returns {unknown} + */ + getAttrPathValue(offset, propIds, idx) { + const { buf } = this; + + const propId = propIds[idx]; + + switch (propId) { + case AST_PROP_TYPE: { + const type = this.getType(offset); + return getString(this.strTable, this.strByType[type]); + } + case AST_PROP_PARENT: + case AST_PROP_RANGE: + throw new Error(`Not supported`); + } + + offset = findPropOffset(buf, offset, propId); + if (offset === -1) return undefined; + const _prop = buf[offset++]; + const kind = buf[offset++]; + + if (kind === PropFlags.Ref) { + const value = readU32(buf, offset); + // Checks need to end with a value, not a node + if (idx === propIds.length - 1) return undefined; + return this.getAttrPathValue(value, propIds, idx + 1); + } else if (kind === PropFlags.RefArr) { + const count = readU32(buf, offset); + offset += 4; + + if (idx < propIds.length - 1 && propIds[idx + 1] === AST_PROP_LENGTH) { + return count; + } + + // TODO(@marvinhagemeister): Allow traversing into array children? 
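 +      // Example: for a selector like `[params.length > 2]` the resolved `propIds` path is the prop id of `params` followed by `AST_PROP_LENGTH`, which the branch above answers from the serialized array count without materializing any child nodes.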
+ } + + // Cannot traverse into primitives further + if (idx < propIds.length - 1) return undefined; + + if (kind === PropFlags.String) { + const s = readU32(buf, offset); + return getString(this.strTable, s); + } else if (kind === PropFlags.Bool) { + return buf[offset] === 1; + } else if (kind === PropFlags.Null) { + return null; + } else if (kind === PropFlags.Undefined) { + return undefined; + } + + return undefined; + } + + /** + * @param {number} offset + * @param {number[]} propIds + * @param {number} idx + * @returns {boolean} + */ + hasAttrPath(offset, propIds, idx) { + const { buf } = this; + + const propId = propIds[idx]; + // If propId is 0 then the property doesn't exist in the AST + if (propId === 0) return false; + + switch (propId) { + case AST_PROP_TYPE: + case AST_PROP_PARENT: + case AST_PROP_RANGE: + return true; + } + + offset = findPropOffset(buf, offset, propId); + if (offset === -1) return false; + if (idx === propIds.length - 1) return true; + + const _prop = buf[offset++]; + const kind = buf[offset++]; + if (kind === PropFlags.Ref) { + const value = readU32(buf, offset); + return this.hasAttrPath(value, propIds, idx + 1); + } else if (kind === PropFlags.RefArr) { + const _count = readU32(buf, offset); + offset += 4; + + if (idx < propIds.length - 1 && propIds[idx + 1] === AST_PROP_LENGTH) { + return true; + } + + // TODO(@marvinhagemeister): Allow traversing into array children? + } + + // Primitives cannot be traversed further. This means we + // didn't find the attribute. + if (idx < propIds.length - 1) return false; + + return true; + } + + /** + * @param {number} offset + * @returns {number} + */ + getFirstChild(offset) { + const { buf } = this; + + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const count = buf[offset++]; + for (let i = 0; i < count; i++) { + const _prop = buf[offset++]; + const kind = buf[offset++]; + + switch (kind) { + case PropFlags.Ref: { + const v = readU32(buf, offset); + offset += 4; + return v; + } + case PropFlags.RefArr: { + const len = readU32(buf, offset); + offset += 4; + for (let j = 0; j < len; j++) { + const v = readU32(buf, offset); + offset += 4; + return v; + } + + // Empty ref array: keep scanning the remaining props + break; + } + + case PropFlags.String: + offset += 4; + break; + case PropFlags.Bool: + offset++; + break; + case PropFlags.Null: + case PropFlags.Undefined: + break; + } + } + + return -1; + } + + /** + * @param {number} offset + * @returns {number} + */ + getLastChild(offset) { + const { buf } = this; + + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + let last = -1; + + const count = buf[offset++]; + for (let i = 0; i < count; i++) { + const _prop = buf[offset++]; + const kind = buf[offset++]; + + switch (kind) { + case PropFlags.Ref: { + const v = readU32(buf, offset); + offset += 4; + last = v; + break; + } + case PropFlags.RefArr: { + const len = readU32(buf, offset); + offset += 4; + for (let j = 0; j < len; j++) { + const v = readU32(buf, offset); + last = v; + offset += 4; + } + + break; + } + + case PropFlags.String: + offset += 4; + break; + case PropFlags.Bool: + offset++; + break; + case PropFlags.Null: + case PropFlags.Undefined: + break; + } + } + + return last; + } + + /** + * @param {number} id + * @returns {number[]} + */ + getSiblings(id) { + const { buf } = this; + + const result = findChildOffset(buf, id); + // Happens for program nodes + if (result === null) return []; + + if (result[1] === -1) { + return [id]; + } + + let offset = result[0]; + const count = readU32(buf, offset); + offset += 4; + 
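 +    // `count` child offsets follow; collect them all so callers can locate `id` among its siblings (e.g. via `indexOf`).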
+ /** @type {number[]} */ + const out = []; + for (let i = 0; i < count; i++) { + const v = readU32(buf, offset); + offset += 4; + out.push(v); + } + + return out; + } +} + +/** + * @param {Uint8Array} buf + * @returns {AstContext} + */ +function createAstContext(buf) { + /** @type {Map<number, string>} */ + const strTable = new Map(); + + // The buffer has a few offsets at the end which allows us to easily + // jump to the relevant sections of the message. + const typeMapOffset = readU32(buf, buf.length - 16); + const propMapOffset = readU32(buf, buf.length - 12); + const strTableOffset = readU32(buf, buf.length - 8); + + // Offset of the topmost node in the AST tree. + const rootOffset = readU32(buf, buf.length - 4); + + let offset = strTableOffset; + const stringCount = readU32(buf, offset); + offset += 4; + + // TODO(@marvinhagemeister): We could lazily decode the strings on an as-needed basis. + // Not sure if this matters much in practice though. + let id = 0; + for (let i = 0; i < stringCount; i++) { + const len = readU32(buf, offset); + offset += 4; + + const strBytes = buf.slice(offset, offset + len); + offset += len; + const s = DECODER.decode(strBytes); + strTable.set(id, s); + id++; + } + + if (strTable.size !== stringCount) { + throw new Error( + `Could not deserialize string table. Expected ${stringCount} items, but got ${strTable.size}`, + ); + } + + offset = typeMapOffset; + const typeCount = readU32(buf, offset); + offset += 4; + + const typeByStr = new Map(); + const strByType = new Array(typeCount).fill(0); + for (let i = 0; i < typeCount; i++) { + const v = readU32(buf, offset); + offset += 4; + + strByType[i] = v; + typeByStr.set(strTable.get(v), i); + } + + offset = propMapOffset; + const propCount = readU32(buf, offset); + offset += 4; + + const propByStr = new Map(); + const strByProp = new Array(propCount).fill(0); + for (let i = 0; i < propCount; i++) { + const v = readU32(buf, offset); + offset += 4; + + strByProp[i] = v; + propByStr.set(strTable.get(v), i); + } + + /** @type {AstContext} */ + const ctx = { + buf, + strTable, + rootOffset, + nodes: new Map(), + strTableOffset, + strByProp, + strByType, + typeByStr, + propByStr, + matcher: new MatchCtx(buf, strTable, strByType), + }; + + setNodeGetters(ctx); + + // DEV ONLY: Enable this to inspect the buffer message + // _dump(ctx); + + return ctx; +} + +/** + * @param {*} _node + */ +const NOOP = (_node) => {}; + +/** + * Kick off the actual linting process of JS plugins. + * @param {string} fileName + * @param {Uint8Array} serializedAst + */ +export function runPluginsForFile(fileName, serializedAst) { + const ctx = createAstContext(serializedAst); + + /** @type {Map<string, { enter: VisitorFn, exit: VisitorFn }>} */ + const bySelector = new Map(); + + const destroyFns = []; + + // Instantiate and merge visitors. This allows us to only traverse + // the AST once instead of per plugin. Whenever we enter or exit a + // node we'll call all visitors that match. + for (let i = 0; i < state.plugins.length; i++) { + const plugin = state.plugins[i]; + + for (const name of Object.keys(plugin.rules)) { + const rule = plugin.rules[name]; + const id = `${plugin.name}/${name}`; + const ctx = new Context(id, fileName); + const visitor = rule.create(ctx); + + // deno-lint-ignore guard-for-in + for (let key in visitor) { + const fn = visitor[key]; + if (fn === undefined) continue; + + // Support enter and exit callbacks on a visitor. + // Exit callbacks are marked by having `:exit` at the end.
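 +        // e.g. `{ Identifier(node) {}, "Identifier:exit"(node) {} }` registers one callback for entering and one for leaving `Identifier` nodes.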
+ let isExit = false; + if (key.endsWith(":exit")) { + isExit = true; + key = key.slice(0, -":exit".length); + } + + const selectors = splitSelectors(key); + + for (let j = 0; j < selectors.length; j++) { + const key = selectors[j]; + + let info = bySelector.get(key); + if (info === undefined) { + info = { enter: NOOP, exit: NOOP }; + bySelector.set(key, info); + } + const prevFn = isExit ? info.exit : info.enter; + + /** + * @param {*} node + */ + const wrapped = (node) => { + prevFn(node); + + try { + fn(node); + } catch (err) { + throw new Error(`Visitor "${name}" of plugin "${id}" errored`, { + cause: err, + }); + } + }; + + if (isExit) { + info.exit = wrapped; + } else { + info.enter = wrapped; + } + } + } + + if (typeof rule.destroy === "function") { + const destroyFn = rule.destroy.bind(rule); + destroyFns.push(() => { + try { + destroyFn(ctx); + } catch (err) { + throw new Error(`Destroy hook of "${id}" errored`, { cause: err }); + } + }); + } + } + } + + // Create selectors + /** @type {TransformFn} */ + const toElem = (str) => { + const id = ctx.typeByStr.get(str); + return id === undefined ? 0 : id; + }; + /** @type {TransformFn} */ + const toAttr = (str) => { + const id = ctx.propByStr.get(str); + return id === undefined ? 0 : id; + }; + + /** @type {CompiledVisitor[]} */ + const visitors = []; + for (const [sel, info] of bySelector.entries()) { + // Selectors are already split here. + // TODO(@marvinhagemeister): Avoid array allocation (not sure if that matters) + const parsed = parseSelector(sel, toElem, toAttr)[0]; + const matcher = compileSelector(parsed); + + visitors.push({ info, matcher }); + } + + // Traverse ast with all visitors at the same time to avoid traversing + // multiple times. + try { + traverse(ctx, visitors, ctx.rootOffset); + } finally { + ctx.nodes.clear(); + + // Optional: Destroy rules + for (let i = 0; i < destroyFns.length; i++) { + destroyFns[i](); + } + } +} + +/** + * @param {AstContext} ctx + * @param {CompiledVisitor[]} visitors + * @param {number} offset + */ +function traverse(ctx, visitors, offset) { + // The 0 offset is used to denote an empty/placeholder node + if (offset === 0) return; + + const originalOffset = offset; + + const { buf } = ctx; + + /** @type {VisitorFn[] | null} */ + let exits = null; + + for (let i = 0; i < visitors.length; i++) { + const v = visitors[i]; + + if (v.matcher(ctx.matcher, offset)) { + if (v.info.exit !== NOOP) { + if (exits === null) { + exits = [v.info.exit]; + } else { + exits.push(v.info.exit); + } + } + + if (v.info.enter !== NOOP) { + const node = /** @type {*} */ (getNode(ctx, offset)); + v.info.enter(node); + } + } + } + + // Search for node references in the properties of the current node. All + // other properties can be ignored. 
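 +  // Each serialized node starts with: type (u8), parent offset (u32), span start/end (u32 each) and a prop count (u8), followed by that many tagged properties.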
+ try { + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const propCount = buf[offset]; + offset += 1; + + for (let i = 0; i < propCount; i++) { + const kind = buf[offset + 1]; + offset += 2; // propId + propFlags + + if (kind === PropFlags.Ref) { + const next = readU32(buf, offset); + offset += 4; + traverse(ctx, visitors, next); + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4; + + for (let j = 0; j < len; j++) { + const child = readU32(buf, offset); + offset += 4; + traverse(ctx, visitors, child); + } + } else if (kind === PropFlags.String) { + offset += 4; + } else if (kind === PropFlags.Bool) { + offset += 1; + } else if (kind === PropFlags.Null || kind === PropFlags.Undefined) { + // No value + } + } + } finally { + if (exits !== null) { + for (let i = 0; i < exits.length; i++) { + const node = /** @type {*} */ (getNode(ctx, originalOffset)); + exits[i](node); + } + } + } +} + +/** + * This is a useful debugging helper to display the buffer's contents. + * @param {AstContext} ctx + */ +function _dump(ctx) { + const { buf, strTableOffset, strTable, strByType, strByProp } = ctx; + + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(strTable); + + for (let i = 0; i < strByType.length; i++) { + const v = strByType[i]; + // @ts-ignore dump fn + // deno-lint-ignore no-console + if (v > 0) console.log(" > type:", i, getString(ctx.strTable, v), v); + } + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(); + for (let i = 0; i < strByProp.length; i++) { + const v = strByProp[i]; + // @ts-ignore dump fn + // deno-lint-ignore no-console + if (v > 0) console.log(" > prop:", i, getString(ctx.strTable, v), v); + } + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(); + + let offset = 0; + + while (offset < strTableOffset) { + const type = buf[offset]; + const name = getString(ctx.strTable, ctx.strByType[type]); + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(`${name}, offset: ${offset}, type: ${type}`); + offset += 1; + + const parent = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` parent: ${parent}`); + + const start = readU32(buf, offset); + offset += 4; + const end = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` range: ${start} -> ${end}`); + + const count = buf[offset++]; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` prop count: ${count}`); + + for (let i = 0; i < count; i++) { + const prop = buf[offset++]; + const kind = buf[offset++]; + const name = getString(ctx.strTable, ctx.strByProp[prop]); + + let kindName = "unknown"; + for (const k in PropFlags) { + // @ts-ignore dump fn + if (kind === PropFlags[k]) { + kindName = k; + } + } + + if (kind === PropFlags.Ref) { + const v = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: ${v} (${kindName}, ${prop})`); + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: Array(${len}) (${kindName}, ${prop})`); + + for (let j = 0; j < len; j++) { + const v = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` - ${v} (${prop})`); + } + } else if (kind === PropFlags.Bool) { + const v = buf[offset]; + offset += 1; + // 
@ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: ${v} (${kindName}, ${prop})`); + } else if (kind === PropFlags.String) { + const v = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log( + ` ${name}: ${getString(ctx.strTable, v)} (${kindName}, ${prop})`, + ); + } else if (kind === PropFlags.Null) { + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: null (${kindName}, ${prop})`); + } else if (kind === PropFlags.Undefined) { + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: undefined (${kindName}, ${prop})`); + } + } + } +} + +// TODO(bartlomieju): this is temporary, until we get plugins plumbed through +// the CLI linter +/** + * @param {LintPlugin} plugin + * @param {string} fileName + * @param {string} sourceText + */ +function runLintPlugin(plugin, fileName, sourceText) { + installPlugin(plugin); + const serializedAst = op_lint_create_serialized_ast(fileName, sourceText); + + try { + runPluginsForFile(fileName, serializedAst); + } finally { + // During testing we don't want to keep plugins around + state.installedPlugins.clear(); + } +} + +// TODO(bartlomieju): this is temporary, until we get plugins plumbed through +// the CLI linter +internals.runLintPlugin = runLintPlugin; diff --git a/cli/js/40_lint_selector.js b/cli/js/40_lint_selector.js new file mode 100644 index 0000000000..b78f7a5d0e --- /dev/null +++ b/cli/js/40_lint_selector.js @@ -0,0 +1,1014 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +// @ts-check + +/** @typedef {import("./40_lint_types.d.ts").LintState} LintState */ +/** @typedef {import("./40_lint_types.d.ts").AstContext} AstContext */ +/** @typedef {import("./40_lint_types.d.ts").MatchContext} MatchCtx */ +/** @typedef {import("./40_lint_types.d.ts").AttrExists} AttrExists */ +/** @typedef {import("./40_lint_types.d.ts").AttrBin} AttrBin */ +/** @typedef {import("./40_lint_types.d.ts").AttrSelector} AttrSelector */ +/** @typedef {import("./40_lint_types.d.ts").ElemSelector} ElemSelector */ +/** @typedef {import("./40_lint_types.d.ts").PseudoNthChild} PseudoNthChild */ +/** @typedef {import("./40_lint_types.d.ts").PseudoHas} PseudoHas */ +/** @typedef {import("./40_lint_types.d.ts").PseudoNot} PseudoNot */ +/** @typedef {import("./40_lint_types.d.ts").Relation} SRelation */ +/** @typedef {import("./40_lint_types.d.ts").Selector} Selector */ +/** @typedef {import("./40_lint_types.d.ts").SelectorParseCtx} SelectorParseCtx */ +/** @typedef {import("./40_lint_types.d.ts").NextFn} NextFn */ +/** @typedef {import("./40_lint_types.d.ts").MatcherFn} MatcherFn */ +/** @typedef {import("./40_lint_types.d.ts").TransformFn} Transformer */ + +const Char = { + Tab: 9, + Space: 32, + Bang: 33, + DoubleQuote: 34, + Quote: 39, + BraceOpen: 40, + BraceClose: 41, + Plus: 43, + Comma: 44, + Minus: 45, + Dot: 46, + Slash: 47, + n0: 48, + n9: 57, + Colon: 58, + Less: 60, + Equal: 61, + Greater: 62, + A: 65, + Z: 90, + BracketOpen: 91, + BackSlash: 92, + BracketClose: 93, + Underscore: 95, + a: 97, + z: 122, + Tilde: 126, +}; + +export const Token = { + EOF: 0, + Word: 1, + Space: 2, + Op: 3, + Colon: 4, + Comma: 7, + BraceOpen: 8, + BraceClose: 9, + BracketOpen: 10, + BracketClose: 11, + String: 12, + Number: 13, + Bool: 14, + Null: 15, + Undefined: 16, + Dot: 17, + Minus: 18, +}; + +export const BinOp = { + /** [attr="value"] or [attr=value] */ + Equal: 1, + /** [attr!="value"] or [attr!=value] */ + NotEqual:
2, + /** [attr>1] */ + Greater: 3, + /** [attr>=1] */ + GreaterThan: 4, + /** [attr<1] */ + Less: 5, + /** [attr<=1] */ + LessThan: 6, + Tilde: 7, + Plus: 8, + Space: 9, +}; + +/** + * @param {string} s + * @returns {number} + */ +function getAttrOp(s) { + switch (s) { + case "=": + return BinOp.Equal; + case "!=": + return BinOp.NotEqual; + case ">": + return BinOp.Greater; + case ">=": + return BinOp.GreaterThan; + case "<": + return BinOp.Less; + case "<=": + return BinOp.LessThan; + case "~": + return BinOp.Tilde; + case "+": + return BinOp.Plus; + default: + throw new Error(`Unknown attribute operator: '${s}'`); + } +} + +export class Lexer { + token = Token.Word; + start = 0; + end = 0; + ch = 0; + i = -1; + + value = ""; + + /** + * @param {string} input + */ + constructor(input) { + this.input = input; + this.step(); + this.next(); + } + + /** + * @param {number} token + */ + expect(token) { + if (this.token !== token) { + throw new Error( + `Expected token '${token}', but got '${this.token}'.\n\n${this.input}\n${ + " ".repeat(this.i) + }^`, + ); + } + } + + /** + * @param {number} token + */ + readAsWordUntil(token) { + const s = this.i; + while (this.token !== Token.EOF && this.token !== token) { + this.next(); + } + + this.start = s; + this.end = this.i - 1; + this.value = this.getSlice(); + } + + getSlice() { + return this.input.slice(this.start, this.end); + } + + step() { + this.i++; + if (this.i >= this.input.length) { + this.ch = -1; + } else { + this.ch = this.input.charCodeAt(this.i); + } + } + + next() { + this.value = ""; + + if (this.i >= this.input.length) { + this.token = Token.EOF; + return; + } + + // console.log( + // "NEXT", + // this.input, + // this.i, + // JSON.stringify(String.fromCharCode(this.ch)), + // ); + + while (true) { + switch (this.ch) { + case Char.Space: + while (this.isWhiteSpace()) { + this.step(); + } + + // Check if space preceded operator + if (this.isOpContinue()) { + continue; + } + + this.token = Token.Space; + return; + case Char.BracketOpen: + this.token = Token.BracketOpen; + this.step(); + return; + case Char.BracketClose: + this.token = Token.BracketClose; + this.step(); + return; + case Char.BraceOpen: + this.token = Token.BraceOpen; + this.step(); + return; + case Char.BraceClose: + this.token = Token.BraceClose; + this.step(); + return; + case Char.Colon: + this.token = Token.Colon; + this.step(); + return; + case Char.Comma: + this.token = Token.Comma; + this.step(); + return; + case Char.Dot: + this.token = Token.Dot; + this.step(); + return; + case Char.Minus: + this.token = Token.Minus; + this.step(); + return; + + case Char.Plus: + case Char.Tilde: + case Char.Greater: + case Char.Equal: + case Char.Less: + case Char.Bang: { + this.token = Token.Op; + this.start = this.i; + this.step(); + + while (this.isOpContinue()) { + this.step(); + } + + this.end = this.i; + this.value = this.getSlice(); + + // Consume remaining space + while (this.isWhiteSpace()) { + this.step(); + } + + return; + } + + case Char.Quote: + case Char.DoubleQuote: { + this.token = Token.String; + const ch = this.ch; + + this.step(); + this.start = this.i; + + while (this.ch > 0 && this.ch !== ch) { + this.step(); + } + + this.end = this.i; + this.value = this.getSlice(); + this.step(); + + return; + } + + default: + this.start = this.i; + this.step(); + + while (this.isWordContinue()) { + this.step(); + } + + this.end = this.i; + this.value = this.getSlice(); + this.token = Token.Word; + return; + } + } + } + + isWordContinue() { + const ch = this.ch; + 
switch (ch) { + case Char.Minus: + case Char.Underscore: + return true; + default: + return (ch >= Char.a && ch <= Char.z) || + (ch >= Char.A && ch <= Char.Z) || + (ch >= Char.n0 && ch <= Char.n9); + } + } + + isOpContinue() { + const ch = this.ch; + switch (ch) { + case Char.Equal: + case Char.Bang: + case Char.Greater: + case Char.Less: + case Char.Tilde: + case Char.Plus: + return true; + default: + return false; + } + } + + isWhiteSpace() { + return this.ch === Char.Space || this.ch === Char.Tab; + } +} + +const NUMBER_REG = /^(\d+\.)?\d+$/; +const BIGINT_REG = /^\d+n$/; + +/** + * @param {string} raw + * @returns {any} + */ +function getFromRawValue(raw) { + switch (raw) { + case "true": + return true; + case "false": + return false; + case "null": + return null; + case "undefined": + return undefined; + default: + if (raw.startsWith("'") && raw.endsWith("'")) { + if (raw.length === 2) return ""; + return raw.slice(1, -1); + } else if (raw.startsWith('"') && raw.endsWith('"')) { + if (raw.length === 2) return ""; + return raw.slice(1, -1); + } else if (raw.startsWith("/")) { + const end = raw.lastIndexOf("/"); + if (end === -1) throw new Error(`Invalid RegExp pattern: ${raw}`); + const pattern = raw.slice(1, end); + const flags = end < raw.length - 1 ? raw.slice(end + 1) : undefined; + return new RegExp(pattern, flags); + } else if (NUMBER_REG.test(raw)) { + return Number(raw); + } else if (BIGINT_REG.test(raw)) { + return BigInt(raw.slice(0, -1)); + } + + return raw; + } +} + +export const ELEM_NODE = 1; +export const RELATION_NODE = 2; +export const ATTR_EXISTS_NODE = 3; +export const ATTR_BIN_NODE = 4; +export const PSEUDO_NTH_CHILD = 5; +export const PSEUDO_HAS = 6; +export const PSEUDO_NOT = 7; +export const PSEUDO_FIRST_CHILD = 8; +export const PSEUDO_LAST_CHILD = 9; + +/** + * Parse out all unique selectors of a selector list. + * @param {string} input + * @returns {string[]} + */ +export function splitSelectors(input) { + /** @type {string[]} */ + const out = []; + + let last = 0; + let depth = 0; + for (let i = 0; i < input.length; i++) { + const ch = input.charCodeAt(i); + switch (ch) { + case Char.BraceOpen: + depth++; + break; + case Char.BraceClose: + depth--; + break; + case Char.Comma: + if (depth === 0) { + out.push(input.slice(last, i).trim()); + last = i + 1; + } + break; + } + } + + if (last < input.length) { + out.push(input.slice(last).trim()); + } + + return out; +} + +/** + * @param {string} input + * @param {Transformer} toElem + * @param {Transformer} toAttr + * @returns {Selector[]} + */ +export function parseSelector(input, toElem, toAttr) { + /** @type {Selector[]} */ + const result = []; + + /** @type {Selector[]} */ + const stack = [[]]; + + const lex = new Lexer(input); + + // Some subselectors like `:nth-child(.. of <selector>)` must have + // a single selector instead of a selector list. + let throwOnComma = false; + + while (lex.token !== Token.EOF) { + const current = /** @type {Selector} */ (stack.at(-1)); + + if (lex.token === Token.Word) { + const value = lex.value; + const wildcard = value === "*"; + + const elem = !wildcard ?
toElem(value) : 0; + current.push({ + type: ELEM_NODE, + elem, + wildcard, + }); + lex.next(); + + continue; + } else if (lex.token === Token.Space) { + lex.next(); + + if (lex.token === Token.Word) { + current.push({ + type: RELATION_NODE, + op: BinOp.Space, + }); + } + + continue; + } else if (lex.token === Token.BracketOpen) { + lex.next(); + lex.expect(Token.Word); + + // Check for value comparison + const prop = [toAttr(lex.value)]; + lex.next(); + + while (lex.token === Token.Dot) { + lex.next(); + lex.expect(Token.Word); + + prop.push(toAttr(lex.value)); + lex.next(); + } + + if (lex.token === Token.Op) { + const op = getAttrOp(lex.value); + lex.readAsWordUntil(Token.BracketClose); + + const value = getFromRawValue(lex.value); + current.push({ type: ATTR_BIN_NODE, prop, op, value }); + } else { + current.push({ + type: ATTR_EXISTS_NODE, + prop, + }); + } + + lex.expect(Token.BracketClose); + lex.next(); + continue; + } else if (lex.token === Token.Colon) { + lex.next(); + lex.expect(Token.Word); + + switch (lex.value) { + case "first-child": + current.push({ + type: PSEUDO_FIRST_CHILD, + }); + break; + case "last-child": + current.push({ + type: PSEUDO_LAST_CHILD, + }); + break; + case "nth-child": { + lex.next(); + lex.expect(Token.BraceOpen); + lex.next(); + + let mul = 1; + let repeat = false; + let step = 0; + if (lex.token === Token.Minus) { + mul = -1; + lex.next(); + } + + lex.expect(Token.Word); + const value = lex.getSlice(); + + if (value.endsWith("n")) { + repeat = true; + step = +value.slice(0, -1) * mul; + } else { + step = +value * mul; + } + + lex.next(); + + /** @type {PseudoNthChild} */ + const node = { + type: PSEUDO_NTH_CHILD, + of: null, + op: null, + step, + stepOffset: 0, + repeat, + }; + current.push(node); + + if (lex.token === Token.Space) lex.next(); + + if (lex.token !== Token.BraceClose) { + if (lex.token === Token.Op) { + node.op = lex.value; + lex.next(); + + if (lex.token === Token.Space) lex.next(); + } else if (lex.token === Token.Minus) { + node.op = "-"; + lex.next(); + + if (lex.token === Token.Space) { + lex.next(); + } + } + + lex.expect(Token.Word); + node.stepOffset = +lex.value; + lex.next(); + + if (lex.token !== Token.BraceClose) { + lex.next(); // Space + + if (lex.token === Token.Word) { + if (/** @type {string} */ (lex.value) !== "of") { + throw new Error( + `Expected 'of' keyword in ':nth-child' but got: ${lex.value}`, + ); + } + + lex.next(); + lex.expect(Token.Space); + lex.next(); + throwOnComma = true; + stack.push([]); + } + + continue; + } + + lex.expect(Token.BraceClose); + } else if (!node.repeat) { + // :nth-child(2) -> step is actually stepOffset + node.stepOffset = node.step - 1; + node.step = 0; + } + + lex.next(); + + continue; + } + + case "has": + case "where": + case "is": { + lex.next(); + lex.expect(Token.BraceOpen); + lex.next(); + + current.push({ + type: PSEUDO_HAS, + selectors: [], + }); + stack.push([]); + + continue; + } + case "not": { + lex.next(); + lex.expect(Token.BraceOpen); + lex.next(); + + current.push({ + type: PSEUDO_NOT, + selectors: [], + }); + stack.push([]); + + continue; + } + default: + throw new Error(`Unknown pseudo selector: '${lex.value}'`); + } + } else if (lex.token === Token.Comma) { + if (throwOnComma) { + throw new Error(`Multiple selector arguments not supported here`); + } + + lex.next(); + if (lex.token === Token.Space) { + lex.next(); + } + + popSelector(result, stack); + stack.push([]); + continue; + } else if (lex.token === Token.BraceClose) { + throwOnComma = false; + 
popSelector(result, stack); + } else if (lex.token === Token.Op) { + current.push({ + type: RELATION_NODE, + op: getAttrOp(lex.value), + }); + } + + lex.next(); + } + + if (stack.length > 0) { + result.push(stack[0]); + } + + return result; +} + +/** + * @param {Selector[]} result + * @param {Selector[]} stack + */ +function popSelector(result, stack) { + const sel = /** @type {Selector} */ (stack.pop()); + + if (stack.length === 0) { + result.push(sel); + stack.push([]); + } else { + const prev = /** @type {Selector} */ (stack.at(-1)); + if (prev.length === 0) { + throw new Error(`Empty selector`); + } + + const node = prev.at(-1); + if (node === undefined) { + throw new Error(`Empty node`); + } + + if (node.type === PSEUDO_NTH_CHILD) { + node.of = sel; + } else if (node.type === PSEUDO_HAS || node.type === PSEUDO_NOT) { + node.selectors.push(sel); + } else { + throw new Error(`Multiple selectors not allowed here`); + } + } +} + +const TRUE_FN = () => { + return true; +}; + +/** + * @param {Selector} selector + * @returns {MatcherFn} + */ +export function compileSelector(selector) { + /** @type {MatcherFn} */ + let fn = TRUE_FN; + + for (let i = 0; i < selector.length; i++) { + const node = selector[i]; + + switch (node.type) { + case ELEM_NODE: + fn = matchElem(node, fn); + break; + case RELATION_NODE: + switch (node.op) { + case BinOp.Space: + fn = matchDescendant(fn); + break; + case BinOp.Greater: + fn = matchChild(fn); + break; + case BinOp.Plus: + fn = matchAdjacent(fn); + break; + case BinOp.Tilde: + fn = matchFollowing(fn); + break; + default: + throw new Error(`Unknown relation op ${node.op}`); + } + break; + case ATTR_EXISTS_NODE: + fn = matchAttrExists(node, fn); + break; + case ATTR_BIN_NODE: + fn = matchAttrBin(node, fn); + break; + case PSEUDO_FIRST_CHILD: + fn = matchFirstChild(fn); + break; + case PSEUDO_LAST_CHILD: + fn = matchLastChild(fn); + break; + case PSEUDO_NTH_CHILD: + fn = matchNthChild(node, fn); + break; + case PSEUDO_HAS: + // FIXME + // fn = matchIs(part, fn); + throw new Error("TODO: :has"); + case PSEUDO_NOT: + fn = matchNot(node.selectors, fn); + break; + default: + // @ts-ignore error handling + // deno-lint-ignore no-console + console.log(node); + throw new Error(`Unknown selector node`); + } + } + + return fn; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchFirstChild(next) { + return (ctx, id) => { + const parent = ctx.getParent(id); + const first = ctx.getFirstChild(parent); + return first === id && next(ctx, first); + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchLastChild(next) { + return (ctx, id) => { + const parent = ctx.getParent(id); + const last = ctx.getLastChild(parent); + return last === id && next(ctx, id); + }; +} + +/** + * @param {PseudoNthChild} node + * @param {number} i + * @returns {number} + */ +function getNthAnB(node, i) { + const n = node.step * i; + + if (node.op === null) return n; + + switch (node.op) { + case "+": + return n + node.stepOffset; + case "-": + return n - node.stepOffset; + default: + throw new Error("Unsupported nth-child operator: " + node.op); + } +} + +/** + * @param {PseudoNthChild} node + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchNthChild(node, next) { + const ofSelector = node.of !== null ?
compileSelector(node.of) : TRUE_FN; + + // TODO(@marvinhagemeister): we should probably cache results here + + return (ctx, id) => { + const siblings = ctx.getSiblings(id); + const idx = siblings.indexOf(id); + + if (!node.repeat) { + return idx === node.stepOffset && next(ctx, id); + } + + for (let i = 0; i < siblings.length; i++) { + const n = getNthAnB(node, i); + + if (n > siblings.length - 1) return false; + + const search = siblings[n]; + if (id === search) { + if (node.of !== null && !ofSelector(ctx, id)) { + continue; + } else if (next(ctx, id)) { + return true; + } + } else if (n > idx) { + return false; + } + } + + return false; + }; +} + +/** + * @param {Selector[]} selectors + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchNot(selectors, next) { + /** @type {MatcherFn[]} */ + const compiled = []; + + for (let i = 0; i < selectors.length; i++) { + const sel = selectors[i]; + compiled.push(compileSelector(sel)); + } + + return (ctx, id) => { + for (let i = 0; i < compiled.length; i++) { + const fn = compiled[i]; + if (fn(ctx, id)) { + return false; + } + } + + return next(ctx, id); + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchDescendant(next) { + // TODO(@marvinhagemeister): we should probably cache results here + return (ctx, id) => { + let current = ctx.getParent(id); + while (current > 0) { + if (next(ctx, current)) { + return true; + } + + current = ctx.getParent(current); + } + + return false; + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchChild(next) { + return (ctx, id) => { + const parent = ctx.getParent(id); + if (parent < 0) return false; + + return next(ctx, parent); + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchAdjacent(next) { + return (ctx, id) => { + const siblings = ctx.getSiblings(id); + const idx = siblings.indexOf(id) - 1; + + if (idx < 0) return false; + + const prev = siblings[idx]; + return next(ctx, prev); + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchFollowing(next) { + return (ctx, id) => { + const siblings = ctx.getSiblings(id); + const idx = siblings.indexOf(id) - 1; + + if (idx < 0) return false; + + for (let i = idx; i >= 0; i--) { + const sib = siblings[i]; + if (next(ctx, sib)) return true; + } + + return false; + }; +} + +/** + * @param {ElemSelector} part + * @param {MatcherFn} next + * @returns {MatcherFn} + */ +function matchElem(part, next) { + return (ctx, id) => { + // Placeholder node cannot be matched + if (id === 0) return false; + // Wildcard always matches + else if (part.wildcard) return next(ctx, id); + // 0 means it's the placeholder node which + // can never be matched. + else if (part.elem === 0) return false; + + const type = ctx.getType(id); + if (type > 0 && type === part.elem) return next(ctx, id); + + return false; + }; +} + +/** + * @param {AttrExists} attr + * @param {MatcherFn} next + * @returns {MatcherFn} + */ +function matchAttrExists(attr, next) { + return (ctx, id) => { + return ctx.hasAttrPath(id, attr.prop, 0) ? 
next(ctx, id) : false; + }; +} + +/** + * @param {AttrBin} attr + * @param {MatcherFn} next + * @returns {MatcherFn} + */ +function matchAttrBin(attr, next) { + return (ctx, id) => { + if (!ctx.hasAttrPath(id, attr.prop, 0)) return false; + const value = ctx.getAttrPathValue(id, attr.prop, 0); + if (!matchAttrValue(attr, value)) return false; + return next(ctx, id); + }; +} + +/** + * @param {AttrBin} attr + * @param {*} value + * @returns {boolean} + */ +function matchAttrValue(attr, value) { + switch (attr.op) { + case BinOp.Equal: + return value === attr.value; + case BinOp.NotEqual: + return value !== attr.value; + case BinOp.Greater: + return typeof value === "number" && typeof attr.value === "number" && + value > attr.value; + case BinOp.GreaterThan: + return typeof value === "number" && typeof attr.value === "number" && + value >= attr.value; + case BinOp.Less: + return typeof value === "number" && typeof attr.value === "number" && + value < attr.value; + case BinOp.LessThan: + return typeof value === "number" && typeof attr.value === "number" && + value <= attr.value; + default: + return false; + } +} diff --git a/cli/js/40_lint_types.d.ts b/cli/js/40_lint_types.d.ts new file mode 100644 index 0000000000..7b06e36098 --- /dev/null +++ b/cli/js/40_lint_types.d.ts @@ -0,0 +1,132 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +export interface NodeFacade { + type: string; + range: [number, number]; + [key: string]: unknown; +} + +export interface AstContext { + buf: Uint8Array; + strTable: Map<number, string>; + strTableOffset: number; + rootOffset: number; + nodes: Map<number, NodeFacade>; + strByType: number[]; + strByProp: number[]; + typeByStr: Map<string, number>; + propByStr: Map<string, number>; + matcher: MatchContext; +} + +// TODO(@marvinhagemeister) Remove once we land "official" types +export interface RuleContext { + id: string; +} + +// TODO(@marvinhagemeister) Remove once we land "official" types +export interface LintRule { + create(ctx: RuleContext): Record<string, (node: unknown) => void>; + destroy?(ctx: RuleContext): void; +} + +// TODO(@marvinhagemeister) Remove once we land "official" types +export interface LintPlugin { + name: string; + rules: Record<string, LintRule>; +} + +export interface LintState { + plugins: LintPlugin[]; + installedPlugins: Set<string>; +} + +export type VisitorFn = (node: unknown) => void; + +export interface CompiledVisitor { + matcher: (ctx: MatchContext, offset: number) => boolean; + info: { enter: VisitorFn; exit: VisitorFn }; +} + +export interface AttrExists { + type: 3; + prop: number[]; +} + +export interface AttrBin { + type: 4; + prop: number[]; + op: number; + // deno-lint-ignore no-explicit-any + value: any; +} + +export type AttrSelector = AttrExists | AttrBin; + +export interface ElemSelector { + type: 1; + wildcard: boolean; + elem: number; +} + +export interface PseudoNthChild { + type: 5; + op: string | null; + step: number; + stepOffset: number; + of: Selector | null; + repeat: boolean; +} + +export interface PseudoHas { + type: 6; + selectors: Selector[]; +} +export interface PseudoNot { + type: 7; + selectors: Selector[]; +} +export interface PseudoFirstChild { + type: 8; +} +export interface PseudoLastChild { + type: 9; +} + +export interface Relation { + type: 2; + op: number; +} + +export type Selector = Array< + | ElemSelector + | Relation + | AttrExists + | AttrBin + | PseudoNthChild + | PseudoNot + | PseudoHas + | PseudoFirstChild + | PseudoLastChild +>; + +export interface SelectorParseCtx { + root: Selector; + current: Selector; +} + +export interface MatchContext { + getFirstChild(id: number):
number; + getLastChild(id: number): number; + getSiblings(id: number): number[]; + getParent(id: number): number; + getType(id: number): number; + hasAttrPath(id: number, propIds: number[], idx: number): boolean; + getAttrPathValue(id: number, propIds: number[], idx: number): unknown; +} + +export type NextFn = (ctx: MatchContext, id: number) => boolean; +export type MatcherFn = (ctx: MatchContext, id: number) => boolean; +export type TransformFn = (value: string) => number; + +export {}; diff --git a/cli/jsr.rs b/cli/jsr.rs index 767d304d60..acfbb1c8e2 100644 --- a/cli/jsr.rs +++ b/cli/jsr.rs @@ -1,7 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use crate::args::jsr_url; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; use dashmap::DashMap; use deno_core::serde_json; use deno_graph::packages::JsrPackageInfo; @@ -19,11 +19,11 @@ pub struct JsrFetchResolver { /// It can be large and we don't want to store it. info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>, info_by_name: DashMap<StackString, Option<Arc<JsrPackageInfo>>>, - file_fetcher: Arc<FileFetcher>, + file_fetcher: Arc<CliFileFetcher>, } impl JsrFetchResolver { - pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { + pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self { Self { nv_by_req: Default::default(), info_by_nv: Default::default(), diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index c128372dcd..8fb3454bc8 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -36,6 +36,8 @@ use deno_semver::package::PackageNv; use deno_semver::package::PackageNvReference; use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; +use deno_semver::SmallStackString; +use deno_semver::StackString; use deno_semver::Version; use import_map::ImportMap; use node_resolver::NodeResolutionKind; @@ -270,13 +272,24 @@ impl<'a> TsResponseImportMapper<'a> { } } + if specifier.scheme() == "node" { + return Some(specifier.to_string()); + } + if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str()) { let mut segments = jsr_path.split('/'); let name = if jsr_path.starts_with('@') { - format!("{}/{}", segments.next()?, segments.next()?) + let scope = segments.next()?; + let name = segments.next()?; + capacity_builder::StringBuilder::<StackString>::build(|builder| { + builder.append(scope); + builder.append("/"); + builder.append(name); + }) + .unwrap() } else { - segments.next()?.to_string() + StackString::from(segments.next()?)
}; let version = Version::parse_standard(segments.next()?).ok()?; let nv = PackageNv { name, version }; @@ -286,7 +299,9 @@ impl<'a> TsResponseImportMapper<'a> { &path, Some(&self.file_referrer), )?; - let sub_path = (export != ".").then_some(export); + let sub_path = (export != ".") + .then_some(export) + .map(SmallStackString::from_string); let mut req = None; req = req.or_else(|| { let import_map = self.maybe_import_map?; @@ -599,18 +614,24 @@ fn try_reverse_map_package_json_exports( /// For a set of tsc changes, can them for any that contain something that looks /// like an import and rewrite the import specifier to include the extension pub fn fix_ts_import_changes( - referrer: &ModuleSpecifier, - resolution_mode: ResolutionMode, changes: &[tsc::FileTextChanges], language_server: &language_server::Inner, ) -> Result, AnyError> { - let import_mapper = language_server.get_ts_response_import_mapper(referrer); let mut r = Vec::new(); for change in changes { + let Ok(referrer) = ModuleSpecifier::parse(&change.file_name) else { + continue; + }; + let referrer_doc = language_server.get_asset_or_document(&referrer).ok(); + let resolution_mode = referrer_doc + .as_ref() + .map(|d| d.resolution_mode()) + .unwrap_or(ResolutionMode::Import); + let import_mapper = + language_server.get_ts_response_import_mapper(&referrer); let mut text_changes = Vec::new(); for text_change in &change.text_changes { let lines = text_change.new_text.split('\n'); - let new_lines: Vec = lines .map(|line| { // This assumes that there's only one import per line. @@ -618,7 +639,7 @@ pub fn fix_ts_import_changes( let specifier = captures.iter().skip(1).find_map(|s| s).unwrap().as_str(); if let Some(new_specifier) = import_mapper - .check_unresolved_specifier(specifier, referrer, resolution_mode) + .check_unresolved_specifier(specifier, &referrer, resolution_mode) { line.replace(specifier, &new_specifier) } else { diff --git a/cli/lsp/cache.rs b/cli/lsp/cache.rs index fbf9ea6f1b..24a55d495c 100644 --- a/cli/lsp/cache.rs +++ b/cli/lsp/cache.rs @@ -7,6 +7,7 @@ use crate::cache::LocalLspHttpCache; use crate::lsp::config::Config; use crate::lsp::logging::lsp_log; use crate::lsp::logging::lsp_warn; +use crate::sys::CliSys; use deno_core::url::Url; use deno_core::ModuleSpecifier; @@ -91,12 +92,11 @@ impl LspCache { }) .ok() }); - let deno_dir = DenoDir::new(global_cache_path) + let sys = CliSys::default(); + let deno_dir = DenoDir::new(sys.clone(), global_cache_path) .expect("should be infallible with absolute custom root"); - let global = Arc::new(GlobalHttpCache::new( - deno_dir.remote_folder_path(), - crate::cache::RealDenoCacheEnv, - )); + let global = + Arc::new(GlobalHttpCache::new(sys, deno_dir.remote_folder_path())); Self { deno_dir, global, diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index a629757788..ff4c2978d5 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -9,8 +9,6 @@ use deno_config::deno_json::LintConfig; use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::TestConfig; use deno_config::deno_json::TsConfig; -use deno_config::fs::DenoConfigFs; -use deno_config::fs::RealDenoConfigFs; use deno_config::glob::FilePatterns; use deno_config::glob::PathOrPatternSet; use deno_config::workspace::CreateResolverOptions; @@ -41,7 +39,6 @@ use deno_path_util::url_to_file_path; use deno_runtime::deno_node::PackageJson; use indexmap::IndexSet; use lsp_types::ClientCapabilities; -use std::borrow::Cow; use std::collections::BTreeMap; use std::collections::BTreeSet; use 
std::collections::HashMap; @@ -63,10 +60,11 @@ use crate::args::ConfigFile; use crate::args::LintFlags; use crate::args::LintOptions; use crate::cache::FastInsecureHasher; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; use crate::lsp::logging::lsp_warn; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; +use crate::sys::CliSys; use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinterOptions; use crate::tools::lint::LintRuleProvider; @@ -459,6 +457,19 @@ impl Default for LanguagePreferences { } } +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct SuggestionActionsSettings { + #[serde(default = "is_true")] + pub enabled: bool, +} + +impl Default for SuggestionActionsSettings { + fn default() -> Self { + SuggestionActionsSettings { enabled: true } + } +} + #[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct UpdateImportsOnFileMoveOptions { @@ -490,6 +501,8 @@ pub struct LanguageWorkspaceSettings { #[serde(default)] pub suggest: CompletionSettings, #[serde(default)] + pub suggestion_actions: SuggestionActionsSettings, + #[serde(default)] pub update_imports_on_file_move: UpdateImportsOnFileMoveOptions, } @@ -1203,9 +1216,8 @@ impl ConfigData { specified_config: Option<&Path>, scope: &ModuleSpecifier, settings: &Settings, - file_fetcher: &Arc, + file_fetcher: &Arc, // sync requirement is because the lsp requires sync - cached_deno_config_fs: &(dyn DenoConfigFs + Sync), deno_json_cache: &(dyn DenoJsonCache + Sync), pkg_json_cache: &(dyn PackageJsonCache + Sync), workspace_cache: &(dyn WorkspaceCache + Sync), @@ -1215,6 +1227,7 @@ impl ConfigData { Ok(scope_dir_path) => { let paths = [scope_dir_path]; WorkspaceDirectory::discover( + &CliSys::default(), match specified_config { Some(config_path) => { deno_config::workspace::WorkspaceDiscoverStart::ConfigFile( @@ -1226,7 +1239,6 @@ impl ConfigData { } }, &WorkspaceDiscoverOptions { - fs: cached_deno_config_fs, additional_config_file_names: &[], deno_json_cache: Some(deno_json_cache), pkg_json_cache: Some(pkg_json_cache), @@ -1298,7 +1310,7 @@ impl ConfigData { member_dir: Arc, scope: Arc, settings: &Settings, - file_fetcher: Option<&Arc>, + file_fetcher: Option<&Arc>, ) -> Self { let (settings, workspace_folder) = settings.get_for_specifier(&scope); let mut watched_files = HashMap::with_capacity(10); @@ -1603,9 +1615,7 @@ impl ConfigData { || unstable.contains("sloppy-imports"); let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { Arc::new(CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new_without_stat_cache(Arc::new( - deno_runtime::deno_fs::RealFs, - )), + SloppyImportsCachedFs::new_without_stat_cache(CliSys::default()), )) }); let resolver = Arc::new(resolver); @@ -1819,13 +1829,12 @@ impl ConfigTree { &mut self, settings: &Settings, workspace_files: &IndexSet, - file_fetcher: &Arc, + file_fetcher: &Arc, ) { lsp_log!("Refreshing configuration tree..."); // since we're resolving a workspace multiple times in different // folders, we want to cache all the lookups and config files across // ConfigData::load calls - let cached_fs = CachedDenoConfigFs::default(); let deno_json_cache = DenoJsonMemCache::default(); let pkg_json_cache = PackageJsonMemCache::default(); let workspace_cache = WorkspaceMemCache::default(); @@ -1850,7 +1859,6 @@ impl ConfigTree { folder_uri, settings, file_fetcher, - &cached_fs, &deno_json_cache, 
&pkg_json_cache, &workspace_cache, @@ -1881,7 +1889,6 @@ impl ConfigTree { &scope, settings, file_fetcher, - &cached_fs, &deno_json_cache, &pkg_json_cache, &workspace_cache, @@ -1898,7 +1905,6 @@ impl ConfigTree { member_scope, settings, file_fetcher, - &cached_fs, &deno_json_cache, &pkg_json_cache, &workspace_cache, @@ -1913,21 +1919,24 @@ impl ConfigTree { #[cfg(test)] pub async fn inject_config_file(&mut self, config_file: ConfigFile) { + use sys_traits::FsCreateDirAll; + use sys_traits::FsWrite; + let scope = config_file.specifier.join(".").unwrap(); let json_text = serde_json::to_string(&config_file.json).unwrap(); - let test_fs = deno_runtime::deno_fs::InMemoryFs::default(); + let memory_sys = sys_traits::impls::InMemorySys::default(); let config_path = url_to_file_path(&config_file.specifier).unwrap(); - test_fs.setup_text_files(vec![( - config_path.to_string_lossy().to_string(), - json_text, - )]); + memory_sys + .fs_create_dir_all(config_path.parent().unwrap()) + .unwrap(); + memory_sys.fs_write(&config_path, json_text).unwrap(); let workspace_dir = Arc::new( WorkspaceDirectory::discover( + &memory_sys, deno_config::workspace::WorkspaceDiscoverStart::ConfigFile( &config_path, ), &deno_config::workspace::WorkspaceDiscoverOptions { - fs: &crate::args::deno_json::DenoConfigFsAdapter(&test_fs), ..Default::default() }, ) @@ -2000,11 +2009,14 @@ fn resolve_lockfile_from_path( lockfile_path: PathBuf, frozen: bool, ) -> Option { - match CliLockfile::read_from_path(CliLockfileReadFromPathOptions { - file_path: lockfile_path, - frozen, - skip_write: false, - }) { + match CliLockfile::read_from_path( + &CliSys::default(), + CliLockfileReadFromPathOptions { + file_path: lockfile_path, + frozen, + skip_write: false, + }, + ) { Ok(value) => { if value.filename.exists() { if let Ok(specifier) = ModuleSpecifier::from_file_path(&value.filename) @@ -2061,78 +2073,6 @@ impl deno_config::workspace::WorkspaceCache for WorkspaceMemCache { } } -#[derive(Default)] -struct CachedFsItems { - items: HashMap>, -} - -impl CachedFsItems { - pub fn get( - &mut self, - path: &Path, - action: impl FnOnce(&Path) -> Result, - ) -> Result { - let value = if let Some(value) = self.items.get(path) { - value - } else { - let value = action(path); - // just in case this gets really large for some reason - if self.items.len() == 16_384 { - return value; - } - self.items.insert(path.to_owned(), value); - self.items.get(path).unwrap() - }; - value - .as_ref() - .map(|v| (*v).clone()) - .map_err(|e| std::io::Error::new(e.kind(), e.to_string())) - } -} - -#[derive(Default)] -struct InnerData { - stat_calls: CachedFsItems, - read_to_string_calls: CachedFsItems>, -} - -#[derive(Default)] -struct CachedDenoConfigFs(Mutex); - -impl DenoConfigFs for CachedDenoConfigFs { - fn stat_sync( - &self, - path: &Path, - ) -> Result { - self - .0 - .lock() - .stat_calls - .get(path, |path| RealDenoConfigFs.stat_sync(path)) - } - - fn read_to_string_lossy( - &self, - path: &Path, - ) -> Result, std::io::Error> { - self - .0 - .lock() - .read_to_string_calls - .get(path, |path| RealDenoConfigFs.read_to_string_lossy(path)) - } - - fn read_dir( - &self, - path: &Path, - ) -> Result, std::io::Error> { - // no need to cache these because the workspace cache will ensure - // we only do read_dir calls once (read_dirs are only used for - // npm workspace resolution) - RealDenoConfigFs.read_dir(path) - } -} - #[cfg(test)] mod tests { use deno_config::deno_json::ConfigParseOptions; @@ -2292,6 +2232,7 @@ mod tests { enabled: true, }, }, + 
suggestion_actions: SuggestionActionsSettings { enabled: true }, update_imports_on_file_move: UpdateImportsOnFileMoveOptions { enabled: UpdateImportsOnFileMoveEnabled::Prompt } @@ -2338,6 +2279,7 @@ mod tests { enabled: true, }, }, + suggestion_actions: SuggestionActionsSettings { enabled: true }, update_imports_on_file_move: UpdateImportsOnFileMoveOptions { enabled: UpdateImportsOnFileMoveEnabled::Prompt } diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 01fc3bf69e..af6fdf53a4 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -21,9 +21,11 @@ use crate::graph_util::enhanced_resolution_error_message; use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; +use crate::sys::CliSys; use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinterOptions; use crate::tools::lint::LintRuleProvider; +use crate::tsc::DiagnosticCategory; use crate::util::path::to_percent_decoded_str; use deno_ast::MediaType; @@ -44,9 +46,9 @@ use deno_graph::source::ResolveError; use deno_graph::Resolution; use deno_graph::ResolutionError; use deno_graph::SpecifierError; +use deno_lint::linter::LintConfig as DenoLintConfig; use deno_resolver::sloppy_imports::SloppyImportsResolution; use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; -use deno_runtime::deno_fs; use deno_runtime::deno_node; use deno_runtime::tokio_util::create_basic_runtime; use deno_semver::jsr::JsrPackageReqReference; @@ -833,7 +835,7 @@ fn generate_lint_diagnostics( lint_rule_provider.resolve_lint_rules(Default::default(), None) }, fix: false, - deno_lint_config: deno_lint::linter::LintConfig { + deno_lint_config: DenoLintConfig { default_jsx_factory: None, default_jsx_fragment_factory: None, }, @@ -906,8 +908,22 @@ async fn generate_ts_diagnostics( } else { Default::default() }; - for (specifier_str, ts_json_diagnostics) in ts_diagnostics_map { + for (specifier_str, mut ts_json_diagnostics) in ts_diagnostics_map { let specifier = resolve_url(&specifier_str)?; + let suggestion_actions_settings = snapshot + .config + .language_settings_for_specifier(&specifier) + .map(|s| s.suggestion_actions.clone()) + .unwrap_or_default(); + if !suggestion_actions_settings.enabled { + ts_json_diagnostics.retain(|d| { + d.category != DiagnosticCategory::Suggestion + // Still show deprecated and unused diagnostics. 
+ // https://github.com/microsoft/vscode/blob/ce50bd4876af457f64d83cfd956bc916535285f4/extensions/typescript-language-features/src/languageFeatures/diagnostics.ts#L113-L114 + || d.reports_deprecated == Some(true) + || d.reports_unnecessary == Some(true) + }); + } let version = snapshot .documents .get(&specifier) @@ -1265,7 +1281,7 @@ impl DenoDiagnostic { Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("npm package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))), Self::NoLocal(specifier) => { let maybe_sloppy_resolution = CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs)) + SloppyImportsCachedFs::new(CliSys::default()) ).resolve(specifier, SloppyImportsResolutionKind::Execution); let data = maybe_sloppy_resolution.as_ref().map(|res| { json!({ diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index bdb64c9da3..d15cfe5a6c 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -251,6 +251,13 @@ impl AssetOrDocument { pub fn document_lsp_version(&self) -> Option { self.document().and_then(|d| d.maybe_lsp_version()) } + + pub fn resolution_mode(&self) -> ResolutionMode { + match self { + AssetOrDocument::Asset(_) => ResolutionMode::Import, + AssetOrDocument::Document(d) => d.resolution_mode(), + } + } } type ModuleResult = Result; diff --git a/cli/lsp/jsr.rs b/cli/lsp/jsr.rs index ab570f6348..fc30de2ae0 100644 --- a/cli/lsp/jsr.rs +++ b/cli/lsp/jsr.rs @@ -2,7 +2,8 @@ use crate::args::jsr_api_url; use crate::args::jsr_url; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; +use crate::file_fetcher::TextDecodedFile; use crate::jsr::partial_jsr_package_version_info_from_slice; use crate::jsr::JsrFetchResolver; use dashmap::DashMap; @@ -17,6 +18,7 @@ use deno_graph::ModuleSpecifier; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use deno_semver::StackString; use deno_semver::Version; use serde::Deserialize; use std::collections::HashMap; @@ -32,8 +34,8 @@ pub struct JsrCacheResolver { /// The `module_graph` fields of the version infos should be forcibly absent. /// It can be large and we don't want to store it. info_by_nv: DashMap>>, - info_by_name: DashMap>>, - workspace_scope_by_name: HashMap, + info_by_name: DashMap>>, + workspace_scope_by_name: HashMap, cache: Arc, } @@ -58,7 +60,7 @@ impl JsrCacheResolver { continue; }; let nv = PackageNv { - name: jsr_pkg_config.name.clone(), + name: jsr_pkg_config.name.as_str().into(), version: version.clone(), }; info_by_name.insert( @@ -124,8 +126,8 @@ impl JsrCacheResolver { return nv.value().clone(); } let maybe_get_nv = || { - let name = req.name.clone(); - let package_info = self.package_info(&name)?; + let name = &req.name; + let package_info = self.package_info(name)?; // Find the first matching version of the package which is cached. 
let mut versions = package_info.versions.keys().collect::>(); versions.sort(); @@ -143,7 +145,10 @@ impl JsrCacheResolver { self.package_version_info(&nv).is_some() }) .cloned()?; - Some(PackageNv { name, version }) + Some(PackageNv { + name: name.clone(), + version, + }) }; let nv = maybe_get_nv(); self.nv_by_req.insert(req.clone(), nv.clone()); @@ -215,7 +220,10 @@ impl JsrCacheResolver { None } - pub fn package_info(&self, name: &str) -> Option> { + pub fn package_info( + &self, + name: &StackString, + ) -> Option> { if let Some(info) = self.info_by_name.get(name) { return info.value().clone(); } @@ -225,7 +233,7 @@ impl JsrCacheResolver { serde_json::from_slice::(&meta_bytes).ok() }; let info = read_cached_package_info().map(Arc::new); - self.info_by_name.insert(name.to_string(), info.clone()); + self.info_by_name.insert(name.clone(), info.clone()); info } @@ -267,7 +275,7 @@ fn read_cached_url( #[derive(Debug)] pub struct CliJsrSearchApi { - file_fetcher: Arc, + file_fetcher: Arc, resolver: JsrFetchResolver, search_cache: DashMap>>, versions_cache: DashMap>>, @@ -275,7 +283,7 @@ pub struct CliJsrSearchApi { } impl CliJsrSearchApi { - pub fn new(file_fetcher: Arc) -> Self { + pub fn new(file_fetcher: Arc) -> Self { let resolver = JsrFetchResolver::new(file_fetcher.clone()); Self { file_fetcher, @@ -309,10 +317,8 @@ impl PackageSearchApi for CliJsrSearchApi { let file_fetcher = self.file_fetcher.clone(); // spawn due to the lsp's `Send` requirement let file = deno_core::unsync::spawn(async move { - file_fetcher - .fetch_bypass_permissions(&search_url) - .await? - .into_text_decoded() + let file = file_fetcher.fetch_bypass_permissions(&search_url).await?; + TextDecodedFile::decode(file) }) .await??; let names = Arc::new(parse_jsr_search_response(&file.source)?); diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 839d28469e..9ab1d9786c 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -1,6 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
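[Note: the `JsrCacheResolver::package_info` lookup above memoizes by package name and deliberately caches `None` results, so repeated queries for unknown or uncached packages never touch the cache directory again. A minimal sketch of that pattern, using a hypothetical `PackageInfo` type and a caller-supplied loader in place of the real `JsrPackageInfo` and HTTP cache:

```rust
use std::sync::Arc;

use dashmap::DashMap;

// Hypothetical stand-in for deno_graph's JsrPackageInfo.
struct PackageInfo {
  versions: Vec<String>,
}

struct PackageInfoCache {
  // Caches hits *and* misses, mirroring
  // `info_by_name: DashMap<StackString, Option<Arc<JsrPackageInfo>>>`.
  info_by_name: DashMap<String, Option<Arc<PackageInfo>>>,
}

impl PackageInfoCache {
  fn package_info(
    &self,
    name: &str,
    load: impl FnOnce(&str) -> Option<PackageInfo>,
  ) -> Option<Arc<PackageInfo>> {
    if let Some(info) = self.info_by_name.get(name) {
      return info.value().clone(); // cached hit or cached miss
    }
    let info = load(name).map(Arc::new);
    self.info_by_name.insert(name.to_string(), info.clone());
    info
  }
}
```
]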
use deno_ast::MediaType; +use deno_cache_dir::file_fetcher::CacheSetting; use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceDiscoverOptions; use deno_core::anyhow::anyhow; @@ -95,19 +96,19 @@ use crate::args::create_default_npmrc; use crate::args::get_root_cert_store; use crate::args::has_flag_env_var; use crate::args::CaData; -use crate::args::CacheSetting; use crate::args::CliOptions; use crate::args::Flags; use crate::args::InternalFlags; use crate::args::UnstableFmtOptions; use crate::factory::CliFactory; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; use crate::graph_util; use crate::http_util::HttpClientProvider; use crate::lsp::config::ConfigWatchedFileType; use crate::lsp::logging::init_log_file; use crate::lsp::tsc::file_text_changes_to_workspace_edit; use crate::lsp::urls::LspUrlKind; +use crate::sys::CliSys; use crate::tools::fmt::format_file; use crate::tools::fmt::format_parsed_source; use crate::tools::upgrade::check_for_upgrades_for_lsp; @@ -279,7 +280,7 @@ impl LanguageServer { .await?; graph_util::graph_valid( &graph, - factory.fs(), + &CliSys::default(), &roots, graph_util::GraphValidOptions { kind: GraphKind::All, @@ -958,15 +959,16 @@ impl Inner { } async fn refresh_config_tree(&mut self) { - let mut file_fetcher = FileFetcher::new( + let file_fetcher = CliFileFetcher::new( self.cache.global().clone(), - CacheSetting::RespectHeaders, - true, self.http_client_provider.clone(), + CliSys::default(), Default::default(), None, + true, + CacheSetting::RespectHeaders, + super::logging::lsp_log_level(), ); - file_fetcher.set_download_log_level(super::logging::lsp_log_level()); let file_fetcher = Arc::new(file_fetcher); self .config @@ -1855,20 +1857,12 @@ impl Inner { } let changes = if code_action_data.fix_id == "fixMissingImport" { - fix_ts_import_changes( - &code_action_data.specifier, - maybe_asset_or_doc - .as_ref() - .and_then(|d| d.document()) - .map(|d| d.resolution_mode()) - .unwrap_or(ResolutionMode::Import), - &combined_code_actions.changes, - self, - ) - .map_err(|err| { - error!("Unable to remap changes: {:#}", err); - LspError::internal_error() - })? + fix_ts_import_changes(&combined_code_actions.changes, self).map_err( + |err| { + error!("Unable to remap changes: {:#}", err); + LspError::internal_error() + }, + )? } else { combined_code_actions.changes }; @@ -1912,20 +1906,16 @@ impl Inner { asset_or_doc.scope().cloned(), ) .await?; - if kind_suffix == ".rewrite.function.returnType" { - refactor_edit_info.edits = fix_ts_import_changes( - &action_data.specifier, - asset_or_doc - .document() - .map(|d| d.resolution_mode()) - .unwrap_or(ResolutionMode::Import), - &refactor_edit_info.edits, - self, - ) - .map_err(|err| { - error!("Unable to remap changes: {:#}", err); - LspError::internal_error() - })? + if kind_suffix == ".rewrite.function.returnType" + || kind_suffix == ".move.newFile" + { + refactor_edit_info.edits = + fix_ts_import_changes(&refactor_edit_info.edits, self).map_err( + |err| { + error!("Unable to remap changes: {:#}", err); + LspError::internal_error() + }, + )? 
} code_action.edit = refactor_edit_info.to_workspace_edit(self)?; code_action @@ -3624,11 +3614,11 @@ impl Inner { let workspace = match config_data { Some(d) => d.member_dir.clone(), None => Arc::new(WorkspaceDirectory::discover( + &CliSys::default(), deno_config::workspace::WorkspaceDiscoverStart::Paths(&[ initial_cwd.clone() ]), &WorkspaceDiscoverOptions { - fs: Default::default(), // use real fs, deno_json_cache: None, pkg_json_cache: None, workspace_cache: None, @@ -3645,6 +3635,7 @@ impl Inner { )?), }; let cli_options = CliOptions::new( + &CliSys::default(), Arc::new(Flags { internal: InternalFlags { cache_path: Some(self.cache.deno_dir().root.clone()), @@ -3793,7 +3784,7 @@ impl Inner { for (name, command) in scripts { result.push(TaskDefinition { name: name.clone(), - command: command.clone(), + command: Some(command.clone()), source_uri: url_to_uri(&package_json.specifier()) .map_err(|_| LspError::internal_error())?, }); diff --git a/cli/lsp/lsp_custom.rs b/cli/lsp/lsp_custom.rs index 74c6aca88b..8df4ba1d07 100644 --- a/cli/lsp/lsp_custom.rs +++ b/cli/lsp/lsp_custom.rs @@ -14,7 +14,7 @@ pub const LATEST_DIAGNOSTIC_BATCH_INDEX: &str = #[serde(rename_all = "camelCase")] pub struct TaskDefinition { pub name: String, - pub command: String, + pub command: Option, pub source_uri: lsp::Uri, } diff --git a/cli/lsp/npm.rs b/cli/lsp/npm.rs index 2decfc3429..18c7e2fccf 100644 --- a/cli/lsp/npm.rs +++ b/cli/lsp/npm.rs @@ -11,21 +11,22 @@ use serde::Deserialize; use std::sync::Arc; use crate::args::npm_registry_url; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; +use crate::file_fetcher::TextDecodedFile; use crate::npm::NpmFetchResolver; use super::search::PackageSearchApi; #[derive(Debug)] pub struct CliNpmSearchApi { - file_fetcher: Arc, + file_fetcher: Arc, resolver: NpmFetchResolver, search_cache: DashMap>>, versions_cache: DashMap>>, } impl CliNpmSearchApi { - pub fn new(file_fetcher: Arc) -> Self { + pub fn new(file_fetcher: Arc) -> Self { let resolver = NpmFetchResolver::new( file_fetcher.clone(), Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()), @@ -57,10 +58,8 @@ impl PackageSearchApi for CliNpmSearchApi { .append_pair("text", &format!("{} boost-exact:false", query)); let file_fetcher = self.file_fetcher.clone(); let file = deno_core::unsync::spawn(async move { - file_fetcher - .fetch_bypass_permissions(&search_url) - .await? 
- .into_text_decoded() + let file = file_fetcher.fetch_bypass_permissions(&search_url).await?; + TextDecodedFile::decode(file) }) .await??; let names = Arc::new(parse_npm_search_response(&file.source)?); diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs index ade353e683..c8dd7fa1a7 100644 --- a/cli/lsp/registries.rs +++ b/cli/lsp/registries.rs @@ -12,14 +12,16 @@ use super::path_to_regex::StringOrNumber; use super::path_to_regex::StringOrVec; use super::path_to_regex::Token; -use crate::args::CacheSetting; use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; +use crate::file_fetcher::CliFileFetcher; use crate::file_fetcher::FetchOptions; use crate::file_fetcher::FetchPermissionsOptionRef; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::TextDecodedFile; use crate::http_util::HttpClientProvider; +use crate::sys::CliSys; +use deno_cache_dir::file_fetcher::CacheSetting; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::serde::Deserialize; @@ -418,7 +420,7 @@ enum VariableItems { pub struct ModuleRegistry { origins: HashMap>, pub location: PathBuf, - pub file_fetcher: Arc, + pub file_fetcher: Arc, http_cache: Arc, } @@ -428,19 +430,18 @@ impl ModuleRegistry { http_client_provider: Arc, ) -> Self { // the http cache should always be the global one for registry completions - let http_cache = Arc::new(GlobalHttpCache::new( - location.clone(), - crate::cache::RealDenoCacheEnv, - )); - let mut file_fetcher = FileFetcher::new( + let http_cache = + Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone())); + let file_fetcher = CliFileFetcher::new( http_cache.clone(), - CacheSetting::RespectHeaders, - true, http_client_provider, + CliSys::default(), Default::default(), None, + true, + CacheSetting::RespectHeaders, + super::logging::lsp_log_level(), ); - file_fetcher.set_download_log_level(super::logging::lsp_log_level()); Self { origins: HashMap::new(), @@ -479,13 +480,15 @@ impl ModuleRegistry { let specifier = specifier.clone(); async move { file_fetcher - .fetch_with_options(FetchOptions { - specifier: &specifier, - permissions: FetchPermissionsOptionRef::AllowAll, - maybe_auth: None, - maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"), - maybe_cache_setting: None, - }) + .fetch_with_options( + &specifier, +FetchPermissionsOptionRef::AllowAll, + FetchOptions { + maybe_auth: None, + maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"), + maybe_cache_setting: None, + } + ) .await } }).await?; @@ -500,7 +503,7 @@ impl ModuleRegistry { ); self.http_cache.set(specifier, headers_map, &[])?; } - let file = fetch_result?.into_text_decoded()?; + let file = TextDecodedFile::decode(fetch_result?)?; let config: RegistryConfigurationJson = serde_json::from_str(&file.source)?; validate_config(&config)?; Ok(config.registries) @@ -584,12 +587,11 @@ impl ModuleRegistry { // spawn due to the lsp's `Send` requirement let file = deno_core::unsync::spawn({ async move { - file_fetcher + let file = file_fetcher .fetch_bypass_permissions(&endpoint) .await - .ok()? 
- .into_text_decoded() - .ok() + .ok()?; + TextDecodedFile::decode(file).ok() } }) .await @@ -983,12 +985,11 @@ impl ModuleRegistry { let file_fetcher = self.file_fetcher.clone(); // spawn due to the lsp's `Send` requirement let file = deno_core::unsync::spawn(async move { - file_fetcher + let file = file_fetcher .fetch_bypass_permissions(&specifier) .await - .ok()? - .into_text_decoded() - .ok() + .ok()?; + TextDecodedFile::decode(file).ok() }) .await .ok()??; @@ -1049,7 +1050,7 @@ impl ModuleRegistry { let file_fetcher = self.file_fetcher.clone(); let specifier = specifier.clone(); async move { - file_fetcher + let file = file_fetcher .fetch_bypass_permissions(&specifier) .await .map_err(|err| { @@ -1058,9 +1059,8 @@ impl ModuleRegistry { specifier, err ); }) - .ok()? - .into_text_decoded() - .ok() + .ok()?; + TextDecodedFile::decode(file).ok() } }) .await @@ -1095,7 +1095,7 @@ impl ModuleRegistry { let file_fetcher = self.file_fetcher.clone(); let specifier = specifier.clone(); async move { - file_fetcher + let file = file_fetcher .fetch_bypass_permissions(&specifier) .await .map_err(|err| { @@ -1104,9 +1104,8 @@ impl ModuleRegistry { specifier, err ); }) - .ok()? - .into_text_decoded() - .ok() + .ok()?; + TextDecodedFile::decode(file).ok() } }) .await diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index 28c7b04fc9..2dec5266f4 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -2,6 +2,7 @@ use dashmap::DashMap; use deno_ast::MediaType; +use deno_cache_dir::file_fetcher::CacheSetting; use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::HttpCache; use deno_config::deno_json::JsxImportSourceConfig; @@ -18,9 +19,7 @@ use deno_resolver::cjs::IsCjsResolutionMode; use deno_resolver::npm::NpmReqResolverOptions; use deno_resolver::DenoResolverOptions; use deno_resolver::NodeAndNpmReqResolver; -use deno_runtime::deno_fs; -use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_node::PackageJsonResolver; +use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; @@ -39,10 +38,8 @@ use std::sync::Arc; use super::cache::LspCache; use super::jsr::JsrCacheResolver; use crate::args::create_default_npmrc; -use crate::args::CacheSetting; use crate::args::CliLockfile; use crate::args::NpmInstallDepsProvider; -use crate::cache::DenoCacheEnvFsAdapter; use crate::factory::Deferred; use crate::graph_util::to_node_resolution_kind; use crate::graph_util::to_node_resolution_mode; @@ -51,6 +48,8 @@ use crate::http_util::HttpClientProvider; use crate::lsp::config::Config; use crate::lsp::config::ConfigData; use crate::lsp::logging::lsp_warn; +use crate::node::CliNodeResolver; +use crate::node::CliPackageJsonResolver; use crate::npm::create_cli_npm_resolver_for_lsp; use crate::npm::CliByonmNpmResolverCreateOptions; use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; @@ -61,12 +60,12 @@ use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CreateInNpmPkgCheckerOptions; use crate::npm::ManagedCliNpmResolver; use crate::resolver::CliDenoResolver; -use crate::resolver::CliDenoResolverFs; use crate::resolver::CliNpmReqResolver; use crate::resolver::CliResolver; use crate::resolver::CliResolverOptions; use crate::resolver::IsCjsResolver; use crate::resolver::WorkerCliNpmGraphResolver; +use crate::sys::CliSys; use crate::tsc::into_specifier_and_media_type; use crate::util::progress_bar::ProgressBar; use 
crate::util::progress_bar::ProgressBarStyle; @@ -78,9 +77,9 @@ struct LspScopeResolver { is_cjs_resolver: Arc, jsr_resolver: Option>, npm_resolver: Option>, - node_resolver: Option>, + node_resolver: Option>, npm_pkg_req_resolver: Option>, - pkg_json_resolver: Arc, + pkg_json_resolver: Arc, redirect_resolver: Option>, graph_imports: Arc>, dep_info: Arc>>, @@ -384,7 +383,7 @@ impl LspResolver { pub fn pkg_json_resolver( &self, referrer: &ModuleSpecifier, - ) -> &Arc { + ) -> &Arc { let resolver = self.get_scope_resolver(Some(referrer)); &resolver.pkg_json_resolver } @@ -592,28 +591,26 @@ struct ResolverFactoryServices { cli_resolver: Deferred>, in_npm_pkg_checker: Deferred>, is_cjs_resolver: Deferred>, - node_resolver: Deferred>>, + node_resolver: Deferred>>, npm_pkg_req_resolver: Deferred>>, npm_resolver: Option>, } struct ResolverFactory<'a> { config_data: Option<&'a Arc>, - fs: Arc, - pkg_json_resolver: Arc, + pkg_json_resolver: Arc, + sys: CliSys, services: ResolverFactoryServices, } impl<'a> ResolverFactory<'a> { pub fn new(config_data: Option<&'a Arc>) -> Self { - let fs = Arc::new(deno_fs::RealFs); - let pkg_json_resolver = Arc::new(PackageJsonResolver::new( - deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), - )); + let sys = CliSys::default(); + let pkg_json_resolver = Arc::new(CliPackageJsonResolver::new(sys.clone())); Self { config_data, - fs, pkg_json_resolver, + sys, services: Default::default(), } } @@ -624,9 +621,10 @@ impl<'a> ResolverFactory<'a> { cache: &LspCache, ) { let enable_byonm = self.config_data.map(|d| d.byonm).unwrap_or(false); + let sys = CliSys::default(); let options = if enable_byonm { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)), + sys, pkg_json_resolver: self.pkg_json_resolver.clone(), root_node_modules_dir: self.config_data.and_then(|config_data| { config_data.node_modules_dir.clone().or_else(|| { @@ -642,12 +640,14 @@ impl<'a> ResolverFactory<'a> { .and_then(|d| d.npmrc.clone()) .unwrap_or_else(create_default_npmrc); let npm_cache_dir = Arc::new(NpmCacheDir::new( - &DenoCacheEnvFsAdapter(self.fs.as_ref()), + &sys, cache.deno_dir().npm_folder_path(), npmrc.get_all_known_registries_urls(), )); CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions { http_client_provider: http_client_provider.clone(), + // only used for top level install, so we can ignore this + npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), snapshot: match self.config_data.and_then(|d| d.lockfile.as_ref()) { Some(lockfile) => { CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( @@ -656,10 +656,7 @@ impl<'a> ResolverFactory<'a> { } None => CliNpmResolverManagedSnapshotOption::Specified(None), }, - // Don't provide the lockfile. We don't want these resolvers - // updating it. Only the cache request should update the lockfile. - maybe_lockfile: None, - fs: Arc::new(deno_fs::RealFs), + sys: CliSys::default(), npm_cache_dir, // Use an "only" cache setting in order to make the // user do an explicit "cache" command and prevent @@ -667,11 +664,12 @@ impl<'a> ResolverFactory<'a> { // the user is typing. cache_setting: CacheSetting::Only, text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly), + // Don't provide the lockfile. We don't want these resolvers + // updating it. Only the cache request should update the lockfile. 
+ maybe_lockfile: None, maybe_node_modules_path: self .config_data .and_then(|d| d.node_modules_dir.clone()), - // only used for top level install, so we can ignore this - npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), npmrc, npm_system_info: NpmSystemInfo::default(), lifecycle_scripts: Default::default(), @@ -731,7 +729,7 @@ impl<'a> ResolverFactory<'a> { }) } - pub fn pkg_json_resolver(&self) -> &Arc { + pub fn pkg_json_resolver(&self) -> &Arc { &self.pkg_json_resolver } @@ -772,17 +770,18 @@ impl<'a> ResolverFactory<'a> { }) } - pub fn node_resolver(&self) -> Option<&Arc> { + pub fn node_resolver(&self) -> Option<&Arc> { self .services .node_resolver .get_or_init(|| { let npm_resolver = self.services.npm_resolver.as_ref()?; - Some(Arc::new(NodeResolver::new( - deno_runtime::deno_node::DenoFsNodeResolverEnv::new(self.fs.clone()), + Some(Arc::new(CliNodeResolver::new( self.in_npm_pkg_checker().clone(), + RealIsBuiltInNodeModuleChecker, npm_resolver.clone().into_npm_pkg_folder_resolver(), self.pkg_json_resolver.clone(), + self.sys.clone(), ))) }) .as_ref() @@ -797,10 +796,10 @@ impl<'a> ResolverFactory<'a> { let npm_resolver = self.npm_resolver()?; Some(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(), - fs: CliDenoResolverFs(self.fs.clone()), in_npm_pkg_checker: self.in_npm_pkg_checker().clone(), node_resolver: node_resolver.clone(), npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(), + sys: self.sys.clone(), }))) }) .as_ref() diff --git a/cli/lsp/search.rs b/cli/lsp/search.rs index 8933eeb186..c98acde6f1 100644 --- a/cli/lsp/search.rs +++ b/cli/lsp/search.rs @@ -67,7 +67,9 @@ pub mod tests { &self, nv: &PackageNv, ) -> Result>, AnyError> { - let Some(exports_by_version) = self.package_versions.get(&nv.name) else { + let Some(exports_by_version) = + self.package_versions.get(nv.name.as_str()) + else { return Err(anyhow!("Package not found.")); }; let Some(exports) = exports_by_version.get(&nv.version) else { diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 957c3a6859..f8b972511f 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -64,6 +64,7 @@ use deno_core::OpState; use deno_core::PollEventLoopOptions; use deno_core::RuntimeOptions; use deno_path_util::url_to_file_path; +use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES; use deno_runtime::inspector_server::InspectorServer; use deno_runtime::tokio_util::create_basic_runtime; use indexmap::IndexMap; @@ -3411,10 +3412,18 @@ fn parse_code_actions( additional_text_edits.extend(change.text_changes.iter().map(|tc| { let mut text_edit = tc.as_text_edit(asset_or_doc.line_index()); if let Some(specifier_rewrite) = &data.specifier_rewrite { - text_edit.new_text = text_edit.new_text.replace( - &specifier_rewrite.old_specifier, - &specifier_rewrite.new_specifier, - ); + let specifier_index = text_edit + .new_text + .char_indices() + .find_map(|(b, c)| (c == '\'' || c == '"').then_some(b)); + if let Some(i) = specifier_index { + let mut specifier_part = text_edit.new_text.split_off(i); + specifier_part = specifier_part.replace( + &specifier_rewrite.old_specifier, + &specifier_rewrite.new_specifier, + ); + text_edit.new_text.push_str(&specifier_part); + } if let Some(deno_types_specifier) = &specifier_rewrite.new_deno_types_specifier { @@ -3587,10 +3596,17 @@ impl CompletionEntryDetails { &mut insert_replace_edit.new_text } }; - *new_text = new_text.replace( - &specifier_rewrite.old_specifier, - &specifier_rewrite.new_specifier, - ); + let 
specifier_index = new_text + .char_indices() + .find_map(|(b, c)| (c == '\'' || c == '"').then_some(b)); + if let Some(i) = specifier_index { + let mut specifier_part = new_text.split_off(i); + specifier_part = specifier_part.replace( + &specifier_rewrite.old_specifier, + &specifier_rewrite.new_specifier, + ); + new_text.push_str(&specifier_part); + } if let Some(deno_types_specifier) = &specifier_rewrite.new_deno_types_specifier { @@ -3729,7 +3745,7 @@ pub struct CompletionItemData { #[serde(rename_all = "camelCase")] struct CompletionEntryDataAutoImport { module_specifier: String, - file_name: String, + file_name: Option, } #[derive(Debug)] @@ -3786,9 +3802,20 @@ impl CompletionEntry { else { return; }; - if let Ok(normalized) = specifier_map.normalize(&raw.file_name) { - self.auto_import_data = - Some(CompletionNormalizedAutoImportData { raw, normalized }); + if let Some(file_name) = &raw.file_name { + if let Ok(normalized) = specifier_map.normalize(file_name) { + self.auto_import_data = + Some(CompletionNormalizedAutoImportData { raw, normalized }); + } + } else if SUPPORTED_BUILTIN_NODE_MODULES + .contains(&raw.module_specifier.as_str()) + { + if let Ok(normalized) = + resolve_url(&format!("node:{}", &raw.module_specifier)) + { + self.auto_import_data = + Some(CompletionNormalizedAutoImportData { raw, normalized }); + } } } @@ -5516,7 +5543,6 @@ impl TscRequest { mod tests { use super::*; use crate::cache::HttpCache; - use crate::http_util::HeadersMap; use crate::lsp::cache::LspCache; use crate::lsp::config::Config; use crate::lsp::config::WorkspaceSettings; @@ -5746,6 +5772,7 @@ mod tests { "sourceLine": " import { A } from \".\";", "category": 2, "code": 6133, + "reportsUnnecessary": true, }] }) ); @@ -5828,6 +5855,7 @@ mod tests { "sourceLine": " import {", "category": 2, "code": 6192, + "reportsUnnecessary": true, }, { "start": { "line": 8, @@ -5951,7 +5979,7 @@ mod tests { .global() .set( &specifier_dep, - HeadersMap::default(), + Default::default(), b"export const b = \"b\";\n", ) .unwrap(); @@ -5990,7 +6018,7 @@ mod tests { .global() .set( &specifier_dep, - HeadersMap::default(), + Default::default(), b"export const b = \"b\";\n\nexport const a = \"b\";\n", ) .unwrap(); diff --git a/cli/main.rs b/cli/main.rs index 0594739fd8..c3c7286e71 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -1,7 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
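[Note: the `specifier_rewrite` change above stops running a blind `String::replace` over the whole completion text, where an identifier that happened to equal the old specifier could be clobbered, and instead splits the text at the first quote character so only the quoted module specifier is rewritten. A self-contained sketch of that technique; the function name and inputs are illustrative, not Deno API:

```rust
// Rewrites `old` to `new` only inside the quoted specifier portion of an
// import insert-text, leaving identifiers before the first quote intact.
fn rewrite_specifier(new_text: &mut String, old: &str, new: &str) {
  // Byte offset of the first `'` or `"` (always on a char boundary).
  let specifier_index = new_text
    .char_indices()
    .find_map(|(b, c)| (c == '\'' || c == '"').then_some(b));
  if let Some(i) = specifier_index {
    let mut specifier_part = new_text.split_off(i);
    specifier_part = specifier_part.replace(old, new);
    new_text.push_str(&specifier_part);
  }
}

fn main() {
  let mut text = String::from(r#"import { old } from "./old.ts";"#);
  rewrite_specifier(&mut text, "old", "new");
  // The `old` identifier is untouched; only the specifier changed.
  assert_eq!(text, r#"import { old } from "./new.ts";"#);
}
```
]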
mod args; -mod auth_tokens; mod cache; mod cdp; mod emit; @@ -21,6 +20,7 @@ mod ops; mod resolver; mod shared; mod standalone; +mod sys; mod task_runner; mod tools; mod tsc; diff --git a/cli/mainrt.rs b/cli/mainrt.rs index 18142bd0e7..2b767ea89c 100644 --- a/cli/mainrt.rs +++ b/cli/mainrt.rs @@ -8,7 +8,6 @@ mod standalone; mod args; -mod auth_tokens; mod cache; mod emit; mod errors; @@ -19,6 +18,7 @@ mod node; mod npm; mod resolver; mod shared; +mod sys; mod task_runner; mod util; mod version; @@ -32,11 +32,13 @@ use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS; use deno_terminal::colors; use indexmap::IndexMap; +use standalone::DenoCompileFileSystem; use std::borrow::Cow; use std::collections::HashMap; use std::env; use std::env::current_exe; +use std::sync::Arc; use crate::args::Flags; @@ -93,7 +95,9 @@ fn main() { Some(data.metadata.otel_config.clone()), ); load_env_vars(&data.metadata.env_vars_from_env_file); - let exit_code = standalone::run(data).await?; + let fs = DenoCompileFileSystem::new(data.vfs.clone()); + let sys = crate::sys::CliSys::DenoCompile(fs.clone()); + let exit_code = standalone::run(Arc::new(fs), sys, data).await?; deno_runtime::exit(exit_code); } Ok(None) => Ok(()), diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 5e4ff875dc..ea40dbe609 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -11,36 +11,8 @@ use std::sync::atomic::AtomicU16; use std::sync::atomic::Ordering; use std::sync::Arc; -use crate::args::jsr_url; -use crate::args::CliLockfile; -use crate::args::CliOptions; -use crate::args::DenoSubcommand; -use crate::args::TsTypeLib; -use crate::cache::CodeCache; -use crate::cache::FastInsecureHasher; -use crate::cache::ParsedSourceCache; -use crate::emit::Emitter; -use crate::graph_container::MainModuleGraphContainer; -use crate::graph_container::ModuleGraphContainer; -use crate::graph_container::ModuleGraphUpdatePermit; -use crate::graph_util::CreateGraphOptions; -use crate::graph_util::ModuleGraphBuilder; -use crate::node; -use crate::node::CliNodeCodeTranslator; -use crate::npm::CliNpmResolver; -use crate::resolver::CjsTracker; -use crate::resolver::CliNpmReqResolver; -use crate::resolver::CliResolver; -use crate::resolver::ModuleCodeStringSource; -use crate::resolver::NotSupportedKindInNpmError; -use crate::resolver::NpmModuleLoader; -use crate::tools::check; -use crate::tools::check::TypeChecker; -use crate::util::progress_bar::ProgressBar; -use crate::util::text_encoding::code_without_source_map; -use crate::util::text_encoding::source_map_from_code; -use crate::worker::CreateModuleLoaderResult; -use crate::worker::ModuleLoaderFactory; +use crate::node::CliNodeResolver; +use crate::sys::CliSys; use deno_ast::MediaType; use deno_ast::ModuleKind; use deno_core::anyhow::anyhow; @@ -69,16 +41,45 @@ use deno_graph::ModuleGraph; use deno_graph::Resolution; use deno_graph::WasmModule; use deno_runtime::code_cache; -use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::create_host_defined_options; use deno_runtime::deno_node::NodeRequireLoader; -use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; use node_resolver::errors::ClosestPkgJsonError; use node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; +use sys_traits::FsRead; + +use crate::args::jsr_url; +use crate::args::CliLockfile; +use 
crate::args::CliOptions; +use crate::args::DenoSubcommand; +use crate::args::TsTypeLib; +use crate::cache::CodeCache; +use crate::cache::FastInsecureHasher; +use crate::cache::ParsedSourceCache; +use crate::emit::Emitter; +use crate::graph_container::MainModuleGraphContainer; +use crate::graph_container::ModuleGraphContainer; +use crate::graph_container::ModuleGraphUpdatePermit; +use crate::graph_util::CreateGraphOptions; +use crate::graph_util::ModuleGraphBuilder; +use crate::node::CliNodeCodeTranslator; +use crate::npm::CliNpmResolver; +use crate::resolver::CjsTracker; +use crate::resolver::CliNpmReqResolver; +use crate::resolver::CliResolver; +use crate::resolver::ModuleCodeStringSource; +use crate::resolver::NotSupportedKindInNpmError; +use crate::resolver::NpmModuleLoader; +use crate::tools::check; +use crate::tools::check::TypeChecker; +use crate::util::progress_bar::ProgressBar; +use crate::util::text_encoding::code_without_source_map; +use crate::util::text_encoding::source_map_from_code; +use crate::worker::CreateModuleLoaderResult; +use crate::worker::ModuleLoaderFactory; pub struct ModuleLoadPreparer { options: Arc, @@ -215,17 +216,17 @@ struct SharedCliModuleLoaderState { cjs_tracker: Arc, code_cache: Option>, emitter: Arc, - fs: Arc, in_npm_pkg_checker: Arc, main_module_graph_container: Arc, module_load_preparer: Arc, node_code_translator: Arc, - node_resolver: Arc, + node_resolver: Arc, npm_req_resolver: Arc, npm_resolver: Arc, npm_module_loader: NpmModuleLoader, parsed_source_cache: Arc, resolver: Arc, + sys: CliSys, in_flight_loads_tracker: InFlightModuleLoadsTracker, } @@ -275,17 +276,17 @@ impl CliModuleLoaderFactory { cjs_tracker: Arc, code_cache: Option>, emitter: Arc, - fs: Arc, in_npm_pkg_checker: Arc, main_module_graph_container: Arc, module_load_preparer: Arc, node_code_translator: Arc, - node_resolver: Arc, + node_resolver: Arc, npm_req_resolver: Arc, npm_resolver: Arc, npm_module_loader: NpmModuleLoader, parsed_source_cache: Arc, resolver: Arc, + sys: CliSys, ) -> Self { Self { shared: Arc::new(SharedCliModuleLoaderState { @@ -301,7 +302,6 @@ impl CliModuleLoaderFactory { cjs_tracker, code_cache, emitter, - fs, in_npm_pkg_checker, main_module_graph_container, module_load_preparer, @@ -312,6 +312,7 @@ impl CliModuleLoaderFactory { npm_module_loader, parsed_source_cache, resolver, + sys, in_flight_loads_tracker: InFlightModuleLoadsTracker { loads_number: Arc::new(AtomicU16::new(0)), cleanup_task_timeout: 10_000, @@ -344,7 +345,7 @@ impl CliModuleLoaderFactory { let node_require_loader = Rc::new(CliNodeRequireLoader { cjs_tracker: self.shared.cjs_tracker.clone(), emitter: self.shared.emitter.clone(), - fs: self.shared.fs.clone(), + sys: self.shared.sys.clone(), graph_container, in_npm_pkg_checker: self.shared.in_npm_pkg_checker.clone(), npm_resolver: self.shared.npm_resolver.clone(), @@ -593,9 +594,9 @@ impl Some(Module::Json(module)) => module.specifier.clone(), Some(Module::Wasm(module)) => module.specifier.clone(), Some(Module::External(module)) => { - node::resolve_specifier_into_node_modules( + node_resolver::resolve_specifier_into_node_modules( + &self.shared.sys, &module.specifier, - self.shared.fs.as_ref(), ) } None => specifier.into_owned(), @@ -996,7 +997,7 @@ impl ModuleLoader std::future::ready(()).boxed_local() } - fn get_source_map(&self, file_name: &str) -> Option> { + fn get_source_map(&self, file_name: &str) -> Option> { let specifier = resolve_url(file_name).ok()?; match specifier.scheme() { // we should only be looking for emits for schemes that 
denote external @@ -1008,7 +1009,7 @@ impl ModuleLoader .0 .load_prepared_module_for_source_map_sync(&specifier) .ok()??; - source_map_from_code(source.code.as_bytes()) + source_map_from_code(source.code.as_bytes()).map(Cow::Owned) } fn get_source_mapped_source_line( @@ -1091,7 +1092,7 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit { struct CliNodeRequireLoader { cjs_tracker: Arc, emitter: Arc, - fs: Arc, + sys: CliSys, graph_container: TGraphContainer, in_npm_pkg_checker: Arc, npm_resolver: Arc, @@ -1120,7 +1121,7 @@ impl NodeRequireLoader ) -> Result, AnyError> { // todo(dsherret): use the preloaded module from the graph if available? let media_type = MediaType::from_path(path); - let text = self.fs.read_text_file_lossy_sync(path, None)?; + let text = self.sys.fs_read_to_string_lossy(path)?; if media_type.is_emittable() { let specifier = deno_path_util::url_from_file_path(path)?; if self.in_npm_pkg_checker.in_npm_package(&specifier) { diff --git a/cli/node.rs b/cli/node.rs index 11959df6b9..4a87d26ee0 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -8,7 +8,7 @@ use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_graph::ParsedSourceStore; use deno_runtime::deno_fs; -use deno_runtime::deno_node::DenoFsNodeResolverEnv; +use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; use node_resolver::analyze::CjsAnalysisExports; use node_resolver::analyze::CjsCodeAnalyzer; @@ -20,24 +20,15 @@ use crate::cache::CacheDBHash; use crate::cache::NodeAnalysisCache; use crate::cache::ParsedSourceCache; use crate::resolver::CjsTracker; +use crate::sys::CliSys; -pub type CliNodeCodeTranslator = - NodeCodeTranslator; - -/// Resolves a specifier that is pointing into a node_modules folder. -/// -/// Note: This should be called whenever getting the specifier from -/// a Module::External(module) reference because that module might -/// not be fully resolved at the time deno_graph is analyzing it -/// because the node_modules folder might not exist at that time. 
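[Note: the require-loader change above (`self.sys.fs_read_to_string_lossy(path)?`) is part of the broader move in this diff from `deno_fs::FileSystem` and `DenoConfigFs` trait objects to the `sys_traits` abstractions (`CliSys`, `InMemorySys`). One payoff is that code written against the traits can be exercised on an in-memory file system, as `inject_config_file` now does earlier in this diff. A small sketch under the assumption that `InMemorySys` behaves as in that test; `load_text` is an illustrative helper, not Deno API:

```rust
use std::path::Path;

use sys_traits::impls::InMemorySys;
use sys_traits::FsCreateDirAll;
use sys_traits::FsRead;
use sys_traits::FsWrite;

// Generic over any sys implementation that can read files.
fn load_text<TSys: FsRead>(sys: &TSys, path: &Path) -> std::io::Result<String> {
  // Lossy read, mirroring the require loader change above.
  Ok(sys.fs_read_to_string_lossy(path)?.into_owned())
}

fn main() -> std::io::Result<()> {
  let sys = InMemorySys::default();
  sys.fs_create_dir_all("/app")?;
  sys.fs_write("/app/mod.ts", "export const a = 1;")?;
  let text = load_text(&sys, Path::new("/app/mod.ts"))?;
  assert_eq!(text, "export const a = 1;");
  Ok(())
}
```
]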
-pub fn resolve_specifier_into_node_modules( - specifier: &ModuleSpecifier, - fs: &dyn deno_fs::FileSystem, -) -> ModuleSpecifier { - node_resolver::resolve_specifier_into_node_modules(specifier, &|path| { - fs.realpath_sync(path).map_err(|err| err.into_io_error()) - }) -} +pub type CliNodeCodeTranslator = NodeCodeTranslator< + CliCjsCodeAnalyzer, + RealIsBuiltInNodeModuleChecker, + CliSys, +>; +pub type CliNodeResolver = deno_runtime::deno_node::NodeResolver; +pub type CliPackageJsonResolver = node_resolver::PackageJsonResolver; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum CliCjsAnalysis { diff --git a/cli/npm/byonm.rs b/cli/npm/byonm.rs index eca399251b..ca89a7399e 100644 --- a/cli/npm/byonm.rs +++ b/cli/npm/byonm.rs @@ -4,27 +4,25 @@ use std::borrow::Cow; use std::path::Path; use std::sync::Arc; +use crate::sys::CliSys; use deno_core::error::AnyError; use deno_core::serde_json; use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolverCreateOptions; use deno_resolver::npm::CliNpmReqResolver; -use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::NodePermissions; use deno_runtime::ops::process::NpmProcessStateProvider; use node_resolver::NpmPackageFolderResolver; use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; -use crate::resolver::CliDenoResolverFs; use super::CliNpmResolver; use super::InnerCliNpmResolverRef; pub type CliByonmNpmResolverCreateOptions = - ByonmNpmResolverCreateOptions; -pub type CliByonmNpmResolver = - ByonmNpmResolver; + ByonmNpmResolverCreateOptions; +pub type CliByonmNpmResolver = ByonmNpmResolver; // todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple. #[derive(Debug)] diff --git a/cli/npm/managed/mod.rs b/cli/npm/managed/mod.rs index 2c6e6d318a..97a87dd9b8 100644 --- a/cli/npm/managed/mod.rs +++ b/cli/npm/managed/mod.rs @@ -5,6 +5,7 @@ use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use crate::sys::CliSys; use deno_ast::ModuleSpecifier; use deno_cache_dir::npm::NpmCacheDir; use deno_core::anyhow::Context; @@ -20,9 +21,10 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; use deno_npm::NpmSystemInfo; +use deno_npm_cache::NpmCacheSetting; +use deno_path_util::fs::canonicalize_path_maybe_not_exists; use deno_resolver::npm::CliNpmReqResolver; use deno_runtime::colors; -use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::NodePermissions; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; @@ -40,7 +42,6 @@ use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; use crate::args::PackageJsonDepValueParseWithLocationError; use crate::cache::FastInsecureHasher; -use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; use crate::util::progress_bar::ProgressBar; use crate::util::sync::AtomicFlag; @@ -49,7 +50,7 @@ use self::resolvers::create_npm_fs_resolver; use self::resolvers::NpmPackageFsResolver; use super::CliNpmCache; -use super::CliNpmCacheEnv; +use super::CliNpmCacheHttpClient; use super::CliNpmRegistryInfoProvider; use super::CliNpmResolver; use super::CliNpmTarballCache; @@ -67,10 +68,10 @@ pub enum CliNpmResolverManagedSnapshotOption { pub struct CliManagedNpmResolverCreateOptions { pub snapshot: CliNpmResolverManagedSnapshotOption, pub maybe_lockfile: Option>, - pub fs: Arc, pub http_client_provider: Arc, pub npm_cache_dir: Arc, - 
pub cache_setting: crate::args::CacheSetting, + pub sys: CliSys, + pub cache_setting: deno_cache_dir::file_fetcher::CacheSetting, pub text_only_progress_bar: crate::util::progress_bar::ProgressBar, pub maybe_node_modules_path: Option, pub npm_system_info: NpmSystemInfo, @@ -82,9 +83,12 @@ pub struct CliManagedNpmResolverCreateOptions { pub async fn create_managed_npm_resolver_for_lsp( options: CliManagedNpmResolverCreateOptions, ) -> Arc { - let cache_env = create_cache_env(&options); - let npm_cache = create_cache(cache_env.clone(), &options); - let npm_api = create_api(npm_cache.clone(), cache_env.clone(), &options); + let npm_cache = create_cache(&options); + let http_client = Arc::new(CliNpmCacheHttpClient::new( + options.http_client_provider.clone(), + options.text_only_progress_bar.clone(), + )); + let npm_api = create_api(npm_cache.clone(), http_client.clone(), &options); // spawn due to the lsp's `Send` requirement deno_core::unsync::spawn(async move { let snapshot = match resolve_snapshot(&npm_api, options.snapshot).await { @@ -95,14 +99,14 @@ pub async fn create_managed_npm_resolver_for_lsp( } }; create_inner( - cache_env, - options.fs, - options.maybe_lockfile, - npm_api, + http_client, npm_cache, - options.npmrc, options.npm_install_deps_provider, + npm_api, + options.sys, options.text_only_progress_bar, + options.maybe_lockfile, + options.npmrc, options.maybe_node_modules_path, options.npm_system_info, snapshot, @@ -116,19 +120,22 @@ pub async fn create_managed_npm_resolver_for_lsp( pub async fn create_managed_npm_resolver( options: CliManagedNpmResolverCreateOptions, ) -> Result, AnyError> { - let npm_cache_env = create_cache_env(&options); - let npm_cache = create_cache(npm_cache_env.clone(), &options); - let api = create_api(npm_cache.clone(), npm_cache_env.clone(), &options); + let npm_cache = create_cache(&options); + let http_client = Arc::new(CliNpmCacheHttpClient::new( + options.http_client_provider.clone(), + options.text_only_progress_bar.clone(), + )); + let api = create_api(npm_cache.clone(), http_client.clone(), &options); let snapshot = resolve_snapshot(&api, options.snapshot).await?; Ok(create_inner( - npm_cache_env, - options.fs, - options.maybe_lockfile, - api, + http_client, npm_cache, - options.npmrc, options.npm_install_deps_provider, + api, + options.sys, options.text_only_progress_bar, + options.maybe_lockfile, + options.npmrc, options.maybe_node_modules_path, options.npm_system_info, snapshot, @@ -138,14 +145,14 @@ pub async fn create_managed_npm_resolver( #[allow(clippy::too_many_arguments)] fn create_inner( - env: Arc, - fs: Arc, - maybe_lockfile: Option>, - registry_info_provider: Arc, + http_client: Arc, npm_cache: Arc, - npm_rc: Arc, npm_install_deps_provider: Arc, + registry_info_provider: Arc, + sys: CliSys, text_only_progress_bar: crate::util::progress_bar::ProgressBar, + maybe_lockfile: Option>, + npm_rc: Arc, node_modules_dir_path: Option, npm_system_info: NpmSystemInfo, snapshot: Option, @@ -158,28 +165,29 @@ fn create_inner( )); let tarball_cache = Arc::new(CliNpmTarballCache::new( npm_cache.clone(), - env, + http_client, + sys.clone(), npm_rc.clone(), )); let fs_resolver = create_npm_fs_resolver( - fs.clone(), npm_cache.clone(), &npm_install_deps_provider, &text_only_progress_bar, resolution.clone(), + sys.clone(), tarball_cache.clone(), node_modules_dir_path, npm_system_info.clone(), lifecycle_scripts.clone(), ); Arc::new(ManagedCliNpmResolver::new( - fs, fs_resolver, maybe_lockfile, registry_info_provider, npm_cache, 
npm_install_deps_provider, resolution, + sys, tarball_cache, text_only_progress_bar, npm_system_info, @@ -187,36 +195,25 @@ fn create_inner( )) } -fn create_cache_env( - options: &CliManagedNpmResolverCreateOptions, -) -> Arc { - Arc::new(CliNpmCacheEnv::new( - options.fs.clone(), - options.http_client_provider.clone(), - options.text_only_progress_bar.clone(), - )) -} - fn create_cache( - env: Arc, options: &CliManagedNpmResolverCreateOptions, ) -> Arc { Arc::new(CliNpmCache::new( options.npm_cache_dir.clone(), - options.cache_setting.as_npm_cache_setting(), - env, + options.sys.clone(), + NpmCacheSetting::from_cache_setting(&options.cache_setting), options.npmrc.clone(), )) } fn create_api( cache: Arc, - env: Arc, + http_client: Arc, options: &CliManagedNpmResolverCreateOptions, ) -> Arc { Arc::new(CliNpmRegistryInfoProvider::new( cache, - env, + http_client, options.npmrc.clone(), )) } @@ -305,12 +302,12 @@ pub enum PackageCaching<'a> { /// An npm resolver where the resolution is managed by Deno rather than /// the user bringing their own node_modules (BYONM) on the file system. pub struct ManagedCliNpmResolver { - fs: Arc, fs_resolver: Arc, maybe_lockfile: Option>, registry_info_provider: Arc, npm_cache: Arc, npm_install_deps_provider: Arc, + sys: CliSys, resolution: Arc, tarball_cache: Arc, text_only_progress_bar: ProgressBar, @@ -330,20 +327,19 @@ impl std::fmt::Debug for ManagedCliNpmResolver { impl ManagedCliNpmResolver { #[allow(clippy::too_many_arguments)] pub fn new( - fs: Arc, fs_resolver: Arc, maybe_lockfile: Option>, registry_info_provider: Arc, npm_cache: Arc, npm_install_deps_provider: Arc, resolution: Arc, + sys: CliSys, tarball_cache: Arc, text_only_progress_bar: ProgressBar, npm_system_info: NpmSystemInfo, lifecycle_scripts: LifecycleScriptsConfig, ) -> Self { Self { - fs, fs_resolver, maybe_lockfile, registry_info_provider, @@ -351,6 +347,7 @@ impl ManagedCliNpmResolver { npm_install_deps_provider, text_only_progress_bar, resolution, + sys, tarball_cache, npm_system_info, top_level_install_flag: Default::default(), @@ -363,8 +360,7 @@ impl ManagedCliNpmResolver { pkg_id: &NpmPackageId, ) -> Result { let path = self.fs_resolver.package_folder(pkg_id)?; - let path = - canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())?; + let path = canonicalize_path_maybe_not_exists(&self.sys, &path)?; log::debug!( "Resolved package folder of {} to {}", pkg_id.as_serialized(), @@ -559,11 +555,11 @@ impl ManagedCliNpmResolver { &self, ) -> Result<(), Box> { for err in self.npm_install_deps_provider.pkg_json_dep_errors() { - match &err.source { - deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => { + match err.source.as_kind() { + deno_package_json::PackageJsonDepValueParseErrorKind::VersionReq(_) => { return Err(Box::new(err.clone())); } - deno_package_json::PackageJsonDepValueParseError::Unsupported { + deno_package_json::PackageJsonDepValueParseErrorKind::Unsupported { .. 
        } => {
           // only warn for this one
@@ -666,12 +662,13 @@ impl NpmPackageFolderResolver for ManagedCliNpmResolver {
       .fs_resolver
       .resolve_package_folder_from_package(name, referrer)?;
     let path =
-      canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())
-        .map_err(|err| PackageFolderResolveIoError {
+      canonicalize_path_maybe_not_exists(&self.sys, &path).map_err(|err| {
+        PackageFolderResolveIoError {
           package_name: name.to_string(),
           referrer: referrer.clone(),
           source: err,
-        })?;
+        }
+      })?;
     log::debug!("Resolved {} from {} to {}", name, referrer, path.display());
     Ok(path)
   }
@@ -727,13 +724,12 @@ impl CliNpmResolver for ManagedCliNpmResolver {
     ));
     Arc::new(ManagedCliNpmResolver::new(
-      self.fs.clone(),
       create_npm_fs_resolver(
-        self.fs.clone(),
         self.npm_cache.clone(),
         &self.npm_install_deps_provider,
         &self.text_only_progress_bar,
         npm_resolution.clone(),
+        self.sys.clone(),
         self.tarball_cache.clone(),
         self.root_node_modules_path().map(ToOwned::to_owned),
         self.npm_system_info.clone(),
@@ -744,6 +740,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
       self.npm_cache.clone(),
       self.npm_install_deps_provider.clone(),
       npm_resolution,
+      self.sys.clone(),
       self.tarball_cache.clone(),
       self.text_only_progress_bar.clone(),
       self.npm_system_info.clone(),
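
The lockfile change above swaps a format! call for capacity_builder's StringBuilder, which runs the closure twice — once to measure, once to write — so the final string is allocated at exactly its final size. A sketch of that same pattern, assuming the `StringBuilder::<T>::build(|builder| ...)` API used in the diff and building into a plain String instead of SmallStackString:

    use capacity_builder::StringBuilder;

    // Assumption: `append` accepts &str as it accepts the Display-able
    // values appended in the diff above.
    fn version_with_peers(version: &str, peers_serialized: &str) -> String {
      StringBuilder::<String>::build(|builder| {
        builder.append(version);
        builder.append(peers_serialized);
      })
      .unwrap()
    }
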
diff --git a/cli/npm/managed/resolvers/common.rs b/cli/npm/managed/resolvers/common.rs
index 68e95fb39a..26f6d8516d 100644
--- a/cli/npm/managed/resolvers/common.rs
+++ b/cli/npm/managed/resolvers/common.rs
@@ -11,7 +11,6 @@ use std::path::PathBuf;
 use std::sync::Arc;
 use std::sync::Mutex;
 
-use super::super::PackageCaching;
 use async_trait::async_trait;
 use deno_ast::ModuleSpecifier;
 use deno_core::anyhow::Context;
@@ -21,11 +20,13 @@ use deno_core::futures::StreamExt;
 use deno_npm::NpmPackageCacheFolderId;
 use deno_npm::NpmPackageId;
 use deno_npm::NpmResolutionPackage;
-use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::NodePermissions;
 use node_resolver::errors::PackageFolderResolveError;
+use sys_traits::FsCanonicalize;
 
+use super::super::PackageCaching;
 use crate::npm::CliNpmTarballCache;
+use crate::sys::CliSys;
 
 /// Part of the resolution that interacts with the file system.
 #[async_trait(?Send)]
@@ -73,15 +74,15 @@ pub trait NpmPackageFsResolver: Send + Sync {
 
 #[derive(Debug)]
 pub struct RegistryReadPermissionChecker {
-  fs: Arc<dyn FileSystem>,
+  sys: CliSys,
   cache: Mutex<HashMap<PathBuf, PathBuf>>,
   registry_path: PathBuf,
 }
 
 impl RegistryReadPermissionChecker {
-  pub fn new(fs: Arc<dyn FileSystem>, registry_path: PathBuf) -> Self {
+  pub fn new(sys: CliSys, registry_path: PathBuf) -> Self {
     Self {
-      fs,
+      sys,
       registry_path,
       cache: Default::default(),
     }
@@ -108,7 +109,7 @@ impl RegistryReadPermissionChecker {
       |path: &Path| -> Result<Option<PathBuf>, AnyError> {
         match cache.get(path) {
           Some(canon) => Ok(Some(canon.clone())),
-          None => match self.fs.realpath_sync(path) {
+          None => match self.sys.fs_canonicalize(path) {
             Ok(canon) => {
               cache.insert(path.to_path_buf(), canon.clone());
               Ok(Some(canon))
diff --git a/cli/npm/managed/resolvers/common/bin_entries.rs b/cli/npm/managed/resolvers/common/bin_entries.rs
index e4a1845689..ca47b9a086 100644
--- a/cli/npm/managed/resolvers/common/bin_entries.rs
+++ b/cli/npm/managed/resolvers/common/bin_entries.rs
@@ -28,8 +28,10 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
     .id
     .nv
     .name
+    .as_str()
     .rsplit_once('/')
-    .map_or(package.id.nv.name.as_str(), |(_, name)| name)
+    .map(|(_, name)| name)
+    .unwrap_or(package.id.nv.name.as_str())
 }
 
 pub fn warn_missing_entrypoint(
diff --git a/cli/npm/managed/resolvers/global.rs b/cli/npm/managed/resolvers/global.rs
index 4e79941af6..77e0d0ea3e 100644
--- a/cli/npm/managed/resolvers/global.rs
+++ b/cli/npm/managed/resolvers/global.rs
@@ -11,6 +11,7 @@ use crate::colors;
 use crate::npm::managed::PackageCaching;
 use crate::npm::CliNpmCache;
 use crate::npm::CliNpmTarballCache;
+use crate::sys::CliSys;
 use async_trait::async_trait;
 use deno_ast::ModuleSpecifier;
 use deno_core::error::AnyError;
@@ -18,7 +19,6 @@ use deno_npm::NpmPackageCacheFolderId;
 use deno_npm::NpmPackageId;
 use deno_npm::NpmResolutionPackage;
 use deno_npm::NpmSystemInfo;
-use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::NodePermissions;
 use node_resolver::errors::PackageFolderResolveError;
 use node_resolver::errors::PackageNotFoundError;
@@ -47,15 +47,15 @@ pub struct GlobalNpmPackageResolver {
 impl GlobalNpmPackageResolver {
   pub fn new(
     cache: Arc<CliNpmCache>,
-    fs: Arc<dyn FileSystem>,
     tarball_cache: Arc<CliNpmTarballCache>,
     resolution: Arc<NpmResolution>,
+    sys: CliSys,
     system_info: NpmSystemInfo,
     lifecycle_scripts: LifecycleScriptsConfig,
   ) -> Self {
     Self {
       registry_read_permission_checker: RegistryReadPermissionChecker::new(
-        fs,
+        sys,
         cache.root_dir_path().to_path_buf(),
       ),
       cache,
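
The bin_entries.rs change above rewrites default_bin_name so a scoped package's default bin entry falls back to the part after the slash. The same logic isolated as runnable std-only Rust (the package names are illustrative):

    fn default_bin_name(package_name: &str) -> &str {
      // "@scope/pkg" -> "pkg"; names without a slash pass through unchanged.
      package_name
        .rsplit_once('/')
        .map(|(_, name)| name)
        .unwrap_or(package_name)
    }

    fn main() {
      assert_eq!(default_bin_name("@denotest/say-hello"), "say-hello");
      assert_eq!(default_bin_name("cowsay"), "cowsay");
    }
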
diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs
index 1e83717f15..5c93c228e8 100644
--- a/cli/npm/managed/resolvers/local.rs
+++ b/cli/npm/managed/resolvers/local.rs
@@ -15,11 +15,6 @@ use std::path::PathBuf;
 use std::rc::Rc;
 use std::sync::Arc;
 
-use crate::args::LifecycleScriptsConfig;
-use crate::colors;
-use crate::npm::managed::PackageCaching;
-use crate::npm::CliNpmCache;
-use crate::npm::CliNpmTarballCache;
 use async_trait::async_trait;
 use deno_ast::ModuleSpecifier;
 use deno_cache_dir::npm::mixed_case_package_name_decode;
@@ -34,21 +29,28 @@ use deno_npm::NpmPackageCacheFolderId;
 use deno_npm::NpmPackageId;
 use deno_npm::NpmResolutionPackage;
 use deno_npm::NpmSystemInfo;
+use deno_path_util::fs::atomic_write_file_with_retries;
+use deno_path_util::fs::canonicalize_path_maybe_not_exists;
 use deno_resolver::npm::normalize_pkg_name_for_node_modules_deno_folder;
-use deno_runtime::deno_fs;
 use deno_runtime::deno_node::NodePermissions;
 use deno_semver::package::PackageNv;
+use deno_semver::StackString;
 use node_resolver::errors::PackageFolderResolveError;
 use node_resolver::errors::PackageFolderResolveIoError;
 use node_resolver::errors::PackageNotFoundError;
 use node_resolver::errors::ReferrerNotFoundError;
 use serde::Deserialize;
 use serde::Serialize;
+use sys_traits::FsMetadata;
 
+use crate::args::LifecycleScriptsConfig;
 use crate::args::NpmInstallDepsProvider;
 use crate::cache::CACHE_PERM;
-use crate::util::fs::atomic_write_file_with_retries;
-use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
+use crate::colors;
+use crate::npm::managed::PackageCaching;
+use crate::npm::CliNpmCache;
+use crate::npm::CliNpmTarballCache;
+use crate::sys::CliSys;
 use crate::util::fs::clone_dir_recursive;
 use crate::util::fs::symlink_dir;
 use crate::util::fs::LaxSingleProcessFsFlag;
@@ -65,10 +67,10 @@ use super::common::RegistryReadPermissionChecker;
 #[derive(Debug)]
 pub struct LocalNpmPackageResolver {
   cache: Arc<CliNpmCache>,
-  fs: Arc<dyn deno_fs::FileSystem>,
   npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
   progress_bar: ProgressBar,
   resolution: Arc<NpmResolution>,
+  sys: CliSys,
   tarball_cache: Arc<CliNpmTarballCache>,
   root_node_modules_path: PathBuf,
   root_node_modules_url: Url,
@@ -81,10 +83,10 @@ impl LocalNpmPackageResolver {
   #[allow(clippy::too_many_arguments)]
   pub fn new(
     cache: Arc<CliNpmCache>,
-    fs: Arc<dyn deno_fs::FileSystem>,
     npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
     progress_bar: ProgressBar,
     resolution: Arc<NpmResolution>,
+    sys: CliSys,
     tarball_cache: Arc<CliNpmTarballCache>,
     node_modules_folder: PathBuf,
     system_info: NpmSystemInfo,
@@ -92,15 +94,15 @@ impl LocalNpmPackageResolver {
   ) -> Self {
     Self {
       cache,
-      fs: fs.clone(),
       npm_install_deps_provider,
       progress_bar,
       resolution,
       tarball_cache,
       registry_read_permission_checker: RegistryReadPermissionChecker::new(
-        fs,
+        sys.clone(),
         node_modules_folder.clone(),
       ),
+      sys,
       root_node_modules_url: Url::from_directory_path(&node_modules_folder)
         .unwrap(),
       root_node_modules_path: node_modules_folder,
@@ -139,8 +141,7 @@ impl LocalNpmPackageResolver {
     };
     // Canonicalize the path so it's not pointing to the symlinked directory
     // in `node_modules` directory of the referrer.
-    canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())
-      .map(Some)
+    canonicalize_path_maybe_not_exists(&self.sys, &path).map(Some)
   }
 
   fn resolve_package_folder_from_specifier(
@@ -209,7 +210,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
     };
 
     let sub_dir = join_package_name(&node_modules_folder, name);
-    if self.fs.is_dir_sync(&sub_dir) {
+    if self.sys.fs_is_dir_no_err(&sub_dir) {
       return Ok(sub_dir);
     }
 
@@ -355,8 +356,10 @@ async fn sync_resolution_with_fs(
   let package_partitions =
     snapshot.all_system_packages_partitioned(system_info);
   let mut cache_futures = FuturesUnordered::new();
-  let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
-    HashMap::with_capacity(package_partitions.packages.len());
+  let mut newest_packages_by_name: HashMap<
+    &StackString,
+    &NpmResolutionPackage,
+  > = HashMap::with_capacity(package_partitions.packages.len());
   let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
   let mut lifecycle_scripts =
     super::common::lifecycle_scripts::LifecycleScripts::new(
@@ -536,7 +539,7 @@ async fn sync_resolution_with_fs(
     }
   }
 
-  let mut found_names: HashMap<&String, &PackageNv> = HashMap::new();
+  let mut found_names: HashMap<&StackString, &PackageNv> = HashMap::new();
 
   // set of node_modules in workspace packages that we've already ensured exist
   let mut existing_child_node_modules_dirs: HashSet<PathBuf> = HashSet::new();
@@ -922,7 +925,13 @@ impl SetupCache {
     }
 
     bincode::serialize(&self.current).ok().and_then(|data| {
-      atomic_write_file_with_retries(&self.file_path, data, CACHE_PERM).ok()
+      atomic_write_file_with_retries(
+        &CliSys::default(),
+        &self.file_path,
+        &data,
+        CACHE_PERM,
+      )
+      .ok()
     });
     true
   }
@@ -1012,10 +1021,10 @@ fn get_package_folder_id_from_folder_name(
 ) -> Option<NpmPackageCacheFolderId> {
   let folder_name = folder_name.replace('+', "/");
   let (name, ending) = folder_name.rsplit_once('@')?;
-  let name = if let Some(encoded_name) = name.strip_prefix('_') {
-    mixed_case_package_name_decode(encoded_name)?
+  let name: StackString = if let Some(encoded_name) = name.strip_prefix('_') {
+    StackString::from_string(mixed_case_package_name_decode(encoded_name)?)
   } else {
-    name.to_string()
+    name.into()
  };
   let (raw_version, copy_index) = match ending.split_once('_') {
     Some((raw_version, copy_index)) => {
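
get_package_folder_id_from_folder_name above reverses the encoding used for node_modules/.deno folder names, where the '/' of a scoped package name is stored as '+'. A std-only sketch of the name/version split (the mixed-case decoding branch and copy-index suffix are omitted; the folder name is illustrative):

    fn split_folder_name(folder_name: &str) -> Option<(String, String)> {
      // "@denotest+add@1.0.0" was written for "@denotest/add" at 1.0.0.
      let folder_name = folder_name.replace('+', "/");
      let (name, version) = folder_name.rsplit_once('@')?;
      Some((name.to_string(), version.to_string()))
    }

    fn main() {
      assert_eq!(
        split_folder_name("@denotest+add@1.0.0"),
        Some(("@denotest/add".to_string(), "1.0.0".to_string()))
      );
    }
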
diff --git a/cli/npm/managed/resolvers/mod.rs b/cli/npm/managed/resolvers/mod.rs
index 736270749f..c2fc8d2d92 100644
--- a/cli/npm/managed/resolvers/mod.rs
+++ b/cli/npm/managed/resolvers/mod.rs
@@ -7,8 +7,8 @@ mod local;
 use std::path::PathBuf;
 use std::sync::Arc;
 
+use crate::sys::CliSys;
 use deno_npm::NpmSystemInfo;
-use deno_runtime::deno_fs::FileSystem;
 
 use crate::args::LifecycleScriptsConfig;
 use crate::args::NpmInstallDepsProvider;
@@ -25,11 +25,11 @@ use super::resolution::NpmResolution;
 
 #[allow(clippy::too_many_arguments)]
 pub fn create_npm_fs_resolver(
-  fs: Arc<dyn FileSystem>,
   npm_cache: Arc<CliNpmCache>,
   npm_install_deps_provider: &Arc<NpmInstallDepsProvider>,
   progress_bar: &ProgressBar,
   resolution: Arc<NpmResolution>,
+  sys: CliSys,
   tarball_cache: Arc<CliNpmTarballCache>,
   maybe_node_modules_path: Option<PathBuf>,
   system_info: NpmSystemInfo,
@@ -38,10 +38,10 @@ pub fn create_npm_fs_resolver(
   match maybe_node_modules_path {
     Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
       npm_cache,
-      fs,
       npm_install_deps_provider.clone(),
       progress_bar.clone(),
       resolution,
+      sys,
       tarball_cache,
       node_modules_folder,
       system_info,
@@ -49,9 +49,9 @@ pub fn create_npm_fs_resolver(
     )),
     None => Arc::new(GlobalNpmPackageResolver::new(
       npm_cache,
-      fs,
       tarball_cache,
       resolution,
+      sys,
       system_info,
       lifecycle_scripts,
     )),
diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs
index b39e0a340d..34eaf21419 100644
--- a/cli/npm/mod.rs
+++ b/cli/npm/mod.rs
@@ -7,6 +7,7 @@ use std::borrow::Cow;
 use std::path::Path;
 use std::sync::Arc;
 
+use crate::sys::CliSys;
 use dashmap::DashMap;
 use deno_core::error::AnyError;
 use deno_core::serde_json;
@@ -17,7 +18,6 @@ use deno_resolver::npm::ByonmInNpmPackageChecker;
 use deno_resolver::npm::ByonmNpmResolver;
 use deno_resolver::npm::CliNpmReqResolver;
 use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
-use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::NodePermissions;
 use deno_runtime::ops::process::NpmProcessStateProvider;
 use deno_semver::package::PackageNv;
@@ -28,11 +28,8 @@ use managed::create_managed_in_npm_pkg_checker;
 use node_resolver::InNpmPackageChecker;
 use node_resolver::NpmPackageFolderResolver;
 
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::http_util::HttpClientProvider;
-use crate::util::fs::atomic_write_file_with_retries_and_fs;
-use crate::util::fs::hard_link_dir_recursive;
-use crate::util::fs::AtomicWriteFileFsAdapter;
 use crate::util::progress_bar::ProgressBar;
 
 pub use self::byonm::CliByonmNpmResolver;
@@ -43,26 +40,24 @@ pub use self::managed::CliNpmResolverManagedSnapshotOption;
 pub use self::managed::ManagedCliNpmResolver;
 pub use self::managed::PackageCaching;
 
-pub type CliNpmTarballCache = deno_npm_cache::TarballCache<CliNpmCacheEnv>;
-pub type CliNpmCache = deno_npm_cache::NpmCache<CliNpmCacheEnv>;
+pub type CliNpmTarballCache =
+  deno_npm_cache::TarballCache<CliNpmCacheHttpClient, CliSys>;
+pub type CliNpmCache = deno_npm_cache::NpmCache<CliSys>;
 pub type CliNpmRegistryInfoProvider =
-  deno_npm_cache::RegistryInfoProvider<CliNpmCacheEnv>;
+  deno_npm_cache::RegistryInfoProvider<CliNpmCacheHttpClient, CliSys>;
 
 #[derive(Debug)]
-pub struct CliNpmCacheEnv {
-  fs: Arc<dyn FileSystem>,
+pub struct CliNpmCacheHttpClient {
   http_client_provider: Arc<HttpClientProvider>,
   progress_bar: ProgressBar,
 }
 
-impl CliNpmCacheEnv {
+impl CliNpmCacheHttpClient {
   pub fn new(
-    fs: Arc<dyn FileSystem>,
     http_client_provider: Arc<HttpClientProvider>,
     progress_bar: ProgressBar,
   ) -> Self {
     Self {
-      fs,
       http_client_provider,
       progress_bar,
     }
@@ -70,35 +65,7 @@ impl CliNpmCacheHttpClient {
 }
 
 #[async_trait::async_trait(?Send)]
-impl deno_npm_cache::NpmCacheEnv for CliNpmCacheEnv {
-  fn exists(&self, path: &Path) -> bool {
-    self.fs.exists_sync(path)
-  }
-
-  fn hard_link_dir_recursive(
-    &self,
-    from: &Path,
-    to: &Path,
-  ) -> Result<(), AnyError> {
-    // todo(dsherret): use self.fs here instead
-    hard_link_dir_recursive(from, to)
-  }
-
-  fn atomic_write_file_with_retries(
-    &self,
-    file_path: &Path,
-    data: &[u8],
-  ) -> std::io::Result<()> {
-    atomic_write_file_with_retries_and_fs(
-      &AtomicWriteFileFsAdapter {
-        fs: self.fs.as_ref(),
-        write_mode: crate::cache::CACHE_PERM,
-      },
-      file_path,
-      data,
-    )
-  }
-
+impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient {
   async fn download_with_retries_on_any_tokio_runtime(
     &self,
     url: Url,
@@ -115,14 +82,14 @@ impl deno_npm_cache::NpmCacheEnv for CliNpmCacheEnv {
       .download_with_progress_and_retries(url, maybe_auth_header, &guard)
       .await
       .map_err(|err| {
-        use crate::http_util::DownloadError::*;
-        let status_code = match &err {
+        use crate::http_util::DownloadErrorKind::*;
+        let status_code = match err.as_kind() {
           Fetch { .. }
           | UrlParse { .. }
           | HttpParse { .. }
           | Json { .. }
           | ToStr { .. }
-          | NoRedirectHeader { .. }
+          | RedirectHeaderParse { .. }
           | TooManyRedirects => None,
           BadResponse(bad_response_error) => {
             Some(bad_response_error.status_code)
@@ -232,13 +199,13 @@ pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
 pub struct NpmFetchResolver {
   nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
   info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
-  file_fetcher: Arc<FileFetcher>,
+  file_fetcher: Arc<CliFileFetcher>,
   npmrc: Arc<ResolvedNpmRc>,
 }
 
 impl NpmFetchResolver {
   pub fn new(
-    file_fetcher: Arc<FileFetcher>,
+    file_fetcher: Arc<CliFileFetcher>,
     npmrc: Arc<ResolvedNpmRc>,
   ) -> Self {
     Self {
diff --git a/cli/ops/lint.rs b/cli/ops/lint.rs
new file mode 100644
index 0000000000..c38ac0c8a2
--- /dev/null
+++ b/cli/ops/lint.rs
@@ -0,0 +1,34 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use deno_ast::MediaType;
+use deno_ast::ModuleSpecifier;
+use deno_core::error::generic_error;
+use deno_core::error::AnyError;
+use deno_core::op2;
+
+use crate::tools::lint;
+
+deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],);
+
+#[op2]
+#[buffer]
+fn op_lint_create_serialized_ast(
+  #[string] file_name: &str,
+  #[string] source: String,
+) -> Result<Vec<u8>, AnyError> {
+  let file_text = deno_ast::strip_bom(source);
+  let path = std::env::current_dir()?.join(file_name);
+  let specifier = ModuleSpecifier::from_file_path(&path).map_err(|_| {
+    generic_error(format!("Failed to parse path as URL: {}", path.display()))
+  })?;
+  let media_type = MediaType::from_specifier(&specifier);
+  let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
+    specifier,
+    text: file_text.into(),
+    media_type,
+    capture_tokens: false,
+    scope_analysis: false,
+    maybe_syntax: None,
+  })?;
+  Ok(lint::serialize_ast_to_buffer(&parsed_source))
+}
diff --git a/cli/ops/mod.rs b/cli/ops/mod.rs
index 230d268ab4..4ac1618816 100644
--- a/cli/ops/mod.rs
+++ b/cli/ops/mod.rs
@@ -2,4 +2,5 @@
 
 pub mod bench;
 pub mod jupyter;
+pub mod lint;
 pub mod testing;
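
The new op_lint_create_serialized_ast above drives deno_ast::parse_program with token capture and scope analysis disabled, presumably because the serialized-AST consumer needs neither. A sketch of the same entry point in isolation, with the same flags; the specifier is hypothetical and the exact error type of parse_program is an assumption:

    use deno_ast::{MediaType, ModuleSpecifier, ParseParams};

    fn parse_for_serialization(
      source: &str,
    ) -> Result<deno_ast::ParsedSource, deno_ast::ParseDiagnostic> {
      // Hypothetical file name; the real op derives it from the caller.
      let specifier = ModuleSpecifier::parse("file:///example.ts").unwrap();
      deno_ast::parse_program(ParseParams {
        specifier: specifier.clone(),
        text: source.into(),
        media_type: MediaType::from_specifier(&specifier),
        capture_tokens: false, // same flags as the op above
        scope_analysis: false,
        maybe_syntax: None,
      })
    }
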
diff --git a/cli/resolver.rs b/cli/resolver.rs
index f5c3f68f36..c4c8ef8b36 100644
--- a/cli/resolver.rs
+++ b/cli/resolver.rs
@@ -1,5 +1,11 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+use std::borrow::Cow;
+use std::path::Path;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use crate::sys::CliSys;
 use async_trait::async_trait;
 use dashmap::DashMap;
 use dashmap::DashSet;
@@ -20,16 +26,13 @@ use deno_npm::resolution::NpmResolutionError;
 use deno_resolver::sloppy_imports::SloppyImportsResolver;
 use deno_runtime::colors;
 use deno_runtime::deno_fs;
-use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::is_builtin_node_module;
-use deno_runtime::deno_node::DenoFsNodeResolverEnv;
+use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
 use deno_semver::package::PackageReq;
 use node_resolver::NodeResolutionKind;
 use node_resolver::ResolutionMode;
-use std::borrow::Cow;
-use std::path::Path;
-use std::path::PathBuf;
-use std::sync::Arc;
+use sys_traits::FsMetadata;
+use sys_traits::FsMetadataValue;
 use thiserror::Error;
 
 use crate::args::NpmCachingStrategy;
@@ -40,18 +43,17 @@ use crate::npm::InnerCliNpmResolverRef;
 use crate::util::sync::AtomicFlag;
 use crate::util::text_encoding::from_utf8_lossy_cow;
 
-pub type CjsTracker = deno_resolver::cjs::CjsTracker<DenoFsNodeResolverEnv>;
-pub type IsCjsResolver =
-  deno_resolver::cjs::IsCjsResolver<DenoFsNodeResolverEnv>;
+pub type CjsTracker = deno_resolver::cjs::CjsTracker<CliSys>;
+pub type IsCjsResolver = deno_resolver::cjs::IsCjsResolver<CliSys>;
 pub type CliSloppyImportsResolver =
   SloppyImportsResolver<SloppyImportsCachedFs>;
 pub type CliDenoResolver = deno_resolver::DenoResolver<
-  CliDenoResolverFs,
-  DenoFsNodeResolverEnv,
+  RealIsBuiltInNodeModuleChecker,
   SloppyImportsCachedFs,
+  CliSys,
 >;
 pub type CliNpmReqResolver =
-  deno_resolver::npm::NpmReqResolver<CliDenoResolverFs, DenoFsNodeResolverEnv>;
+  deno_resolver::npm::NpmReqResolver<RealIsBuiltInNodeModuleChecker, CliSys>;
 
 pub struct ModuleCodeStringSource {
   pub code: ModuleSourceCode,
@@ -59,53 +61,6 @@ pub struct ModuleCodeStringSource {
   pub media_type: MediaType,
 }
 
-#[derive(Debug, Clone)]
-pub struct CliDenoResolverFs(pub Arc<dyn FileSystem>);
-
-impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
-  fn read_to_string_lossy(
-    &self,
-    path: &Path,
-  ) -> std::io::Result<Cow<'static, str>> {
-    self
-      .0
-      .read_text_file_lossy_sync(path, None)
-      .map_err(|e| e.into_io_error())
-  }
-
-  fn realpath_sync(&self, path: &Path) -> std::io::Result<PathBuf> {
-    self.0.realpath_sync(path).map_err(|e| e.into_io_error())
-  }
-
-  fn exists_sync(&self, path: &Path) -> bool {
-    self.0.exists_sync(path)
-  }
-
-  fn is_dir_sync(&self, path: &Path) -> bool {
-    self.0.is_dir_sync(path)
-  }
-
-  fn read_dir_sync(
-    &self,
-    dir_path: &Path,
-  ) -> std::io::Result<Vec<deno_resolver::fs::DirEntry>> {
-    self
-      .0
-      .read_dir_sync(dir_path)
-      .map(|entries| {
-        entries
-          .into_iter()
-          .map(|e| deno_resolver::fs::DirEntry {
-            name: e.name,
-            is_file: e.is_file,
-            is_directory: e.is_directory,
-          })
-          .collect::<Vec<_>>()
-      })
-      .map_err(|err| err.into_io_error())
-  }
-}
-
 #[derive(Debug, Error)]
 #[error("{media_type} files are not supported in npm packages: {specifier}")]
 pub struct NotSupportedKindInNpmError {
@@ -440,7 +395,7 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
 
 #[derive(Debug)]
 pub struct SloppyImportsCachedFs {
-  fs: Arc<dyn FileSystem>,
+  sys: CliSys,
   cache: Option<
     DashMap<
       PathBuf,
      Option<deno_resolver::sloppy_imports::SloppyImportsFsEntry>,
     >,
   >,
 }
 
 impl SloppyImportsCachedFs {
-  pub fn new(fs: Arc<dyn FileSystem>) -> Self {
+  pub fn new(sys: CliSys) -> Self {
     Self {
-      fs,
+      sys,
       cache: Some(Default::default()),
     }
   }
 
-  pub fn new_without_stat_cache(fs: Arc<dyn FileSystem>) -> Self {
-    Self { fs, cache: None }
+  pub fn new_without_stat_cache(fs: CliSys) -> Self {
+    Self {
+      sys: fs,
+      cache: None,
+    }
   }
 }
@@ -475,10 +433,10 @@ impl deno_resolver::sloppy_imports::SloppyImportResolverFs
       }
     }
 
-    let entry = self.fs.stat_sync(path).ok().and_then(|stat| {
-      if stat.is_file {
+    let entry = self.sys.fs_metadata(path).ok().and_then(|stat| {
+      if stat.file_type().is_file() {
         Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::File)
-      } else if stat.is_directory {
+      } else if stat.file_type().is_dir() {
         Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::Dir)
       } else {
         None
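
SloppyImportsCachedFs above now probes the filesystem through sys.fs_metadata() and memoizes whether each path was a file or a directory. A simplified std-only rendition of that stat cache:

    use std::collections::HashMap;
    use std::path::{Path, PathBuf};

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum FsEntry {
      File,
      Dir,
    }

    #[derive(Default)]
    struct StatCache {
      cache: HashMap<PathBuf, Option<FsEntry>>,
    }

    impl StatCache {
      fn entry(&mut self, path: &Path) -> Option<FsEntry> {
        if let Some(hit) = self.cache.get(path) {
          return *hit;
        }
        // Follows symlinks like fs_metadata above; anything that is
        // neither a file nor a directory maps to None.
        let computed = std::fs::metadata(path).ok().and_then(|m| {
          if m.is_file() {
            Some(FsEntry::File)
          } else if m.is_dir() {
            Some(FsEntry::Dir)
          } else {
            None
          }
        });
        self.cache.insert(path.to_path_buf(), computed);
        computed
      }
    }
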
diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json
index 1e3abb2c0d..d644072f4c 100644
--- a/cli/schemas/config-file.v1.json
+++ b/cli/schemas/config-file.v1.json
@@ -446,7 +446,6 @@
         },
         "command": {
           "type": "string",
-          "required": true,
           "description": "The task to execute"
         },
         "dependencies": {
diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs
index 85a22cf837..48af787f18 100644
--- a/cli/standalone/binary.rs
+++ b/cli/standalone/binary.rs
@@ -37,7 +37,6 @@ use deno_core::futures::AsyncReadExt;
 use deno_core::futures::AsyncSeekExt;
 use deno_core::serde_json;
 use deno_core::url::Url;
-use deno_graph::source::RealFileSystem;
 use deno_graph::ModuleGraph;
 use deno_npm::resolution::SerializedNpmResolutionSnapshot;
 use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
@@ -71,7 +70,7 @@ use crate::args::UnstableConfig;
 use crate::cache::DenoDir;
 use crate::cache::FastInsecureHasher;
 use crate::emit::Emitter;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::http_util::HttpClientProvider;
 use crate::npm::CliNpmResolver;
 use crate::npm::InnerCliNpmResolverRef;
@@ -91,6 +90,7 @@ use super::serialization::DenoCompileModuleData;
 use super::serialization::DeserializedDataSection;
 use super::serialization::RemoteModulesStore;
 use super::serialization::RemoteModulesStoreBuilder;
+use super::serialization::SourceMapStore;
 use super::virtual_fs::output_vfs;
 use super::virtual_fs::BuiltVfs;
 use super::virtual_fs::FileBackedVfs;
@@ -98,6 +98,7 @@ use super::virtual_fs::VfsBuilder;
 use super::virtual_fs::VfsFileSubDataKind;
 use super::virtual_fs::VfsRoot;
 use super::virtual_fs::VirtualDirectory;
+use super::virtual_fs::VirtualDirectoryEntries;
 use super::virtual_fs::WindowsSystemRootablePath;
 
 pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
@@ -203,18 +204,25 @@ pub struct Metadata {
   pub otel_config: OtelConfig,
 }
 
+#[allow(clippy::too_many_arguments)]
 fn write_binary_bytes(
   mut file_writer: File,
   original_bin: Vec<u8>,
   metadata: &Metadata,
   npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
   remote_modules: &RemoteModulesStoreBuilder,
+  source_map_store: &SourceMapStore,
   vfs: &BuiltVfs,
   compile_flags: &CompileFlags,
 ) -> Result<(), AnyError> {
-  let data_section_bytes =
-    serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)
-      .context("Serializing binary data section.")?;
+  let data_section_bytes = serialize_binary_data_section(
+    metadata,
+    npm_snapshot,
+    remote_modules,
+    source_map_store,
+    vfs,
+  )
+  .context("Serializing binary data section.")?;
 
   let target = compile_flags.resolve_target();
   if target.contains("linux") {
@@ -251,11 +259,11 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
 }
 
 pub struct StandaloneData {
-  pub fs: Arc<dyn deno_fs::FileSystem>,
   pub metadata: Metadata,
   pub modules: StandaloneModules,
   pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
   pub root_path: PathBuf,
+  pub source_maps: SourceMapStore,
   pub vfs: Arc<FileBackedVfs>,
 }
 
@@ -283,13 +291,12 @@ impl StandaloneModules {
   pub fn read<'a>(
     &'a self,
     specifier: &'a ModuleSpecifier,
+    kind: VfsFileSubDataKind,
   ) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
     if specifier.scheme() == "file" {
       let path = deno_path_util::url_to_file_path(specifier)?;
      let bytes = match self.vfs.file_entry(&path) {
-        Ok(entry) => self
-          .vfs
-          .read_file_all(entry, VfsFileSubDataKind::ModuleGraph)?,
+        Ok(entry) => self.vfs.read_file_all(entry, kind)?,
         Err(err) if err.kind() == ErrorKind::NotFound => {
           match RealFs.read_file_sync(&path, None) {
             Ok(bytes) => bytes,
@@ -307,7 +314,18 @@ impl StandaloneModules {
         data: bytes,
       }))
     } else {
-      self.remote_modules.read(specifier)
+      self.remote_modules.read(specifier).map(|maybe_entry| {
+        maybe_entry.map(|entry| DenoCompileModuleData {
+          media_type: entry.media_type,
+          specifier: entry.specifier,
+          data: match kind {
+            VfsFileSubDataKind::Raw => entry.data,
+            VfsFileSubDataKind::ModuleGraph => {
+              entry.transpiled_data.unwrap_or(entry.data)
+            }
+          },
+        })
+      })
     }
   }
 }
@@ -328,7 +346,8 @@ pub fn extract_standalone(
     mut metadata,
     npm_snapshot,
     remote_modules,
-    mut vfs_dir,
+    source_maps,
+    vfs_root_entries,
     vfs_files_data,
   } = match deserialize_binary_data_section(data)? {
     Some(data_section) => data_section,
@@ -351,20 +370,18 @@ pub fn extract_standalone(
     metadata.argv.push(arg.into_string().unwrap());
   }
   let vfs = {
-    // align the name of the directory with the root dir
-    vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string();
-
     let fs_root = VfsRoot {
-      dir: vfs_dir,
+      dir: VirtualDirectory {
+        // align the name of the directory with the root dir
+        name: root_path.file_name().unwrap().to_string_lossy().to_string(),
+        entries: vfs_root_entries,
+      },
       root_path: root_path.clone(),
       start_file_offset: 0,
     };
     Arc::new(FileBackedVfs::new(Cow::Borrowed(vfs_files_data), fs_root))
   };
-  let fs: Arc<dyn deno_fs::FileSystem> =
-    Arc::new(DenoCompileFileSystem::new(vfs.clone()));
   Ok(Some(StandaloneData {
-    fs,
     metadata,
     modules: StandaloneModules {
       remote_modules,
@@ -372,6 +389,7 @@ pub fn extract_standalone(
     },
     npm_snapshot,
     root_path,
+    source_maps,
     vfs,
   }))
 }
@@ -390,7 +408,7 @@ pub struct DenoCompileBinaryWriter<'a> {
   cli_options: &'a CliOptions,
   deno_dir: &'a DenoDir,
   emitter: &'a Emitter,
-  file_fetcher: &'a FileFetcher,
+  file_fetcher: &'a CliFileFetcher,
   http_client_provider: &'a HttpClientProvider,
   npm_resolver: &'a dyn CliNpmResolver,
   workspace_resolver: &'a WorkspaceResolver,
@@ -404,7 +422,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
     cli_options: &'a CliOptions,
     deno_dir: &'a DenoDir,
     emitter: &'a Emitter,
-    file_fetcher: &'a FileFetcher,
+    file_fetcher: &'a CliFileFetcher,
     http_client_provider: &'a HttpClientProvider,
     npm_resolver: &'a dyn CliNpmResolver,
     workspace_resolver: &'a WorkspaceResolver,
@@ -451,7 +469,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
         )
       }
     }
-    self.write_standalone_binary(options, original_binary).await
+    self.write_standalone_binary(options, original_binary)
   }
 
   async fn get_base_binary(
@@ -554,7 +572,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
 
   /// This functions creates a standalone deno binary by appending a bundle
   /// and magic trailer to the currently executing binary.
  #[allow(clippy::too_many_arguments)]
-  async fn write_standalone_binary(
+  fn write_standalone_binary(
     &self,
     options: WriteBinOptions<'_>,
     original_bin: Vec<u8>,
@@ -598,71 +616,81 @@ impl<'a> DenoCompileBinaryWriter<'a> {
         .with_context(|| format!("Including {}", path.display()))?;
     }
     let mut remote_modules_store = RemoteModulesStoreBuilder::default();
-    let mut code_cache_key_hasher = if self.cli_options.code_cache_enabled() {
-      Some(FastInsecureHasher::new_deno_versioned())
-    } else {
-      None
-    };
+    let mut source_maps = Vec::with_capacity(graph.specifiers_count());
+    // todo(dsherret): transpile in parallel
     for module in graph.modules() {
       if module.specifier().scheme() == "data" {
         continue; // don't store data urls as an entry as they're in the code
       }
-      if let Some(hasher) = &mut code_cache_key_hasher {
-        if let Some(source) = module.source() {
-          hasher.write(module.specifier().as_str().as_bytes());
-          hasher.write(source.as_bytes());
-        }
-      }
-      let (maybe_source, media_type) = match module {
+      let (maybe_original_source, maybe_transpiled, media_type) = match module
+      {
         deno_graph::Module::Js(m) => {
-          let source = if m.media_type.is_emittable() {
+          let original_bytes = m.source.as_bytes().to_vec();
+          let maybe_transpiled = if m.media_type.is_emittable() {
             let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
               &m.specifier,
               m.media_type,
               m.is_script,
             )?;
             let module_kind = ModuleKind::from_is_cjs(is_cjs);
-            let source = self
-              .emitter
-              .emit_parsed_source(
+            let (source, source_map) =
+              self.emitter.emit_parsed_source_for_deno_compile(
                 &m.specifier,
                 m.media_type,
                 module_kind,
                 &m.source,
-              )
-              .await?;
-            source.into_bytes()
+              )?;
+            if source != m.source.as_ref() {
+              source_maps.push((&m.specifier, source_map));
+              Some(source.into_bytes())
+            } else {
+              None
+            }
           } else {
-            m.source.as_bytes().to_vec()
+            None
           };
-          (Some(source), m.media_type)
+          (Some(original_bytes), maybe_transpiled, m.media_type)
         }
         deno_graph::Module::Json(m) => {
-          (Some(m.source.as_bytes().to_vec()), m.media_type)
+          (Some(m.source.as_bytes().to_vec()), None, m.media_type)
         }
         deno_graph::Module::Wasm(m) => {
-          (Some(m.source.to_vec()), MediaType::Wasm)
+          (Some(m.source.to_vec()), None, MediaType::Wasm)
         }
         deno_graph::Module::Npm(_)
         | deno_graph::Module::Node(_)
-        | deno_graph::Module::External(_) => (None, MediaType::Unknown),
+        | deno_graph::Module::External(_) => (None, None, MediaType::Unknown),
       };
-      if module.specifier().scheme() == "file" {
-        let file_path = deno_path_util::url_to_file_path(module.specifier())?;
-        vfs
-          .add_file_with_data(
-            &file_path,
-            match maybe_source {
-              Some(source) => source,
-              None => RealFs.read_file_sync(&file_path, None)?.into_owned(),
-            },
-            VfsFileSubDataKind::ModuleGraph,
-          )
-          .with_context(|| {
-            format!("Failed adding '{}'", file_path.display())
-          })?;
-      } else if let Some(source) = maybe_source {
-        remote_modules_store.add(module.specifier(), media_type, source);
+      if let Some(original_source) = maybe_original_source {
+        if module.specifier().scheme() == "file" {
+          let file_path =
+            deno_path_util::url_to_file_path(module.specifier())?;
+          vfs
+            .add_file_with_data(
+              &file_path,
+              original_source,
+              VfsFileSubDataKind::Raw,
+            )
+            .with_context(|| {
+              format!("Failed adding '{}'", file_path.display())
+            })?;
+          if let Some(transpiled_source) = maybe_transpiled {
+            vfs
+              .add_file_with_data(
+                &file_path,
+                transpiled_source,
+                VfsFileSubDataKind::ModuleGraph,
+              )
+              .with_context(|| {
+                format!("Failed adding '{}'", file_path.display())
+              })?;
+          }
+        } else {
+          remote_modules_store.add(
+            module.specifier(),
+            media_type,
+            original_source,
+            maybe_transpiled,
+          );
+        }
       }
     }
     remote_modules_store.add_redirects(&graph.redirects);
@@ -695,6 +723,28 @@ impl<'a> DenoCompileBinaryWriter<'a> {
       None => StandaloneRelativeFileBaseUrl::WindowsSystemRoot,
     };
 
+    let code_cache_key = if self.cli_options.code_cache_enabled() {
+      let mut hasher = FastInsecureHasher::new_deno_versioned();
+      for module in graph.modules() {
+        if let Some(source) = module.source() {
+          hasher
+            .write(root_dir_url.specifier_key(module.specifier()).as_bytes());
+          hasher.write(source.as_bytes());
+        }
+      }
+      Some(hasher.finish())
+    } else {
+      None
+    };
+
+    let mut source_map_store = SourceMapStore::with_capacity(source_maps.len());
+    for (specifier, source_map) in source_maps {
+      source_map_store.add(
+        Cow::Owned(root_dir_url.specifier_key(specifier).into_owned()),
+        Cow::Owned(source_map.into_bytes()),
+      );
+    }
+
     let node_modules = match self.npm_resolver.as_inner() {
       InnerCliNpmResolverRef::Managed(_) => {
         npm_snapshot.as_ref().map(|_| NodeModules::Managed {
@@ -742,7 +792,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
     let metadata = Metadata {
       argv: compile_flags.args.clone(),
       seed: self.cli_options.seed(),
-      code_cache_key: code_cache_key_hasher.map(|h| h.finish()),
+      code_cache_key,
       location: self.cli_options.location_flag().clone(),
       permissions: self.cli_options.permission_flags().clone(),
       v8_flags: self.cli_options.v8_flags().clone(),
@@ -809,6 +859,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
       &metadata,
       npm_snapshot.map(|s| s.into_serialized()),
       &remote_modules_store,
+      &source_map_store,
       &vfs,
       compile_flags,
     )
@@ -903,10 +954,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
         root_dir.name = DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME.to_string();
         let mut new_entries = Vec::with_capacity(root_dir.entries.len());
         let mut localhost_entries = IndexMap::new();
-        for entry in std::mem::take(&mut root_dir.entries) {
+        for entry in root_dir.entries.take_inner() {
           match entry {
-            VfsEntry::Dir(dir) => {
-              for entry in dir.entries {
+            VfsEntry::Dir(mut dir) => {
+              for entry in dir.entries.take_inner() {
                 log::debug!("Flattening {} into node_modules", entry.name());
                 if let Some(existing) =
                   localhost_entries.insert(entry.name().to_string(), entry)
@@ -925,11 +976,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
         }
         new_entries.push(VfsEntry::Dir(VirtualDirectory {
           name: "localhost".to_string(),
-          entries: localhost_entries.into_iter().map(|(_, v)| v).collect(),
+          entries: VirtualDirectoryEntries::new(
+            localhost_entries.into_iter().map(|(_, v)| v).collect(),
+          ),
         }));
-        // needs to be sorted by name
-        new_entries.sort_by(|a, b| a.name().cmp(b.name()));
-        root_dir.entries = new_entries;
+        root_dir.entries = VirtualDirectoryEntries::new(new_entries);
 
         // it's better to not expose the user's cache directory, so take it out
         // of there
@@ -937,10 +988,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
         let parent_dir = vfs.get_dir_mut(parent).unwrap();
         let index = parent_dir
           .entries
-          .iter()
-          .position(|entry| {
-            entry.name() == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME
-          })
+          .binary_search(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME)
           .unwrap();
         let npm_global_cache_dir_entry = parent_dir.entries.remove(index);
 
@@ -950,11 +998,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
           Cow::Borrowed(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME);
         for ancestor in parent.ancestors() {
           let dir = vfs.get_dir_mut(ancestor).unwrap();
-          if let Some(index) = dir
-            .entries
-            .iter()
-            .position(|entry| entry.name() == last_name)
-          {
+          if let Ok(index) = dir.entries.binary_search(&last_name) {
             dir.entries.remove(index);
           }
           last_name = Cow::Owned(dir.name.clone());
@@ -965,7 +1009,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
 
         // now build the vfs and add the global cache dir entry there
         let mut built_vfs = vfs.build();
-        built_vfs.root.insert_entry(npm_global_cache_dir_entry);
+        built_vfs.entries.insert(npm_global_cache_dir_entry);
         built_vfs
       }
       InnerCliNpmResolverRef::Byonm(_) => vfs.build(),
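
The code-cache key above is now computed in one pass over the graph using root-relative specifier keys (root_dir_url.specifier_key), which keeps the hash independent of the absolute build directory. The hashing shape, sketched with std's hasher standing in for FastInsecureHasher:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::Hasher;

    // (specifier_key, source) pairs; keys are assumed already root-relative.
    fn code_cache_key<'a>(
      modules: impl Iterator<Item = (&'a str, &'a str)>,
    ) -> u64 {
      let mut hasher = DefaultHasher::new();
      for (specifier_key, source) in modules {
        hasher.write(specifier_key.as_bytes());
        hasher.write(source.as_bytes());
      }
      hasher.finish()
    }
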
diff --git a/cli/standalone/code_cache.rs b/cli/standalone/code_cache.rs
index 9580b9b44e..ec89c3ab1b 100644
--- a/cli/standalone/code_cache.rs
+++ b/cli/standalone/code_cache.rs
@@ -15,11 +15,11 @@ use deno_core::anyhow::bail;
 use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;
 use deno_core::unsync::sync::AtomicFlag;
+use deno_path_util::get_atomic_path;
 use deno_runtime::code_cache::CodeCache;
 use deno_runtime::code_cache::CodeCacheType;
 
 use crate::cache::FastInsecureHasher;
-use crate::util::path::get_atomic_file_path;
 use crate::worker::CliCodeCache;
 
 enum CodeCacheStrategy {
@@ -189,7 +189,8 @@ impl FirstRunCodeCacheStrategy {
    cache_data: &HashMap<CodeCacheKey, Vec<u8>>,
   ) {
     let count = cache_data.len();
-    let temp_file = get_atomic_file_path(&self.file_path);
+    let temp_file =
+      get_atomic_path(&sys_traits::impls::RealSys, &self.file_path);
     match serialize(&temp_file, self.cache_key, cache_data) {
       Ok(()) => {
         if let Err(err) = std::fs::rename(&temp_file, &self.file_path) {
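
get_atomic_path above yields a sibling temp path so the cache file can be written in full and then renamed into place; on the same filesystem the rename is atomic, so readers never observe a half-written file. The pattern in miniature (the temp-name scheme here is illustrative, not the one get_atomic_path uses):

    use std::io::Write;
    use std::path::Path;

    fn atomic_write(path: &Path, data: &[u8]) -> std::io::Result<()> {
      let temp = path.with_extension("cache.tmp"); // illustrative temp name
      std::fs::File::create(&temp)?.write_all(data)?;
      std::fs::rename(&temp, path) // atomic on the same filesystem
    }
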
diff --git a/cli/standalone/file_system.rs b/cli/standalone/file_system.rs
index 48dc907570..0a11d4550f 100644
--- a/cli/standalone/file_system.rs
+++ b/cli/standalone/file_system.rs
@@ -1,9 +1,13 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+use std::borrow::Cow;
+use std::io::ErrorKind;
 use std::path::Path;
 use std::path::PathBuf;
 use std::rc::Rc;
 use std::sync::Arc;
+use std::time::Duration;
+use std::time::SystemTime;
 
 use deno_runtime::deno_fs::AccessCheckCb;
 use deno_runtime::deno_fs::FileSystem;
@@ -15,8 +19,16 @@ use deno_runtime::deno_io::fs::File;
 use deno_runtime::deno_io::fs::FsError;
 use deno_runtime::deno_io::fs::FsResult;
 use deno_runtime::deno_io::fs::FsStat;
+use sys_traits::boxed::BoxedFsDirEntry;
+use sys_traits::boxed::BoxedFsMetadataValue;
+use sys_traits::boxed::FsMetadataBoxed;
+use sys_traits::boxed::FsReadDirBoxed;
+use sys_traits::FsMetadata;
 
 use super::virtual_fs::FileBackedVfs;
+use super::virtual_fs::FileBackedVfsDirEntry;
+use super::virtual_fs::FileBackedVfsFile;
+use super::virtual_fs::FileBackedVfsMetadata;
 use super::virtual_fs::VfsFileSubDataKind;
 
 #[derive(Debug, Clone)]
@@ -82,7 +94,7 @@ impl FileSystem for DenoCompileFileSystem {
     access_check: Option<AccessCheckCb>,
   ) -> FsResult<Rc<dyn File>> {
     if self.0.is_path_within(path) {
-      Ok(self.0.open_file(path)?)
+      Ok(Rc::new(self.0.open_file(path)?))
     } else {
       RealFs.open_sync(path, options, access_check)
     }
@@ -94,7 +106,7 @@ impl FileSystem for DenoCompileFileSystem {
     access_check: Option<AccessCheckCb<'static>>,
   ) -> FsResult<Rc<dyn File>> {
     if self.0.is_path_within(&path) {
-      Ok(self.0.open_file(&path)?)
+      Ok(Rc::new(self.0.open_file(&path)?))
     } else {
       RealFs.open_async(path, options, access_check).await
     }
@@ -214,14 +226,14 @@ impl FileSystem for DenoCompileFileSystem {
 
   fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
     if self.0.is_path_within(path) {
-      Ok(self.0.stat(path)?)
+      Ok(self.0.stat(path)?.as_fs_stat())
     } else {
       RealFs.stat_sync(path)
     }
   }
   async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
     if self.0.is_path_within(&path) {
-      Ok(self.0.stat(&path)?)
+      Ok(self.0.stat(&path)?.as_fs_stat())
     } else {
       RealFs.stat_async(path).await
     }
@@ -229,14 +241,14 @@ impl FileSystem for DenoCompileFileSystem {
 
   fn lstat_sync(&self, path: &Path) -> FsResult<FsStat> {
     if self.0.is_path_within(path) {
-      Ok(self.0.lstat(path)?)
+      Ok(self.0.lstat(path)?.as_fs_stat())
     } else {
       RealFs.lstat_sync(path)
     }
   }
   async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
     if self.0.is_path_within(&path) {
-      Ok(self.0.lstat(&path)?)
+      Ok(self.0.lstat(&path)?.as_fs_stat())
     } else {
       RealFs.lstat_async(path).await
     }
@@ -397,3 +409,428 @@ impl FileSystem for DenoCompileFileSystem {
     .await
   }
 }
+
+impl sys_traits::BaseFsHardLink for DenoCompileFileSystem {
+  #[inline]
+  fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
+    self.link_sync(src, dst).map_err(|err| err.into_io_error())
+  }
+}
+
+impl sys_traits::BaseFsRead for DenoCompileFileSystem {
+  #[inline]
+  fn base_fs_read(&self, path: &Path) -> std::io::Result<Cow<'static, [u8]>> {
+    self
+      .read_file_sync(path, None)
+      .map_err(|err| err.into_io_error())
+  }
+}
+
+impl sys_traits::FsMetadataValue for FileBackedVfsMetadata {
+  fn file_type(&self) -> sys_traits::FileType {
+    self.file_type
+  }
+
+  fn len(&self) -> u64 {
+    self.len
+  }
+
+  fn accessed(&self) -> std::io::Result<SystemTime> {
+    Err(not_supported("accessed time"))
+  }
+
+  fn created(&self) -> std::io::Result<SystemTime> {
+    Err(not_supported("created time"))
+  }
+
+  fn changed(&self) -> std::io::Result<SystemTime> {
+    Err(not_supported("changed time"))
+  }
+
+  fn modified(&self) -> std::io::Result<SystemTime> {
+    Err(not_supported("modified time"))
+  }
+
+  fn dev(&self) -> std::io::Result<u64> {
+    Ok(0)
+  }
+
+  fn ino(&self) -> std::io::Result<u64> {
+    Ok(0)
+  }
+
+  fn mode(&self) -> std::io::Result<u32> {
+    Ok(0)
+  }
+
+  fn nlink(&self) -> std::io::Result<u64> {
+    Ok(0)
+  }
+
+  fn uid(&self) -> std::io::Result<u32> {
+    Ok(0)
+  }
+
+  fn gid(&self) -> std::io::Result<u32> {
+    Ok(0)
+  }
+
+  fn rdev(&self) -> std::io::Result<u64> {
+    Ok(0)
+  }
+
+  fn blksize(&self) -> std::io::Result<u64> {
+    Ok(0)
+  }
+
+  fn blocks(&self) -> std::io::Result<u64> {
+    Ok(0)
+  }
+
+  fn is_block_device(&self) -> std::io::Result<bool> {
+    Ok(false)
+  }
+
+  fn is_char_device(&self) -> std::io::Result<bool> {
+    Ok(false)
+  }
+
+  fn is_fifo(&self) -> std::io::Result<bool> {
+    Ok(false)
+  }
+
+  fn is_socket(&self) -> std::io::Result<bool> {
+    Ok(false)
+  }
+
+  fn file_attributes(&self) -> std::io::Result<u32> {
+    Ok(0)
+  }
+}
+
+fn not_supported(name: &str) -> std::io::Error {
+  std::io::Error::new(
+    ErrorKind::Unsupported,
+    format!(
+      "{} is not supported for an embedded deno compile file",
+      name
+    ),
+  )
+}
+
+impl sys_traits::FsDirEntry for FileBackedVfsDirEntry {
+  type Metadata = BoxedFsMetadataValue;
+
+  fn file_name(&self) -> Cow<std::ffi::OsStr> {
+    Cow::Borrowed(self.metadata.name.as_ref())
+  }
+
+  fn file_type(&self) -> std::io::Result<sys_traits::FileType> {
+    Ok(self.metadata.file_type)
+  }
+
+  fn metadata(&self) -> std::io::Result<Self::Metadata> {
+    Ok(BoxedFsMetadataValue(Box::new(self.metadata.clone())))
+  }
+
+  fn path(&self) -> Cow<Path> {
+    Cow::Owned(self.parent_path.join(&self.metadata.name))
+  }
+}
+
+impl sys_traits::BaseFsReadDir for DenoCompileFileSystem {
+  type ReadDirEntry = BoxedFsDirEntry;
+
+  fn base_fs_read_dir(
+    &self,
+    path: &Path,
+  ) -> std::io::Result<
+    Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>> + '_>,
+  > {
+    if self.0.is_path_within(path) {
+      let entries = self.0.read_dir_with_metadata(path)?;
+      Ok(Box::new(
+        entries.map(|entry| Ok(BoxedFsDirEntry::new(entry))),
+      ))
+    } else {
+      #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+      sys_traits::impls::RealSys.fs_read_dir_boxed(path)
+    }
+  }
+}
+
+impl sys_traits::BaseFsCanonicalize for DenoCompileFileSystem {
+  #[inline]
+  fn base_fs_canonicalize(&self, path: &Path) -> std::io::Result<PathBuf> {
+    self.realpath_sync(path).map_err(|err| err.into_io_error())
+  }
+}
+
+impl sys_traits::BaseFsMetadata for DenoCompileFileSystem {
+  type Metadata = BoxedFsMetadataValue;
+
+  #[inline]
+  fn base_fs_metadata(&self, path: &Path) -> std::io::Result<Self::Metadata> {
+    if self.0.is_path_within(path) {
+      Ok(BoxedFsMetadataValue::new(self.0.stat(path)?))
+    } else {
+      #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+      sys_traits::impls::RealSys.fs_metadata_boxed(path)
+    }
+  }
+
+  #[inline]
+  fn base_fs_symlink_metadata(
+    &self,
+    path: &Path,
+  ) -> std::io::Result<Self::Metadata> {
+    if self.0.is_path_within(path) {
+      Ok(BoxedFsMetadataValue::new(self.0.lstat(path)?))
+    } else {
+      #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+      sys_traits::impls::RealSys.fs_symlink_metadata_boxed(path)
+    }
+  }
+}
+
+impl sys_traits::BaseFsCreateDir for DenoCompileFileSystem {
+  #[inline]
+  fn base_fs_create_dir(
+    &self,
+    path: &Path,
+    options: &sys_traits::CreateDirOptions,
+  ) -> std::io::Result<()> {
+    self
+      .mkdir_sync(path, options.recursive, options.mode)
+      .map_err(|err| err.into_io_error())
+  }
+}
+
+impl sys_traits::BaseFsRemoveFile for DenoCompileFileSystem {
+  #[inline]
+  fn base_fs_remove_file(&self, path: &Path) -> std::io::Result<()> {
+    self
+      .remove_sync(path, false)
+      .map_err(|err| err.into_io_error())
+  }
+}
+
+impl sys_traits::BaseFsRename for DenoCompileFileSystem {
+  #[inline]
+  fn base_fs_rename(&self, from: &Path, to: &Path) -> std::io::Result<()> {
+    self
+      .rename_sync(from, to)
+      .map_err(|err| err.into_io_error())
+  }
+}
+
+pub enum FsFileAdapter {
+  Real(sys_traits::impls::RealFsFile),
+  Vfs(FileBackedVfsFile),
+}
+
+impl sys_traits::FsFile for FsFileAdapter {}
+
+impl sys_traits::FsFileAsRaw for FsFileAdapter {
+  #[cfg(windows)]
+  fn fs_file_as_raw_handle(&self) -> Option<std::os::windows::io::RawHandle> {
+    match self {
+      Self::Real(file) => file.fs_file_as_raw_handle(),
+      Self::Vfs(_) => None,
+    }
+  }
+
+  #[cfg(unix)]
+  fn fs_file_as_raw_fd(&self) -> Option<std::os::fd::RawFd> {
+    match self {
+      Self::Real(file) => file.fs_file_as_raw_fd(),
+      Self::Vfs(_) => None,
+    }
+  }
+}
+
+impl sys_traits::FsFileSyncData for FsFileAdapter {
+  fn fs_file_sync_data(&mut self) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.fs_file_sync_data(),
+      Self::Vfs(_) => Ok(()),
+    }
+  }
+}
+
+impl sys_traits::FsFileSyncAll for FsFileAdapter {
+  fn fs_file_sync_all(&mut self) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.fs_file_sync_all(),
+      Self::Vfs(_) => Ok(()),
+    }
+  }
+}
+
+impl sys_traits::FsFileSetPermissions for FsFileAdapter {
+  #[inline]
+  fn fs_file_set_permissions(&mut self, mode: u32) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.fs_file_set_permissions(mode),
+      Self::Vfs(_) => Ok(()),
+    }
+  }
+}
+
+impl std::io::Read for FsFileAdapter {
+  #[inline]
+  fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+    match self {
+      Self::Real(file) => file.read(buf),
+      Self::Vfs(file) => file.read_to_buf(buf),
+    }
+  }
+}
+
+impl std::io::Seek for FsFileAdapter {
+  fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
+    match self {
+      Self::Real(file) => file.seek(pos),
+      Self::Vfs(file) => file.seek(pos),
+    }
+  }
+}
+
+impl std::io::Write for FsFileAdapter {
+  #[inline]
+  fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+    match self {
+      Self::Real(file) => file.write(buf),
+      Self::Vfs(_) => Err(not_supported("writing files")),
+    }
+  }
+
+  #[inline]
+  fn flush(&mut self) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.flush(),
+      Self::Vfs(_) => Err(not_supported("writing files")),
+    }
+  }
+}
+
+impl sys_traits::FsFileSetLen for FsFileAdapter {
+  #[inline]
+  fn fs_file_set_len(&mut self, len: u64) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.fs_file_set_len(len),
+      Self::Vfs(_) => Err(not_supported("setting file length")),
+    }
+  }
+}
+
+impl sys_traits::FsFileSetTimes for FsFileAdapter {
+  fn fs_file_set_times(
+    &mut self,
+    times: sys_traits::FsFileTimes,
+  ) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.fs_file_set_times(times),
+      Self::Vfs(_) => Err(not_supported("setting file times")),
+    }
+  }
+}
+
+impl sys_traits::FsFileLock for FsFileAdapter {
+  fn fs_file_lock(
+    &mut self,
+    mode: sys_traits::FsFileLockMode,
+  ) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.fs_file_lock(mode),
+      Self::Vfs(_) => Err(not_supported("locking files")),
+    }
+  }
+
+  fn fs_file_try_lock(
+    &mut self,
+    mode: sys_traits::FsFileLockMode,
+  ) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.fs_file_try_lock(mode),
+      Self::Vfs(_) => Err(not_supported("locking files")),
+    }
+  }
+
+  fn fs_file_unlock(&mut self) -> std::io::Result<()> {
+    match self {
+      Self::Real(file) => file.fs_file_unlock(),
+      Self::Vfs(_) => Err(not_supported("unlocking files")),
+    }
+  }
+}
+
+impl sys_traits::FsFileIsTerminal for FsFileAdapter {
+  #[inline]
+  fn fs_file_is_terminal(&self) -> bool {
+    match self {
+      Self::Real(file) => file.fs_file_is_terminal(),
+      Self::Vfs(_) => false,
+    }
+  }
+}
+
+impl sys_traits::BaseFsOpen for DenoCompileFileSystem {
+  type File = FsFileAdapter;
+
+  fn base_fs_open(
+    &self,
+    path: &Path,
+    options: &sys_traits::OpenOptions,
+  ) -> std::io::Result<Self::File> {
+    if self.0.is_path_within(path) {
+      Ok(FsFileAdapter::Vfs(self.0.open_file(path)?))
+    } else {
+      #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+      Ok(FsFileAdapter::Real(
+        sys_traits::impls::RealSys.base_fs_open(path, options)?,
+      ))
+    }
+  }
+}
+
+impl sys_traits::SystemRandom for DenoCompileFileSystem {
+  #[inline]
+  fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> {
+    #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+    sys_traits::impls::RealSys.sys_random(buf)
+  }
+}
+
+impl sys_traits::SystemTimeNow for DenoCompileFileSystem {
+  #[inline]
+  fn sys_time_now(&self) -> SystemTime {
+    #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+    sys_traits::impls::RealSys.sys_time_now()
+  }
+}
+
+impl sys_traits::ThreadSleep for DenoCompileFileSystem {
+  #[inline]
+  fn thread_sleep(&self, dur: Duration) {
+    #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+    sys_traits::impls::RealSys.thread_sleep(dur)
+  }
+}
+
+impl sys_traits::EnvCurrentDir for DenoCompileFileSystem {
+  fn env_current_dir(&self) -> std::io::Result<PathBuf> {
+    #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+    sys_traits::impls::RealSys.env_current_dir()
+  }
+}
+
+impl sys_traits::BaseEnvVar for DenoCompileFileSystem {
+  fn base_env_var_os(
+    &self,
+    key: &std::ffi::OsStr,
+  ) -> Option<std::ffi::OsString> {
+    #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
+    sys_traits::impls::RealSys.base_env_var_os(key)
+  }
+}
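
FsFileAdapter above implements each sys_traits file trait by delegating to the real file or answering with a fixed or unsupported result for the VFS variant. The same enum-delegation shape, reduced to std traits:

    use std::io::{Read, Seek, SeekFrom};

    enum Reader {
      Real(std::fs::File),
      InMemory(std::io::Cursor<Vec<u8>>),
    }

    impl Read for Reader {
      fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        match self {
          Reader::Real(file) => file.read(buf),
          Reader::InMemory(cursor) => cursor.read(buf),
        }
      }
    }

    impl Seek for Reader {
      fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
        match self {
          Reader::Real(file) => file.seek(pos),
          Reader::InMemory(cursor) => cursor.seek(pos),
        }
      }
    }
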
diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs
index 22e0b6d115..0fe6de0b9a 100644
--- a/cli/standalone/mod.rs
+++ b/cli/standalone/mod.rs
@@ -9,6 +9,7 @@
 use binary::StandaloneData;
 use binary::StandaloneModules;
 use code_cache::DenoCompileCodeCache;
 use deno_ast::MediaType;
+use deno_cache_dir::file_fetcher::CacheSetting;
 use deno_cache_dir::npm::NpmCacheDir;
 use deno_config::workspace::MappedResolution;
 use deno_config::workspace::MappedResolutionError;
@@ -35,10 +36,11 @@ use deno_package_json::PackageJsonDepValue;
 use deno_resolver::cjs::IsCjsResolutionMode;
 use deno_resolver::npm::NpmReqResolverOptions;
 use deno_runtime::deno_fs;
+use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::create_host_defined_options;
 use deno_runtime::deno_node::NodeRequireLoader;
 use deno_runtime::deno_node::NodeResolver;
-use deno_runtime::deno_node::PackageJsonResolver;
+use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
 use deno_runtime::deno_permissions::Permissions;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_runtime::deno_tls::rustls::RootCertStore;
@@ -54,6 +56,7 @@ use node_resolver::errors::ClosestPkgJsonError;
 use node_resolver::NodeResolutionKind;
 use node_resolver::ResolutionMode;
 use serialization::DenoCompileModuleSource;
+use serialization::SourceMapStore;
 use std::borrow::Cow;
 use std::rc::Rc;
 use std::sync::Arc;
@@ -64,18 +67,17 @@ use crate::args::create_default_npmrc;
 use crate::args::get_root_cert_store;
 use crate::args::npm_pkg_req_ref_to_binary_command;
 use crate::args::CaData;
-use crate::args::CacheSetting;
 use crate::args::NpmInstallDepsProvider;
 use crate::args::StorageKeyResolver;
 use crate::cache::Caches;
-use crate::cache::DenoCacheEnvFsAdapter;
 use crate::cache::DenoDirProvider;
 use crate::cache::FastInsecureHasher;
 use crate::cache::NodeAnalysisCache;
-use crate::cache::RealDenoCacheEnv;
 use crate::http_util::HttpClientProvider;
 use crate::node::CliCjsCodeAnalyzer;
 use crate::node::CliNodeCodeTranslator;
+use crate::node::CliNodeResolver;
+use crate::node::CliPackageJsonResolver;
 use crate::npm::create_cli_npm_resolver;
 use crate::npm::create_in_npm_pkg_checker;
 use crate::npm::CliByonmNpmResolverCreateOptions;
@@ -86,9 +88,9 @@ use crate::npm::CliNpmResolverCreateOptions;
 use crate::npm::CliNpmResolverManagedSnapshotOption;
 use crate::npm::CreateInNpmPkgCheckerOptions;
 use crate::resolver::CjsTracker;
-use crate::resolver::CliDenoResolverFs;
 use crate::resolver::CliNpmReqResolver;
 use crate::resolver::NpmModuleLoader;
+use crate::sys::CliSys;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::progress_bar::ProgressBarStyle;
 use crate::util::text_encoding::from_utf8_lossy_cow;
@@ -105,12 +107,12 @@ mod file_system;
 mod serialization;
 mod virtual_fs;
 
+pub use self::file_system::DenoCompileFileSystem;
 pub use binary::extract_standalone;
 pub use binary::is_standalone_binary;
 pub use binary::DenoCompileBinaryWriter;
 
 use self::binary::Metadata;
-use self::file_system::DenoCompileFileSystem;
 
 struct SharedModuleLoaderState {
   cjs_tracker: Arc<CjsTracker>,
@@ -118,10 +120,11 @@ struct SharedModuleLoaderState {
   fs: Arc<dyn deno_fs::FileSystem>,
   modules: StandaloneModules,
   node_code_translator: Arc<CliNodeCodeTranslator>,
-  node_resolver: Arc<NodeResolver>,
+  node_resolver: Arc<CliNodeResolver>,
   npm_module_loader: Arc<NpmModuleLoader>,
   npm_req_resolver: Arc<CliNpmReqResolver>,
   npm_resolver: Arc<dyn CliNpmResolver>,
+  source_maps: SourceMapStore,
   vfs: Arc<FileBackedVfs>,
   workspace_resolver: WorkspaceResolver,
 }
@@ -396,7 +399,11 @@ impl ModuleLoader for EmbeddedModuleLoader {
       );
     }
 
-    match self.shared.modules.read(original_specifier) {
+    match self
+      .shared
+      .modules
+      .read(original_specifier, VfsFileSubDataKind::ModuleGraph)
+    {
       Ok(Some(module)) => {
         let media_type = module.media_type;
         let (module_specifier, module_type, module_source) =
@@ -495,6 +502,45 @@ impl ModuleLoader for EmbeddedModuleLoader {
     }
     std::future::ready(()).boxed_local()
   }
+
+  fn get_source_map(&self, file_name: &str) -> Option<Cow<[u8]>> {
+    if file_name.starts_with("file:///") {
+      let url =
+        deno_path_util::url_from_directory_path(self.shared.vfs.root()).ok()?;
+      let file_url = ModuleSpecifier::parse(file_name).ok()?;
+      let relative_path = url.make_relative(&file_url)?;
+      self.shared.source_maps.get(&relative_path)
+    } else {
+      self.shared.source_maps.get(file_name)
+    }
+    .map(Cow::Borrowed)
+  }
+
+  fn get_source_mapped_source_line(
+    &self,
+    file_name: &str,
+    line_number: usize,
+  ) -> Option<String> {
+    let specifier = ModuleSpecifier::parse(file_name).ok()?;
+    let data = self
+      .shared
+      .modules
+      .read(&specifier, VfsFileSubDataKind::Raw)
+      .ok()??;
+
+    let source = String::from_utf8_lossy(&data.data);
+    // Do NOT use .lines(): it skips the terminating empty line.
+    // (due to internally using .split_terminator() instead of .split())
+    let lines: Vec<&str> = source.split('\n').collect();
+    if line_number >= lines.len() {
+      Some(format!(
+        "{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
+        crate::colors::yellow("Warning"), line_number + 1,
+      ))
+    } else {
+      Some(lines[line_number].to_string())
+    }
+  }
 }
 
 impl NodeRequireLoader for EmbeddedModuleLoader {
@@ -583,16 +629,20 @@ impl RootCertStoreProvider for StandaloneRootCertStoreProvider {
   }
 }
 
-pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
+pub async fn run(
+  fs: Arc<dyn FileSystem>,
+  sys: CliSys,
+  data: StandaloneData,
+) -> Result<i32, AnyError> {
   let StandaloneData {
-    fs,
     metadata,
     modules,
     npm_snapshot,
     root_path,
+    source_maps,
     vfs,
   } = data;
-  let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
+  let deno_dir_provider = Arc::new(DenoDirProvider::new(sys.clone(), None));
   let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
     ca_stores: metadata.ca_stores,
     ca_data: metadata.ca_data.map(CaData::Bytes),
@@ -610,9 +660,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
   let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
   let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
   let cache_setting = CacheSetting::Only;
-  let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
-    deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
-  ));
+  let pkg_json_resolver = Arc::new(CliPackageJsonResolver::new(sys.clone()));
   let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules {
     Some(binary::NodeModules::Managed { node_modules_dir }) => {
       // create an npmrc that uses the fake npm_registry_url to resolve packages
@@ -625,7 +673,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
         registry_configs: Default::default(),
       });
       let npm_cache_dir = Arc::new(NpmCacheDir::new(
-        &DenoCacheEnvFsAdapter(fs.as_ref()),
+        &sys,
         npm_global_cache_dir,
         npmrc.get_all_known_registries_urls(),
       ));
@@ -646,17 +694,17 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
             snapshot,
           )),
           maybe_lockfile: None,
-          fs: fs.clone(),
           http_client_provider: http_client_provider.clone(),
           npm_cache_dir,
-          cache_setting,
-          text_only_progress_bar: progress_bar,
-          maybe_node_modules_path,
-          npm_system_info: Default::default(),
           npm_install_deps_provider: Arc::new(
             // this is only used for installing packages, which isn't necessary with deno compile
             NpmInstallDepsProvider::empty(),
           ),
+          sys: sys.clone(),
+          text_only_progress_bar: progress_bar,
+          cache_setting,
+          maybe_node_modules_path,
+          npm_system_info: Default::default(),
           npmrc,
           lifecycle_scripts: Default::default(),
         },
@@ -673,7 +721,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
         create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Byonm);
       let npm_resolver = create_cli_npm_resolver(
         CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
-          fs: CliDenoResolverFs(fs.clone()),
+          sys: sys.clone(),
           pkg_json_resolver: pkg_json_resolver.clone(),
           root_node_modules_dir,
         }),
@@ -686,7 +734,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
       // so no need to create actual `.npmrc` configuration.
       let npmrc = create_default_npmrc();
       let npm_cache_dir = Arc::new(NpmCacheDir::new(
-        &DenoCacheEnvFsAdapter(fs.as_ref()),
+        &sys,
        npm_global_cache_dir,
         npmrc.get_all_known_registries_urls(),
       ));
@@ -701,18 +749,18 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
         create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
           CliManagedNpmResolverCreateOptions {
             snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
-            maybe_lockfile: None,
-            fs: fs.clone(),
             http_client_provider: http_client_provider.clone(),
-            npm_cache_dir,
-            cache_setting,
-            text_only_progress_bar: progress_bar,
-            maybe_node_modules_path: None,
-            npm_system_info: Default::default(),
             npm_install_deps_provider: Arc::new(
               // this is only used for installing packages, which isn't necessary with deno compile
               NpmInstallDepsProvider::empty(),
             ),
+            sys: sys.clone(),
+            cache_setting,
+            text_only_progress_bar: progress_bar,
+            npm_cache_dir,
+            maybe_lockfile: None,
+            maybe_node_modules_path: None,
+            npm_system_info: Default::default(),
             npmrc: create_default_npmrc(),
             lifecycle_scripts: Default::default(),
           },
@@ -724,10 +772,11 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
 
   let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
   let node_resolver = Arc::new(NodeResolver::new(
-    deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
     in_npm_pkg_checker.clone(),
+    RealIsBuiltInNodeModuleChecker,
     npm_resolver.clone().into_npm_pkg_folder_resolver(),
     pkg_json_resolver.clone(),
+    sys.clone(),
   ));
   let cjs_tracker = Arc::new(CjsTracker::new(
     in_npm_pkg_checker.clone(),
@@ -745,7 +794,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
   let npm_req_resolver =
     Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
       byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
-      fs: CliDenoResolverFs(fs.clone()),
+      sys: sys.clone(),
       in_npm_pkg_checker: in_npm_pkg_checker.clone(),
       node_resolver: node_resolver.clone(),
       npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
@@ -758,11 +807,11 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
   );
   let node_code_translator = Arc::new(NodeCodeTranslator::new(
     cjs_esm_code_analyzer,
-    deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
     in_npm_pkg_checker,
     node_resolver.clone(),
     npm_resolver.clone().into_npm_pkg_folder_resolver(),
     pkg_json_resolver.clone(),
+    sys.clone(),
   ));
   let workspace_resolver = {
     let import_map = match metadata.workspace_resolver.import_map {
@@ -841,6 +890,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
       )),
       npm_resolver: npm_resolver.clone(),
       npm_req_resolver,
+      source_maps,
      vfs,
       workspace_resolver,
     }),
@@ -864,7 +914,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
   }
 
   let desc_parser =
-    Arc::new(RuntimePermissionDescriptorParser::new(fs.clone()));
+    Arc::new(RuntimePermissionDescriptorParser::new(sys.clone()));
   let permissions =
     Permissions::from_options(desc_parser.as_ref(), &permissions)?;
   PermissionsContainer::new(desc_parser, permissions)
@@ -894,6 +944,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
     root_cert_store_provider,
     permissions,
     StorageKeyResolver::empty(),
+    sys,
     crate::args::DenoSubcommand::Run(Default::default()),
     CliMainWorkerOptions {
       argv: metadata.argv,
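
The comment inside get_source_mapped_source_line above is the crux of that change: str::lines() drops the trailing terminator line, while split('\n') keeps it, and the out-of-bounds check relies on the latter. Verifiable in isolation with std only:

    fn main() {
      let source = "a\nb\n";
      assert_eq!(source.lines().count(), 2); // "a", "b"
      assert_eq!(source.split('\n').count(), 3); // "a", "b", ""
    }
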
-> Result { root_cert_store_provider, permissions, StorageKeyResolver::empty(), + sys, crate::args::DenoSubcommand::Run(Default::default()), CliMainWorkerOptions { argv: metadata.argv, diff --git a/cli/standalone/serialization.rs b/cli/standalone/serialization.rs index 6062e21019..30802aa081 100644 --- a/cli/standalone/serialization.rs +++ b/cli/standalone/serialization.rs @@ -1,10 +1,13 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use std::borrow::Cow; +use std::cell::Cell; use std::collections::BTreeMap; use std::collections::HashMap; use std::io::Write; +use capacity_builder::BytesAppendable; +use deno_ast::swc::common::source_map; use deno_ast::MediaType; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -19,12 +22,15 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmPackageId; use deno_semver::package::PackageReq; +use deno_semver::StackString; +use indexmap::IndexMap; use crate::standalone::virtual_fs::VirtualDirectory; use super::binary::Metadata; use super::virtual_fs::BuiltVfs; use super::virtual_fs::VfsBuilder; +use super::virtual_fs::VirtualDirectoryEntries; const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd"; @@ -32,60 +38,64 @@ const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd"; /// * d3n0l4nd /// * /// * -/// * +/// * /// * /// * +/// * /// * d3n0l4nd pub fn serialize_binary_data_section( metadata: &Metadata, npm_snapshot: Option, remote_modules: &RemoteModulesStoreBuilder, + source_map_store: &SourceMapStore, vfs: &BuiltVfs, ) -> Result, AnyError> { - fn write_bytes_with_len(bytes: &mut Vec, data: &[u8]) { - bytes.extend_from_slice(&(data.len() as u64).to_le_bytes()); - bytes.extend_from_slice(data); - } + let metadata = serde_json::to_string(metadata)?; + let npm_snapshot = + npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default(); + let serialized_vfs = serde_json::to_string(&vfs.entries)?; - let mut bytes = Vec::new(); - bytes.extend_from_slice(MAGIC_BYTES); - - // 1. Metadata - { - let metadata = serde_json::to_string(metadata)?; - write_bytes_with_len(&mut bytes, metadata.as_bytes()); - } - // 2. Npm snapshot - { - let npm_snapshot = - npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default(); - write_bytes_with_len(&mut bytes, &npm_snapshot); - } - // 3. Remote modules - { - let update_index = bytes.len(); - bytes.extend_from_slice(&(0_u64).to_le_bytes()); - let start_index = bytes.len(); - remote_modules.write(&mut bytes)?; - let length = bytes.len() - start_index; - let length_bytes = (length as u64).to_le_bytes(); - bytes[update_index..update_index + length_bytes.len()] - .copy_from_slice(&length_bytes); - } - // 4. VFS - { - let serialized_vfs = serde_json::to_string(&vfs.root)?; - write_bytes_with_len(&mut bytes, serialized_vfs.as_bytes()); - let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::(); - bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes()); - for file in &vfs.files { - bytes.extend_from_slice(file); + let bytes = capacity_builder::BytesBuilder::build(|builder| { + builder.append(MAGIC_BYTES); + // 1. Metadata + { + builder.append_le(metadata.len() as u64); + builder.append(&metadata); + } + // 2. Npm snapshot + { + builder.append_le(npm_snapshot.len() as u64); + builder.append(&npm_snapshot); + } + // 3. Remote modules + { + remote_modules.write(builder); + } + // 4. 
VFS + { + builder.append_le(serialized_vfs.len() as u64); + builder.append(&serialized_vfs); + let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::(); + builder.append_le(vfs_bytes_len); + for file in &vfs.files { + builder.append(file); + } + } + // 5. Source maps + { + builder.append_le(source_map_store.data.len() as u32); + for (specifier, source_map) in &source_map_store.data { + builder.append_le(specifier.len() as u32); + builder.append(specifier); + builder.append_le(source_map.len() as u32); + builder.append(source_map.as_ref()); + } } - } - // write the magic bytes at the end so we can use it - // to make sure we've deserialized correctly - bytes.extend_from_slice(MAGIC_BYTES); + // write the magic bytes at the end so we can use it + // to make sure we've deserialized correctly + builder.append(MAGIC_BYTES); + })?; Ok(bytes) } @@ -94,19 +104,14 @@ pub struct DeserializedDataSection { pub metadata: Metadata, pub npm_snapshot: Option, pub remote_modules: RemoteModulesStore, - pub vfs_dir: VirtualDirectory, + pub source_maps: SourceMapStore, + pub vfs_root_entries: VirtualDirectoryEntries, pub vfs_files_data: &'static [u8], } pub fn deserialize_binary_data_section( data: &'static [u8], ) -> Result, AnyError> { - fn read_bytes_with_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { - let (input, len) = read_u64(input)?; - let (input, data) = read_bytes(input, len as usize)?; - Ok((input, data)) - } - fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> { if input.len() < MAGIC_BYTES.len() { bail!("Unexpected end of data. Could not find magic bytes."); @@ -118,34 +123,51 @@ pub fn deserialize_binary_data_section( Ok((input, true)) } + #[allow(clippy::type_complexity)] + fn read_source_map_entry( + input: &[u8], + ) -> Result<(&[u8], (Cow, &[u8])), AnyError> { + let (input, specifier) = read_string_lossy(input)?; + let (input, source_map) = read_bytes_with_u32_len(input)?; + Ok((input, (specifier, source_map))) + } + let (input, found) = read_magic_bytes(data)?; if !found { return Ok(None); } // 1. Metadata - let (input, data) = read_bytes_with_len(input).context("reading metadata")?; + let (input, data) = + read_bytes_with_u64_len(input).context("reading metadata")?; let metadata: Metadata = serde_json::from_slice(data).context("deserializing metadata")?; // 2. Npm snapshot let (input, data) = - read_bytes_with_len(input).context("reading npm snapshot")?; + read_bytes_with_u64_len(input).context("reading npm snapshot")?; let npm_snapshot = if data.is_empty() { None } else { Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?) }; // 3. Remote modules - let (input, data) = - read_bytes_with_len(input).context("reading remote modules data")?; - let remote_modules = - RemoteModulesStore::build(data).context("deserializing remote modules")?; + let (input, remote_modules) = + RemoteModulesStore::build(input).context("deserializing remote modules")?; // 4. VFS - let (input, data) = read_bytes_with_len(input).context("vfs")?; - let vfs_dir: VirtualDirectory = + let (input, data) = read_bytes_with_u64_len(input).context("vfs")?; + let vfs_root_entries: VirtualDirectoryEntries = serde_json::from_slice(data).context("deserializing vfs data")?; let (input, vfs_files_data) = - read_bytes_with_len(input).context("reading vfs files data")?; + read_bytes_with_u64_len(input).context("reading vfs files data")?; + // 5. 
Source maps + let (mut input, source_map_data_len) = read_u32_as_usize(input)?; + let mut source_maps = SourceMapStore::with_capacity(source_map_data_len); + for _ in 0..source_map_data_len { + let (current_input, (specifier, source_map)) = + read_source_map_entry(input)?; + input = current_input; + source_maps.add(specifier, Cow::Borrowed(source_map)); + } // finally ensure we read the magic bytes at the end let (_input, found) = read_magic_bytes(input)?; @@ -157,7 +179,8 @@ pub fn deserialize_binary_data_section( metadata, npm_snapshot, remote_modules, - vfs_dir, + source_maps, + vfs_root_entries, vfs_files_data, })) } @@ -165,19 +188,31 @@ pub fn deserialize_binary_data_section( #[derive(Default)] pub struct RemoteModulesStoreBuilder { specifiers: Vec<(String, u64)>, - data: Vec<(MediaType, Vec)>, + data: Vec<(MediaType, Vec, Option>)>, data_byte_len: u64, redirects: Vec<(String, String)>, redirects_len: u64, } impl RemoteModulesStoreBuilder { - pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec) { + pub fn add( + &mut self, + specifier: &Url, + media_type: MediaType, + data: Vec, + maybe_transpiled: Option>, + ) { log::debug!("Adding '{}' ({})", specifier, media_type); let specifier = specifier.to_string(); self.specifiers.push((specifier, self.data_byte_len)); - self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data - self.data.push((media_type, data)); + let maybe_transpiled_len = match &maybe_transpiled { + // data length (4 bytes), data + Some(data) => 4 + data.len() as u64, + None => 0, + }; + // media type (1 byte), data length (4 bytes), data, has transpiled (1 byte), transpiled length + self.data_byte_len += 1 + 4 + data.len() as u64 + 1 + maybe_transpiled_len; + self.data.push((media_type, data, maybe_transpiled)); } pub fn add_redirects(&mut self, redirects: &BTreeMap) { @@ -191,26 +226,50 @@ impl RemoteModulesStoreBuilder { } } - fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> { - writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?; - writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?; + fn write<'a, TBytes: capacity_builder::BytesType>( + &'a self, + builder: &mut capacity_builder::BytesBuilder<'a, TBytes>, + ) { + builder.append_le(self.specifiers.len() as u32); + builder.append_le(self.redirects.len() as u32); for (specifier, offset) in &self.specifiers { - writer.write_all(&(specifier.len() as u32).to_le_bytes())?; - writer.write_all(specifier.as_bytes())?; - writer.write_all(&offset.to_le_bytes())?; + builder.append_le(specifier.len() as u32); + builder.append(specifier); + builder.append_le(*offset); } for (from, to) in &self.redirects { - writer.write_all(&(from.len() as u32).to_le_bytes())?; - writer.write_all(from.as_bytes())?; - writer.write_all(&(to.len() as u32).to_le_bytes())?; - writer.write_all(to.as_bytes())?; + builder.append_le(from.len() as u32); + builder.append(from); + builder.append_le(to.len() as u32); + builder.append(to); } - for (media_type, data) in &self.data { - writer.write_all(&[serialize_media_type(*media_type)])?; - writer.write_all(&(data.len() as u64).to_le_bytes())?; - writer.write_all(data)?; + builder.append_le( + self + .data + .iter() + .map(|(_, data, maybe_transpiled)| { + 1 + 4 + + (data.len() as u64) + + 1 + + match maybe_transpiled { + Some(transpiled) => 4 + (transpiled.len() as u64), + None => 0, + } + }) + .sum::(), + ); + for (media_type, data, maybe_transpiled) in &self.data { + 
builder.append(serialize_media_type(*media_type)); + builder.append_le(data.len() as u32); + builder.append(data); + if let Some(transpiled) = maybe_transpiled { + builder.append(1); + builder.append_le(transpiled.len() as u32); + builder.append(transpiled); + } else { + builder.append(0); + } } - Ok(()) } } @@ -238,6 +297,30 @@ impl DenoCompileModuleSource { } } +pub struct SourceMapStore { + data: IndexMap, Cow<'static, [u8]>>, +} + +impl SourceMapStore { + pub fn with_capacity(capacity: usize) -> Self { + Self { + data: IndexMap::with_capacity(capacity), + } + } + + pub fn add( + &mut self, + specifier: Cow<'static, str>, + source_map: Cow<'static, [u8]>, + ) { + self.data.insert(specifier, source_map); + } + + pub fn get(&self, specifier: &str) -> Option<&[u8]> { + self.data.get(specifier).map(|v| v.as_ref()) + } +} + pub struct DenoCompileModuleData<'a> { pub specifier: &'a Url, pub media_type: MediaType, @@ -284,6 +367,13 @@ impl<'a> DenoCompileModuleData<'a> { } } +pub struct RemoteModuleEntry<'a> { + pub specifier: &'a Url, + pub media_type: MediaType, + pub data: Cow<'static, [u8]>, + pub transpiled_data: Option>, +} + enum RemoteModulesStoreSpecifierValue { Data(usize), Redirect(Url), @@ -295,7 +385,7 @@ pub struct RemoteModulesStore { } impl RemoteModulesStore { - fn build(data: &'static [u8]) -> Result { + fn build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> { fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> { let (input, specifier) = read_string_lossy(input)?; let specifier = Url::parse(&specifier)?; @@ -338,12 +428,16 @@ impl RemoteModulesStore { Ok((input, specifiers)) } - let (files_data, specifiers) = read_headers(data)?; + let (input, specifiers) = read_headers(input)?; + let (input, files_data) = read_bytes_with_u64_len(input)?; - Ok(Self { - specifiers, - files_data, - }) + Ok(( + input, + Self { + specifiers, + files_data, + }, + )) } pub fn resolve_specifier<'a>( @@ -374,7 +468,7 @@ impl RemoteModulesStore { pub fn read<'a>( &'a self, original_specifier: &'a Url, - ) -> Result>, AnyError> { + ) -> Result>, AnyError> { let mut count = 0; let mut specifier = original_specifier; loop { @@ -390,12 +484,25 @@ impl RemoteModulesStore { let input = &self.files_data[*offset..]; let (input, media_type_byte) = read_bytes(input, 1)?; let media_type = deserialize_media_type(media_type_byte[0])?; - let (input, len) = read_u64(input)?; - let (_input, data) = read_bytes(input, len as usize)?; - return Ok(Some(DenoCompileModuleData { + let (input, data) = read_bytes_with_u32_len(input)?; + check_has_len(input, 1)?; + let (input, has_transpiled) = (&input[1..], input[0]); + let (_, transpiled_data) = match has_transpiled { + 0 => (input, None), + 1 => { + let (input, data) = read_bytes_with_u32_len(input)?; + (input, Some(data)) + } + value => bail!( + "Invalid transpiled data flag: {}. 
Compiled data is corrupt.", + value + ), + }; + return Ok(Some(RemoteModuleEntry { specifier, media_type, data: Cow::Borrowed(data), + transpiled_data: transpiled_data.map(Cow::Borrowed), })); } None => { @@ -479,12 +586,13 @@ fn deserialize_npm_snapshot( #[allow(clippy::needless_lifetimes)] // clippy bug fn parse_package_dep<'a>( id_to_npm_id: &'a impl Fn(usize) -> Result, - ) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a + ) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a { |input| { let (input, req) = read_string_lossy(input)?; let (input, id) = read_u32_as_usize(input)?; - Ok((input, (req.into_owned(), id_to_npm_id(id)?))) + let req = StackString::from_cow(req); + Ok((input, (req, id_to_npm_id(id)?))) } } @@ -634,17 +742,34 @@ fn parse_vec_n_times_with_index( Ok((input, results)) } +fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { + let (input, len) = read_u64(input)?; + let (input, data) = read_bytes(input, len as usize)?; + Ok((input, data)) +} + +fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { + let (input, len) = read_u32_as_usize(input)?; + let (input, data) = read_bytes(input, len)?; + Ok((input, data)) +} + fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> { - if input.len() < len { - bail!("Unexpected end of data.",); - } + check_has_len(input, len)?; let (len_bytes, input) = input.split_at(len); Ok((input, len_bytes)) } +#[inline(always)] +fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> { + if input.len() < len { + bail!("Unexpected end of data."); + } + Ok(()) +} + fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow), AnyError> { - let (input, str_len) = read_u32_as_usize(input)?; - let (input, data_bytes) = read_bytes(input, str_len)?; + let (input, data_bytes) = read_bytes_with_u32_len(input)?; Ok((input, String::from_utf8_lossy(data_bytes))) } diff --git a/cli/standalone/virtual_fs.rs b/cli/standalone/virtual_fs.rs index 04e66d680e..370d07a488 100644 --- a/cli/standalone/virtual_fs.rs +++ b/cli/standalone/virtual_fs.rs @@ -51,8 +51,16 @@ pub enum WindowsSystemRootablePath { impl WindowsSystemRootablePath { pub fn join(&self, name_component: &str) -> PathBuf { // this method doesn't handle multiple components - debug_assert!(!name_component.contains('\\')); - debug_assert!(!name_component.contains('/')); + debug_assert!( + !name_component.contains('\\'), + "Invalid component: {}", + name_component + ); + debug_assert!( + !name_component.contains('/'), + "Invalid component: {}", + name_component + ); match self { WindowsSystemRootablePath::WindowSystemRoot => { @@ -67,7 +75,7 @@ impl WindowsSystemRootablePath { #[derive(Debug)] pub struct BuiltVfs { pub root_path: WindowsSystemRootablePath, - pub root: VirtualDirectory, + pub entries: VirtualDirectoryEntries, pub files: Vec>, } @@ -95,7 +103,7 @@ impl VfsBuilder { Self { executable_root: VirtualDirectory { name: "/".to_string(), - entries: Vec::new(), + entries: Default::default(), }, files: Vec::new(), current_offset: 0, @@ -208,23 +216,20 @@ impl VfsBuilder { continue; } let name = component.as_os_str().to_string_lossy(); - let index = match current_dir - .entries - .binary_search_by(|e| e.name().cmp(&name)) - { + let index = match current_dir.entries.binary_search(&name) { Ok(index) => index, Err(insert_index) => { - current_dir.entries.insert( + current_dir.entries.0.insert( insert_index, VfsEntry::Dir(VirtualDirectory { name: name.to_string(), - 
entries: Vec::new(), + entries: Default::default(), }), ); insert_index } }; - match &mut current_dir.entries[index] { + match &mut current_dir.entries.0[index] { VfsEntry::Dir(dir) => { current_dir = dir; } @@ -248,14 +253,8 @@ impl VfsBuilder { continue; } let name = component.as_os_str().to_string_lossy(); - let index = match current_dir - .entries - .binary_search_by(|e| e.name().cmp(&name)) - { - Ok(index) => index, - Err(_) => return None, - }; - match &mut current_dir.entries[index] { + let entry = current_dir.entries.get_mut_by_name(&name)?; + match entry { VfsEntry::Dir(dir) => { current_dir = dir; } @@ -320,9 +319,9 @@ impl VfsBuilder { offset, len: data.len() as u64, }; - match dir.entries.binary_search_by(|e| e.name().cmp(&name)) { + match dir.entries.binary_search(&name) { Ok(index) => { - let entry = &mut dir.entries[index]; + let entry = &mut dir.entries.0[index]; match entry { VfsEntry::File(virtual_file) => match sub_data_kind { VfsFileSubDataKind::Raw => { @@ -336,7 +335,7 @@ impl VfsBuilder { } } Err(insert_index) => { - dir.entries.insert( + dir.entries.0.insert( insert_index, VfsEntry::File(VirtualFile { name: name.to_string(), @@ -384,10 +383,10 @@ impl VfsBuilder { let target = normalize_path(path.parent().unwrap().join(&target)); let dir = self.add_dir_raw(path.parent().unwrap()); let name = path.file_name().unwrap().to_string_lossy(); - match dir.entries.binary_search_by(|e| e.name().cmp(&name)) { + match dir.entries.binary_search(&name) { Ok(_) => {} // previously inserted Err(insert_index) => { - dir.entries.insert( + dir.entries.0.insert( insert_index, VfsEntry::Symlink(VirtualSymlink { name: name.to_string(), @@ -426,7 +425,7 @@ impl VfsBuilder { dir: &mut VirtualDirectory, parts: &[String], ) { - for entry in &mut dir.entries { + for entry in &mut dir.entries.0 { match entry { VfsEntry::Dir(dir) => { strip_prefix_from_symlinks(dir, parts); @@ -454,13 +453,13 @@ impl VfsBuilder { if self.min_root_dir.as_ref() == Some(¤t_path) { break; } - match ¤t_dir.entries[0] { + match ¤t_dir.entries.0[0] { VfsEntry::Dir(dir) => { if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME { // special directory we want to maintain break; } - match current_dir.entries.remove(0) { + match current_dir.entries.0.remove(0) { VfsEntry::Dir(dir) => { current_path = WindowsSystemRootablePath::Path(current_path.join(&dir.name)); @@ -480,7 +479,7 @@ impl VfsBuilder { } BuiltVfs { root_path: current_path, - root: current_dir, + entries: current_dir.entries, files: self.files, } } @@ -506,7 +505,7 @@ pub fn output_vfs(vfs: &BuiltVfs, executable_name: &str) { return; // no need to compute if won't output } - if vfs.root.entries.is_empty() { + if vfs.entries.is_empty() { return; // nothing to output } @@ -696,7 +695,7 @@ fn vfs_as_display_tree( fn dir_size(dir: &VirtualDirectory, seen_offsets: &mut HashSet) -> Size { let mut size = Size::default(); - for entry in &dir.entries { + for entry in dir.entries.iter() { match entry { VfsEntry::Dir(virtual_directory) => { size = size + dir_size(virtual_directory, seen_offsets); @@ -760,15 +759,10 @@ fn vfs_as_display_tree( fn include_all_entries<'a>( dir_path: &WindowsSystemRootablePath, - vfs_dir: &'a VirtualDirectory, + entries: &'a VirtualDirectoryEntries, seen_offsets: &mut HashSet, ) -> Vec> { - if vfs_dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME { - return show_global_node_modules_dir(vfs_dir, seen_offsets); - } - - vfs_dir - .entries + entries .iter() .map(|entry| DirEntryOutput { name: Cow::Borrowed(entry.name()), @@ -826,10 
+820,12 @@ fn vfs_as_display_tree( } else { EntryOutput::Subset(children) } + } else if vfs_dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME { + EntryOutput::Subset(show_global_node_modules_dir(vfs_dir, seen_offsets)) } else { EntryOutput::Subset(include_all_entries( &WindowsSystemRootablePath::Path(dir), - vfs_dir, + &vfs_dir.entries, seen_offsets, )) } @@ -839,7 +835,7 @@ fn vfs_as_display_tree( // user might not have context about what's being shown let mut seen_offsets = HashSet::with_capacity(vfs.files.len()); let mut child_entries = - include_all_entries(&vfs.root_path, &vfs.root, &mut seen_offsets); + include_all_entries(&vfs.root_path, &vfs.entries, &mut seen_offsets); for child_entry in &mut child_entries { child_entry.collapse_leaf_nodes(); } @@ -859,78 +855,28 @@ enum VfsEntryRef<'a> { Symlink(&'a VirtualSymlink), } -impl<'a> VfsEntryRef<'a> { - pub fn as_fs_stat(&self) -> FsStat { +impl VfsEntryRef<'_> { + pub fn as_metadata(&self) -> FileBackedVfsMetadata { + FileBackedVfsMetadata { + file_type: match self { + Self::Dir(_) => sys_traits::FileType::Dir, + Self::File(_) => sys_traits::FileType::File, + Self::Symlink(_) => sys_traits::FileType::Symlink, + }, + name: self.name().to_string(), + len: match self { + Self::Dir(_) => 0, + Self::File(file) => file.offset.len, + Self::Symlink(_) => 0, + }, + } + } + + pub fn name(&self) -> &str { match self { - VfsEntryRef::Dir(_) => FsStat { - is_directory: true, - is_file: false, - is_symlink: false, - atime: None, - birthtime: None, - mtime: None, - ctime: None, - blksize: 0, - size: 0, - dev: 0, - ino: 0, - mode: 0, - nlink: 0, - uid: 0, - gid: 0, - rdev: 0, - blocks: 0, - is_block_device: false, - is_char_device: false, - is_fifo: false, - is_socket: false, - }, - VfsEntryRef::File(file) => FsStat { - is_directory: false, - is_file: true, - is_symlink: false, - atime: None, - birthtime: None, - mtime: None, - ctime: None, - blksize: 0, - size: file.offset.len, - dev: 0, - ino: 0, - mode: 0, - nlink: 0, - uid: 0, - gid: 0, - rdev: 0, - blocks: 0, - is_block_device: false, - is_char_device: false, - is_fifo: false, - is_socket: false, - }, - VfsEntryRef::Symlink(_) => FsStat { - is_directory: false, - is_file: false, - is_symlink: true, - atime: None, - birthtime: None, - mtime: None, - ctime: None, - blksize: 0, - size: 0, - dev: 0, - ino: 0, - mode: 0, - nlink: 0, - uid: 0, - gid: 0, - rdev: 0, - blocks: 0, - is_block_device: false, - is_char_device: false, - is_fifo: false, - is_socket: false, - }, + Self::Dir(dir) => &dir.name, + Self::File(file) => &file.name, + Self::Symlink(symlink) => &symlink.name, } } } @@ -946,9 +892,9 @@ pub enum VfsEntry { impl VfsEntry { pub fn name(&self) -> &str { match self { - VfsEntry::Dir(dir) => &dir.name, - VfsEntry::File(file) => &file.name, - VfsEntry::Symlink(symlink) => &symlink.name, + Self::Dir(dir) => &dir.name, + Self::File(file) => &file.name, + Self::Symlink(symlink) => &symlink.name, } } @@ -961,27 +907,70 @@ impl VfsEntry { } } +#[derive(Debug, Default, Serialize, Deserialize)] +pub struct VirtualDirectoryEntries(Vec); + +impl VirtualDirectoryEntries { + pub fn new(mut entries: Vec) -> Self { + // needs to be sorted by name + entries.sort_by(|a, b| a.name().cmp(b.name())); + Self(entries) + } + + pub fn take_inner(&mut self) -> Vec { + std::mem::take(&mut self.0) + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub fn len(&self) -> usize { + self.0.len() + } + + pub fn get_by_name(&self, name: &str) -> Option<&VfsEntry> { + 
self.binary_search(name).ok().map(|index| &self.0[index]) + } + + pub fn get_mut_by_name(&mut self, name: &str) -> Option<&mut VfsEntry> { + self + .binary_search(name) + .ok() + .map(|index| &mut self.0[index]) + } + + pub fn binary_search(&self, name: &str) -> Result { + self.0.binary_search_by(|e| e.name().cmp(name)) + } + + pub fn insert(&mut self, entry: VfsEntry) { + match self.binary_search(entry.name()) { + Ok(index) => { + self.0[index] = entry; + } + Err(insert_index) => { + self.0.insert(insert_index, entry); + } + } + } + + pub fn remove(&mut self, index: usize) -> VfsEntry { + self.0.remove(index) + } + + pub fn iter(&self) -> std::slice::Iter<'_, VfsEntry> { + self.0.iter() + } +} + #[derive(Debug, Serialize, Deserialize)] pub struct VirtualDirectory { #[serde(rename = "n")] pub name: String, // should be sorted by name #[serde(rename = "e")] - pub entries: Vec, -} - -impl VirtualDirectory { - pub fn insert_entry(&mut self, entry: VfsEntry) { - let name = entry.name(); - match self.entries.binary_search_by(|e| e.name().cmp(name)) { - Ok(index) => { - self.entries[index] = entry; - } - Err(insert_index) => { - self.entries.insert(insert_index, entry); - } - } - } + pub entries: VirtualDirectoryEntries, } #[derive(Debug, Clone, Copy, Serialize, Deserialize)] @@ -1136,34 +1125,27 @@ impl VfsRoot { } }; let component = component.to_string_lossy(); - match current_dir + current_entry = current_dir .entries - .binary_search_by(|e| e.name().cmp(&component)) - { - Ok(index) => { - current_entry = current_dir.entries[index].as_ref(); - } - Err(_) => { - return Err(std::io::Error::new( - std::io::ErrorKind::NotFound, - "path not found", - )); - } - } + .get_by_name(&component) + .ok_or_else(|| { + std::io::Error::new(std::io::ErrorKind::NotFound, "path not found") + })? 
+ .as_ref(); } Ok((final_path, current_entry)) } } -struct FileBackedVfsFile { +pub struct FileBackedVfsFile { file: VirtualFile, pos: RefCell, vfs: Arc, } impl FileBackedVfsFile { - fn seek(&self, pos: SeekFrom) -> FsResult { + pub fn seek(&self, pos: SeekFrom) -> std::io::Result { match pos { SeekFrom::Start(pos) => { *self.pos.borrow_mut() = pos; @@ -1172,10 +1154,10 @@ impl FileBackedVfsFile { SeekFrom::End(offset) => { if offset < 0 && -offset as u64 > self.file.offset.len { let msg = "An attempt was made to move the file pointer before the beginning of the file."; - Err( - std::io::Error::new(std::io::ErrorKind::PermissionDenied, msg) - .into(), - ) + Err(std::io::Error::new( + std::io::ErrorKind::PermissionDenied, + msg, + )) } else { let mut current_pos = self.pos.borrow_mut(); *current_pos = if offset >= 0 { @@ -1191,7 +1173,7 @@ impl FileBackedVfsFile { if offset >= 0 { *current_pos += offset as u64; } else if -offset as u64 > *current_pos { - return Err(std::io::Error::new(std::io::ErrorKind::PermissionDenied, "An attempt was made to move the file pointer before the beginning of the file.").into()); + return Err(std::io::Error::new(std::io::ErrorKind::PermissionDenied, "An attempt was made to move the file pointer before the beginning of the file.")); } else { *current_pos -= -offset as u64; } @@ -1200,7 +1182,7 @@ impl FileBackedVfsFile { } } - fn read_to_buf(&self, buf: &mut [u8]) -> FsResult { + pub fn read_to_buf(&self, buf: &mut [u8]) -> std::io::Result { let read_pos = { let mut pos = self.pos.borrow_mut(); let read_pos = *pos; @@ -1208,10 +1190,7 @@ impl FileBackedVfsFile { *pos = std::cmp::min(self.file.offset.len, *pos + buf.len() as u64); read_pos }; - self - .vfs - .read_file(&self.file, read_pos, buf) - .map_err(|err| err.into()) + self.vfs.read_file(&self.file, read_pos, buf) } fn read_to_end(&self) -> FsResult> { @@ -1246,7 +1225,7 @@ impl FileBackedVfsFile { #[async_trait::async_trait(?Send)] impl deno_io::fs::File for FileBackedVfsFile { fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult { - self.read_to_buf(buf) + self.read_to_buf(buf).map_err(Into::into) } async fn read_byob( self: Rc, @@ -1290,10 +1269,10 @@ impl deno_io::fs::File for FileBackedVfsFile { } fn seek_sync(self: Rc, pos: SeekFrom) -> FsResult { - self.seek(pos) + self.seek(pos).map_err(|err| err.into()) } async fn seek_async(self: Rc, pos: SeekFrom) -> FsResult { - self.seek(pos) + self.seek(pos).map_err(|err| err.into()) } fn datasync_sync(self: Rc) -> FsResult<()> { @@ -1369,6 +1348,47 @@ impl deno_io::fs::File for FileBackedVfsFile { } } +#[derive(Debug, Clone)] +pub struct FileBackedVfsDirEntry { + pub parent_path: PathBuf, + pub metadata: FileBackedVfsMetadata, +} + +#[derive(Debug, Clone)] +pub struct FileBackedVfsMetadata { + pub name: String, + pub file_type: sys_traits::FileType, + pub len: u64, +} + +impl FileBackedVfsMetadata { + pub fn as_fs_stat(&self) -> FsStat { + FsStat { + is_directory: self.file_type == sys_traits::FileType::Dir, + is_file: self.file_type == sys_traits::FileType::File, + is_symlink: self.file_type == sys_traits::FileType::Symlink, + atime: None, + birthtime: None, + mtime: None, + ctime: None, + blksize: 0, + size: self.len, + dev: 0, + ino: 0, + mode: 0, + nlink: 0, + uid: 0, + gid: 0, + rdev: 0, + blocks: 0, + is_block_device: false, + is_char_device: false, + is_fifo: false, + is_socket: false, + } + } +} + #[derive(Debug)] pub struct FileBackedVfs { vfs_data: Cow<'static, [u8]>, @@ -1394,13 +1414,13 @@ impl FileBackedVfs { pub fn open_file( self: &Arc, path: 
&Path, - ) -> std::io::Result> { + ) -> std::io::Result { let file = self.file_entry(path)?; - Ok(Rc::new(FileBackedVfsFile { + Ok(FileBackedVfsFile { file: file.clone(), vfs: self.clone(), pos: Default::default(), - })) + }) } pub fn read_dir(&self, path: &Path) -> std::io::Result> { @@ -1419,6 +1439,18 @@ impl FileBackedVfs { ) } + pub fn read_dir_with_metadata<'a>( + &'a self, + path: &Path, + ) -> std::io::Result + 'a> { + let dir = self.dir_entry(path)?; + let path = path.to_path_buf(); + Ok(dir.entries.iter().map(move |entry| FileBackedVfsDirEntry { + parent_path: path.to_path_buf(), + metadata: entry.as_ref().as_metadata(), + })) + } + pub fn read_link(&self, path: &Path) -> std::io::Result { let (_, entry) = self.fs_root.find_entry_no_follow(path)?; match entry { @@ -1432,14 +1464,14 @@ impl FileBackedVfs { } } - pub fn lstat(&self, path: &Path) -> std::io::Result { + pub fn lstat(&self, path: &Path) -> std::io::Result { let (_, entry) = self.fs_root.find_entry_no_follow(path)?; - Ok(entry.as_fs_stat()) + Ok(entry.as_metadata()) } - pub fn stat(&self, path: &Path) -> std::io::Result { + pub fn stat(&self, path: &Path) -> std::io::Result { let (_, entry) = self.fs_root.find_entry(path)?; - Ok(entry.as_fs_stat()) + Ok(entry.as_metadata()) } pub fn canonicalize(&self, path: &Path) -> std::io::Result { @@ -1532,6 +1564,7 @@ impl FileBackedVfs { #[cfg(test)] mod test { use console_static_text::ansi::strip_ansi_codes; + use deno_io::fs::File; use std::io::Write; use test_util::assert_contains; use test_util::TempDir; @@ -1617,25 +1650,31 @@ mod test { ); // metadata - assert!( + assert_eq!( virtual_fs .lstat(&dest_path.join("sub_dir").join("e.txt")) .unwrap() - .is_symlink + .file_type, + sys_traits::FileType::Symlink, ); - assert!( + assert_eq!( virtual_fs .stat(&dest_path.join("sub_dir").join("e.txt")) .unwrap() - .is_file + .file_type, + sys_traits::FileType::File, ); - assert!( + assert_eq!( virtual_fs .stat(&dest_path.join("sub_dir")) .unwrap() - .is_directory, + .file_type, + sys_traits::FileType::Dir, + ); + assert_eq!( + virtual_fs.stat(&dest_path.join("e.txt")).unwrap().file_type, + sys_traits::FileType::File ); - assert!(virtual_fs.stat(&dest_path.join("e.txt")).unwrap().is_file,); } #[test] @@ -1672,11 +1711,12 @@ mod test { read_file(&virtual_fs, &dest_path.join("sub_dir_link").join("c.txt")), "c", ); - assert!( + assert_eq!( virtual_fs .lstat(&dest_path.join("sub_dir_link")) .unwrap() - .is_symlink + .file_type, + sys_traits::FileType::Symlink, ); assert_eq!( @@ -1706,7 +1746,10 @@ mod test { FileBackedVfs::new( Cow::Owned(data), VfsRoot { - dir: vfs.root, + dir: VirtualDirectory { + name: "".to_string(), + entries: vfs.entries, + }, root_path: dest_path.to_path_buf(), start_file_offset: 0, }, @@ -1745,37 +1788,35 @@ mod test { let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); let virtual_fs = Arc::new(virtual_fs); let file = virtual_fs.open_file(&dest_path.join("a.txt")).unwrap(); - file.clone().seek_sync(SeekFrom::Current(2)).unwrap(); + file.seek(SeekFrom::Current(2)).unwrap(); let mut buf = vec![0; 2]; - file.clone().read_sync(&mut buf).unwrap(); + file.read_to_buf(&mut buf).unwrap(); assert_eq!(buf, b"23"); - file.clone().read_sync(&mut buf).unwrap(); + file.read_to_buf(&mut buf).unwrap(); assert_eq!(buf, b"45"); - file.clone().seek_sync(SeekFrom::Current(-4)).unwrap(); - file.clone().read_sync(&mut buf).unwrap(); + file.seek(SeekFrom::Current(-4)).unwrap(); + file.read_to_buf(&mut buf).unwrap(); assert_eq!(buf, b"23"); - 
file.clone().seek_sync(SeekFrom::Start(2)).unwrap(); - file.clone().read_sync(&mut buf).unwrap(); + file.seek(SeekFrom::Start(2)).unwrap(); + file.read_to_buf(&mut buf).unwrap(); assert_eq!(buf, b"23"); - file.clone().seek_sync(SeekFrom::End(2)).unwrap(); - file.clone().read_sync(&mut buf).unwrap(); + file.seek(SeekFrom::End(2)).unwrap(); + file.read_to_buf(&mut buf).unwrap(); assert_eq!(buf, b"89"); - file.clone().seek_sync(SeekFrom::Current(-8)).unwrap(); - file.clone().read_sync(&mut buf).unwrap(); + file.seek(SeekFrom::Current(-8)).unwrap(); + file.read_to_buf(&mut buf).unwrap(); assert_eq!(buf, b"23"); assert_eq!( file - .clone() - .seek_sync(SeekFrom::Current(-5)) - .err() - .unwrap() - .into_io_error() + .seek(SeekFrom::Current(-5)) + .unwrap_err() .to_string(), "An attempt was made to move the file pointer before the beginning of the file." ); // go beyond the file length, then back - file.clone().seek_sync(SeekFrom::Current(40)).unwrap(); - file.clone().seek_sync(SeekFrom::Current(-38)).unwrap(); + file.seek(SeekFrom::Current(40)).unwrap(); + file.seek(SeekFrom::Current(-38)).unwrap(); + let file = Rc::new(file); let read_buf = file.clone().read(2).await.unwrap(); assert_eq!(read_buf.to_vec(), b"67"); file.clone().seek_sync(SeekFrom::Current(-2)).unwrap(); diff --git a/cli/sys.rs b/cli/sys.rs new file mode 100644 index 0000000000..55b50a199d --- /dev/null +++ b/cli/sys.rs @@ -0,0 +1,218 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +// todo(dsherret): this should instead use conditional compilation and directly +// surface the underlying implementation. +// +// The problem atm is that there's no way to have conditional compilation for +// denort or the deno binary. We should extract out denort to a separate binary. 
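// A minimal sketch, not part of this patch: the point of the CliSys enum
// defined below is that callers stay generic over the sys_traits
// capabilities they need, so the same code can run against the real OS or
// the deno compile VFS without conditional compilation. The helper name
// `read_text` is hypothetical, and the Cow<'static, [u8]> return type of
// base_fs_read is assumed from the Cow import that follows.
fn read_text<TSys: sys_traits::BaseFsRead>(
  sys: &TSys,
  path: &std::path::Path,
) -> std::io::Result<String> {
  // base_fs_read hands back the file contents as bytes.
  let bytes = sys.base_fs_read(path)?;
  Ok(String::from_utf8_lossy(&bytes).into_owned())
}
// Usage: read_text(&CliSys::default(), path) dispatches to RealSys, while a
// denort binary would construct CliSys::DenoCompile(..) and hit the VFS.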
+ +use std::borrow::Cow; + +use sys_traits::boxed::BoxedFsDirEntry; +use sys_traits::boxed::BoxedFsFile; +use sys_traits::boxed::BoxedFsMetadataValue; +use sys_traits::boxed::FsMetadataBoxed; +use sys_traits::boxed::FsOpenBoxed; +use sys_traits::boxed::FsReadDirBoxed; +use sys_traits::CreateDirOptions; + +use crate::standalone::DenoCompileFileSystem; + +#[derive(Debug, Clone)] +pub enum CliSys { + #[allow(dead_code)] // will be dead code for denort + #[allow(clippy::disallowed_types)] // ok because sys impl + Real(sys_traits::impls::RealSys), + #[allow(dead_code)] // will be dead code for deno + DenoCompile(DenoCompileFileSystem), +} + +impl Default for CliSys { + fn default() -> Self { + Self::Real(sys_traits::impls::RealSys) + } +} + +impl deno_runtime::deno_node::ExtNodeSys for CliSys {} + +impl sys_traits::BaseFsHardLink for CliSys { + fn base_fs_hard_link( + &self, + src: &std::path::Path, + dst: &std::path::Path, + ) -> std::io::Result<()> { + match self { + Self::Real(sys) => sys.base_fs_hard_link(src, dst), + Self::DenoCompile(sys) => sys.base_fs_hard_link(src, dst), + } + } +} + +impl sys_traits::BaseFsRead for CliSys { + fn base_fs_read( + &self, + p: &std::path::Path, + ) -> std::io::Result> { + match self { + Self::Real(sys) => sys.base_fs_read(p), + Self::DenoCompile(sys) => sys.base_fs_read(p), + } + } +} + +impl sys_traits::BaseFsReadDir for CliSys { + type ReadDirEntry = BoxedFsDirEntry; + + fn base_fs_read_dir( + &self, + p: &std::path::Path, + ) -> std::io::Result< + Box> + '_>, + > { + match self { + Self::Real(sys) => sys.fs_read_dir_boxed(p), + Self::DenoCompile(sys) => sys.fs_read_dir_boxed(p), + } + } +} + +impl sys_traits::BaseFsCanonicalize for CliSys { + fn base_fs_canonicalize( + &self, + p: &std::path::Path, + ) -> std::io::Result { + match self { + Self::Real(sys) => sys.base_fs_canonicalize(p), + Self::DenoCompile(sys) => sys.base_fs_canonicalize(p), + } + } +} + +impl sys_traits::BaseFsMetadata for CliSys { + type Metadata = BoxedFsMetadataValue; + + fn base_fs_metadata( + &self, + path: &std::path::Path, + ) -> std::io::Result { + match self { + Self::Real(sys) => sys.fs_metadata_boxed(path), + Self::DenoCompile(sys) => sys.fs_metadata_boxed(path), + } + } + + fn base_fs_symlink_metadata( + &self, + path: &std::path::Path, + ) -> std::io::Result { + match self { + Self::Real(sys) => sys.fs_symlink_metadata_boxed(path), + Self::DenoCompile(sys) => sys.fs_symlink_metadata_boxed(path), + } + } +} + +impl sys_traits::BaseFsCreateDir for CliSys { + fn base_fs_create_dir( + &self, + p: &std::path::Path, + options: &CreateDirOptions, + ) -> std::io::Result<()> { + match self { + Self::Real(sys) => sys.base_fs_create_dir(p, options), + Self::DenoCompile(sys) => sys.base_fs_create_dir(p, options), + } + } +} + +impl sys_traits::BaseFsOpen for CliSys { + type File = BoxedFsFile; + + fn base_fs_open( + &self, + path: &std::path::Path, + options: &sys_traits::OpenOptions, + ) -> std::io::Result { + match self { + Self::Real(sys) => sys.fs_open_boxed(path, options), + Self::DenoCompile(sys) => sys.fs_open_boxed(path, options), + } + } +} + +impl sys_traits::BaseFsRemoveFile for CliSys { + fn base_fs_remove_file(&self, p: &std::path::Path) -> std::io::Result<()> { + match self { + Self::Real(sys) => sys.base_fs_remove_file(p), + Self::DenoCompile(sys) => sys.base_fs_remove_file(p), + } + } +} + +impl sys_traits::BaseFsRename for CliSys { + fn base_fs_rename( + &self, + old: &std::path::Path, + new: &std::path::Path, + ) -> std::io::Result<()> { + match self { + 
Self::Real(sys) => sys.base_fs_rename(old, new), + Self::DenoCompile(sys) => sys.base_fs_rename(old, new), + } + } +} + +impl sys_traits::SystemRandom for CliSys { + fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> { + match self { + Self::Real(sys) => sys.sys_random(buf), + Self::DenoCompile(sys) => sys.sys_random(buf), + } + } +} + +impl sys_traits::SystemTimeNow for CliSys { + fn sys_time_now(&self) -> std::time::SystemTime { + match self { + Self::Real(sys) => sys.sys_time_now(), + Self::DenoCompile(sys) => sys.sys_time_now(), + } + } +} + +impl sys_traits::ThreadSleep for CliSys { + fn thread_sleep(&self, dur: std::time::Duration) { + match self { + Self::Real(sys) => sys.thread_sleep(dur), + Self::DenoCompile(sys) => sys.thread_sleep(dur), + } + } +} + +impl sys_traits::EnvCurrentDir for CliSys { + fn env_current_dir(&self) -> std::io::Result { + match self { + Self::Real(sys) => sys.env_current_dir(), + Self::DenoCompile(sys) => sys.env_current_dir(), + } + } +} + +impl sys_traits::BaseEnvVar for CliSys { + fn base_env_var_os( + &self, + key: &std::ffi::OsStr, + ) -> Option { + match self { + Self::Real(sys) => sys.base_env_var_os(key), + Self::DenoCompile(sys) => sys.base_env_var_os(key), + } + } +} + +impl sys_traits::EnvHomeDir for CliSys { + fn env_home_dir(&self) -> Option { + #[allow(clippy::disallowed_types)] // ok because sys impl + sys_traits::impls::RealSys.env_home_dir() + } +} diff --git a/cli/task_runner.rs b/cli/task_runner.rs index d6589a1832..c7232387ca 100644 --- a/cli/task_runner.rs +++ b/cli/task_runner.rs @@ -10,7 +10,6 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::future::LocalBoxFuture; -use deno_runtime::deno_node::NodeResolver; use deno_semver::package::PackageNv; use deno_task_shell::ExecutableCommand; use deno_task_shell::ExecuteResult; @@ -25,6 +24,7 @@ use tokio::task::JoinHandle; use tokio::task::LocalSet; use tokio_util::sync::CancellationToken; +use crate::node::CliNodeResolver; use crate::npm::CliNpmResolver; use crate::npm::InnerCliNpmResolverRef; use crate::npm::ManagedCliNpmResolver; @@ -415,7 +415,7 @@ impl ShellCommand for NodeModulesFileRunCommand { pub fn resolve_custom_commands( npm_resolver: &dyn CliNpmResolver, - node_resolver: &NodeResolver, + node_resolver: &CliNodeResolver, ) -> Result>, AnyError> { let mut commands = match npm_resolver.as_inner() { InnerCliNpmResolverRef::Byonm(npm_resolver) => { @@ -522,7 +522,7 @@ fn resolve_execution_path_from_npx_shim( fn resolve_managed_npm_commands( npm_resolver: &ManagedCliNpmResolver, - node_resolver: &NodeResolver, + node_resolver: &CliNodeResolver, ) -> Result>, AnyError> { let mut result = HashMap::new(); let snapshot = npm_resolver.snapshot(); diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs index 5983590531..1b47c9bfb0 100644 --- a/cli/tools/bench/mod.rs +++ b/cli/tools/bench/mod.rs @@ -7,6 +7,7 @@ use crate::display::write_json_to_stdout; use crate::factory::CliFactory; use crate::graph_util::has_graph_root_local_dependent_changed; use crate::ops; +use crate::sys::CliSys; use crate::tools::test::format_test_error; use crate::tools::test::TestFilter; use crate::util::file_watcher; @@ -265,7 +266,7 @@ async fn bench_specifier_inner( async fn bench_specifiers( worker_factory: Arc, permissions: &Permissions, - permissions_desc_parser: &Arc, + permissions_desc_parser: &Arc>, specifiers: Vec, options: BenchSpecifierOptions, ) -> Result<(), AnyError> { diff --git a/cli/tools/check.rs b/cli/tools/check.rs 
index ad5c7c3ab1..acfff70401 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -9,7 +9,6 @@ use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_graph::Module; use deno_graph::ModuleGraph; -use deno_runtime::deno_node::NodeResolver; use deno_terminal::colors; use once_cell::sync::Lazy; use regex::Regex; @@ -29,6 +28,7 @@ use crate::cache::TypeCheckCache; use crate::factory::CliFactory; use crate::graph_util::BuildFastCheckGraphOptions; use crate::graph_util::ModuleGraphBuilder; +use crate::node::CliNodeResolver; use crate::npm::CliNpmResolver; use crate::tsc; use crate::tsc::Diagnostics; @@ -64,7 +64,7 @@ pub async fn check( let file = file_fetcher.fetch(&s, root_permissions).await?; let snippet_files = extract::extract_snippet_files(file)?; for snippet_file in snippet_files { - specifiers_for_typecheck.push(snippet_file.specifier.clone()); + specifiers_for_typecheck.push(snippet_file.url.clone()); file_fetcher.insert_memory_files(snippet_file); } } @@ -103,7 +103,7 @@ pub struct TypeChecker { cjs_tracker: Arc, cli_options: Arc, module_graph_builder: Arc, - node_resolver: Arc, + node_resolver: Arc, npm_resolver: Arc, } @@ -113,7 +113,7 @@ impl TypeChecker { cjs_tracker: Arc, cli_options: Arc, module_graph_builder: Arc, - node_resolver: Arc, + node_resolver: Arc, npm_resolver: Arc, ) -> Self { Self { diff --git a/cli/tools/clean.rs b/cli/tools/clean.rs index 2a77434f88..cdc1c51dcd 100644 --- a/cli/tools/clean.rs +++ b/cli/tools/clean.rs @@ -7,6 +7,7 @@ use std::path::Path; use crate::cache::DenoDir; use crate::colors; use crate::display; +use crate::sys::CliSys; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressMessagePrompt; @@ -28,7 +29,7 @@ impl CleanState { } pub fn clean() -> Result<(), AnyError> { - let deno_dir = DenoDir::new(None)?; + let deno_dir = DenoDir::new(CliSys::default(), None)?; if deno_dir.root.exists() { let no_of_files = walkdir::WalkDir::new(&deno_dir.root).into_iter().count(); let progress_bar = ProgressBar::new(ProgressBarStyle::ProgressBars); diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index 7a463a7b09..cbd376bae2 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -5,8 +5,8 @@ use crate::args::CompileFlags; use crate::args::Flags; use crate::factory::CliFactory; use crate::http_util::HttpClientProvider; +use crate::standalone::binary::is_standalone_binary; use crate::standalone::binary::WriteBinOptions; -use crate::standalone::is_standalone_binary; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 2a554c1335..a9054207b0 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -6,6 +6,8 @@ use crate::args::FileFlags; use crate::args::Flags; use crate::cdp; use crate::factory::CliFactory; +use crate::file_fetcher::TextDecodedFile; +use crate::sys::CliSys; use crate::tools::fmt::format_json; use crate::tools::test::is_supported_test_path; use crate::util::text_encoding::source_map_from_code; @@ -197,7 +199,7 @@ pub struct CoverageReport { fn generate_coverage_report( script_coverage: &cdp::ScriptCoverage, script_source: String, - maybe_source_map: &Option>, + maybe_source_map: Option<&[u8]>, output: &Option, ) -> CoverageReport { let maybe_source_map = maybe_source_map @@ -427,7 +429,7 @@ fn collect_coverages( .ignore_git_folder() .ignore_node_modules() 
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, file_patterns)?; + .collect_file_patterns(&CliSys::default(), file_patterns)?; let coverage_patterns = FilePatterns { base: initial_cwd.to_path_buf(), @@ -559,6 +561,12 @@ pub fn cover_files( }, None => None, }; + let get_message = |specifier: &ModuleSpecifier| -> String { + format!( + "Failed to fetch \"{}\" from cache. Before generating coverage report, run `deno test --coverage` to ensure consistent state.", + specifier, + ) + }; for script_coverage in script_coverages { let module_specifier = deno_core::resolve_url_or_path( @@ -566,21 +574,14 @@ pub fn cover_files( cli_options.initial_cwd(), )?; - let maybe_file = if module_specifier.scheme() == "file" { - file_fetcher.get_source(&module_specifier) - } else { - file_fetcher - .fetch_cached(&module_specifier, 10) - .with_context(|| { - format!("Failed to fetch \"{module_specifier}\" from cache.") - })? + let maybe_file_result = file_fetcher + .get_cached_source_or_local(&module_specifier) + .map_err(AnyError::from); + let file = match maybe_file_result { + Ok(Some(file)) => TextDecodedFile::decode(file)?, + Ok(None) => return Err(anyhow!("{}", get_message(&module_specifier))), + Err(err) => return Err(err).context(get_message(&module_specifier)), }; - let file = maybe_file.ok_or_else(|| { - anyhow!("Failed to fetch \"{}\" from cache. - Before generating coverage report, run `deno test --coverage` to ensure consistent state.", - module_specifier - ) - })?.into_text_decoded()?; let original_source = file.source.clone(); // Check if file was transpiled @@ -625,7 +626,7 @@ pub fn cover_files( let coverage_report = generate_coverage_report( &script_coverage, runtime_code.as_str().to_owned(), - &source_map, + source_map.as_deref(), &out_mode, ); diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 647a36dc48..0ff1806a9e 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -10,6 +10,7 @@ use crate::factory::CliFactory; use crate::graph_util::graph_exit_integrity_errors; use crate::graph_util::graph_walk_errors; use crate::graph_util::GraphWalkErrorsOptions; +use crate::sys::CliSys; use crate::tsc::get_types_declaration_file_text; use crate::util::fs::collect_specifiers; use deno_ast::diagnostics::Diagnostic; @@ -114,7 +115,7 @@ pub async fn doc( } DocSourceFileFlag::Paths(ref source_files) => { let module_graph_creator = factory.module_graph_creator().await?; - let fs = factory.fs(); + let sys = CliSys::default(); let module_specifiers = collect_specifiers( FilePatterns { @@ -141,7 +142,7 @@ pub async fn doc( graph_exit_integrity_errors(&graph); let errors = graph_walk_errors( &graph, - fs, + &sys, &module_specifiers, GraphWalkErrorsOptions { check_js: false, @@ -343,14 +344,14 @@ impl deno_doc::html::HrefResolver for DocResolver { let name = &res.req().name; Some(( format!("https://www.npmjs.com/package/{name}"), - name.to_owned(), + name.to_string(), )) } "jsr" => { let res = deno_semver::jsr::JsrPackageReqReference::from_str(module).ok()?; let name = &res.req().name; - Some((format!("https://jsr.io/{name}"), name.to_owned())) + Some((format!("https://jsr.io/{name}"), name.to_string())) } _ => None, } diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index e29627345c..7f9a15f4b2 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -17,6 +17,7 @@ use crate::args::UnstableFmtOptions; use crate::cache::Caches; use crate::colors; use crate::factory::CliFactory; +use crate::sys::CliSys; use 
crate::util::diff::diff; use crate::util::file_watcher; use crate::util::fs::canonicalize_path; @@ -57,7 +58,7 @@ pub async fn format( fmt_flags: FmtFlags, ) -> Result<(), AnyError> { if fmt_flags.is_stdin() { - let cli_options = CliOptions::from_flags(flags)?; + let cli_options = CliOptions::from_flags(&CliSys::default(), flags)?; let start_dir = &cli_options.start_dir; let fmt_config = start_dir .to_fmt_config(FilePatterns::new_with_base(start_dir.dir_path()))?; @@ -230,7 +231,7 @@ fn collect_fmt_files( .ignore_node_modules() .use_gitignore() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files) + .collect_file_patterns(&CliSys::default(), files) } /// Formats markdown (using ) and its code blocks diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 7a35f597c3..39a7a912bf 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -278,8 +278,10 @@ fn add_npm_packages_to_json( }); if let Some(pkg) = maybe_package { if let Some(module) = module.as_object_mut() { - module - .insert("npmPackage".to_string(), pkg.id.as_serialized().into()); + module.insert( + "npmPackage".to_string(), + pkg.id.as_serialized().into_string().into(), + ); } } } @@ -296,7 +298,7 @@ fn add_npm_packages_to_json( { dep.insert( "npmPackage".to_string(), - pkg.id.as_serialized().into(), + pkg.id.as_serialized().into_string().into(), ); } } @@ -324,19 +326,19 @@ fn add_npm_packages_to_json( let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len()); for pkg in sorted_packages { let mut kv = serde_json::Map::new(); - kv.insert("name".to_string(), pkg.id.nv.name.clone().into()); + kv.insert("name".to_string(), pkg.id.nv.name.to_string().into()); kv.insert("version".to_string(), pkg.id.nv.version.to_string().into()); let mut deps = pkg.dependencies.values().collect::>(); deps.sort(); let deps = deps .into_iter() - .map(|id| serde_json::Value::String(id.as_serialized())) + .map(|id| serde_json::Value::String(id.as_serialized().into_string())) .collect::>(); kv.insert("dependencies".to_string(), deps.into()); let registry_url = npmrc.get_registry_url(&pkg.id.nv.name); kv.insert("registryUrl".to_string(), registry_url.to_string().into()); - json_packages.insert(pkg.id.as_serialized(), kv.into()); + json_packages.insert(pkg.id.as_serialized().into_string(), kv.into()); } json.insert("npmPackages".to_string(), json_packages.into()); @@ -549,7 +551,7 @@ impl<'a> GraphDisplayContext<'a> { None => Specifier(module.specifier().clone()), }; let was_seen = !self.seen.insert(match &package_or_specifier { - Package(package) => package.id.as_serialized(), + Package(package) => package.id.as_serialized().into_string(), Specifier(specifier) => specifier.to_string(), }); let header_text = if was_seen { @@ -631,7 +633,8 @@ impl<'a> GraphDisplayContext<'a> { )); if let Some(package) = self.npm_info.packages.get(dep_id) { if !package.dependencies.is_empty() { - let was_seen = !self.seen.insert(package.id.as_serialized()); + let was_seen = + !self.seen.insert(package.id.as_serialized().into_string()); if was_seen { child.text = format!("{} {}", child.text, colors::gray("*")); } else { diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index d7c484beba..1bfd17f30d 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -3,7 +3,6 @@ use crate::args::resolve_no_prompt; use crate::args::AddFlags; use crate::args::CaData; -use crate::args::CacheSetting; use crate::args::ConfigFlag; use crate::args::Flags; use 
crate::args::InstallFlags; @@ -13,13 +12,14 @@ use crate::args::TypeCheckMode; use crate::args::UninstallFlags; use crate::args::UninstallKind; use crate::factory::CliFactory; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; use crate::graph_container::ModuleGraphContainer; use crate::http_util::HttpClientProvider; use crate::jsr::JsrFetchResolver; use crate::npm::NpmFetchResolver; use crate::util::fs::canonicalize_path_maybe_not_exists; +use deno_cache_dir::file_fetcher::CacheSetting; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::generic_error; @@ -161,11 +161,11 @@ pub async fn infer_name_from_url( let npm_ref = npm_ref.into_inner(); if let Some(sub_path) = npm_ref.sub_path { if !sub_path.contains('/') { - return Some(sub_path); + return Some(sub_path.to_string()); } } if !npm_ref.req.name.contains('/') { - return Some(npm_ref.req.name); + return Some(npm_ref.req.name.into_string()); } return None; } @@ -361,18 +361,19 @@ async fn install_global( let cli_options = factory.cli_options()?; let http_client = factory.http_client_provider(); let deps_http_cache = factory.global_http_cache()?; - let mut deps_file_fetcher = FileFetcher::new( + let deps_file_fetcher = CliFileFetcher::new( deps_http_cache.clone(), - CacheSetting::ReloadAll, - true, http_client.clone(), + factory.sys(), Default::default(), None, + true, + CacheSetting::ReloadAll, + log::Level::Trace, ); let npmrc = factory.cli_options().unwrap().npmrc(); - deps_file_fetcher.set_download_log_level(log::Level::Trace); let deps_file_fetcher = Arc::new(deps_file_fetcher); let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone())); let npm_resolver = Arc::new(NpmFetchResolver::new( diff --git a/cli/tools/lint/ast_buffer/buffer.rs b/cli/tools/lint/ast_buffer/buffer.rs new file mode 100644 index 0000000000..b6387a0ef9 --- /dev/null +++ b/cli/tools/lint/ast_buffer/buffer.rs @@ -0,0 +1,536 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::fmt::Display; + +use deno_ast::swc::common::Span; +use deno_ast::swc::common::DUMMY_SP; +use indexmap::IndexMap; + +/// Each property has this flag to mark what kind of value it holds- +/// Plain objects and arrays are not supported yet, but could be easily +/// added if needed. +#[derive(Debug, PartialEq)] +pub enum PropFlags { + Ref, + RefArr, + String, + Bool, + Null, + Undefined, +} + +impl From for u8 { + fn from(m: PropFlags) -> u8 { + m as u8 + } +} + +impl TryFrom for PropFlags { + type Error = &'static str; + + fn try_from(value: u8) -> Result { + match value { + 0 => Ok(PropFlags::Ref), + 1 => Ok(PropFlags::RefArr), + 2 => Ok(PropFlags::String), + 3 => Ok(PropFlags::Bool), + 4 => Ok(PropFlags::Null), + 5 => Ok(PropFlags::Undefined), + _ => Err("Unknown Prop flag"), + } + } +} + +const MASK_U32_1: u32 = 0b11111111_00000000_00000000_00000000; +const MASK_U32_2: u32 = 0b00000000_11111111_00000000_00000000; +const MASK_U32_3: u32 = 0b00000000_00000000_11111111_00000000; +const MASK_U32_4: u32 = 0b00000000_00000000_00000000_11111111; + +// TODO: There is probably a native Rust function to do this. 
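// A minimal sketch, not part of this patch: the mask-and-shift packing in
// the append_u32 that follows emits the most significant byte first, i.e.
// big-endian order, so the "native Rust function" the TODO asks about is
// u32::to_be_bytes. A drop-in equivalent under that assumption:
fn append_u32_via_to_be_bytes(result: &mut Vec<u8>, value: u32) {
  // to_be_bytes yields the four bytes with the high byte first, matching
  // the manual MASK_U32_1..MASK_U32_4 shifts below.
  result.extend_from_slice(&value.to_be_bytes());
}
// e.g. both versions push [0x01, 0x02, 0x03, 0x04] for the value 0x01020304.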
+pub fn append_u32(result: &mut Vec, value: u32) { + let v1: u8 = ((value & MASK_U32_1) >> 24) as u8; + let v2: u8 = ((value & MASK_U32_2) >> 16) as u8; + let v3: u8 = ((value & MASK_U32_3) >> 8) as u8; + let v4: u8 = (value & MASK_U32_4) as u8; + + result.push(v1); + result.push(v2); + result.push(v3); + result.push(v4); +} + +pub fn append_usize(result: &mut Vec, value: usize) { + let raw = u32::try_from(value).unwrap(); + append_u32(result, raw); +} + +pub fn write_usize(result: &mut [u8], value: usize, idx: usize) { + let raw = u32::try_from(value).unwrap(); + + let v1: u8 = ((raw & MASK_U32_1) >> 24) as u8; + let v2: u8 = ((raw & MASK_U32_2) >> 16) as u8; + let v3: u8 = ((raw & MASK_U32_3) >> 8) as u8; + let v4: u8 = (raw & MASK_U32_4) as u8; + + result[idx] = v1; + result[idx + 1] = v2; + result[idx + 2] = v3; + result[idx + 3] = v4; +} + +#[derive(Debug)] +pub struct StringTable { + id: usize, + table: IndexMap, +} + +impl StringTable { + pub fn new() -> Self { + Self { + id: 0, + table: IndexMap::new(), + } + } + + pub fn insert(&mut self, s: &str) -> usize { + if let Some(id) = self.table.get(s) { + return *id; + } + + let id = self.id; + self.id += 1; + self.table.insert(s.to_string(), id); + id + } + + pub fn serialize(&mut self) -> Vec { + let mut result: Vec = vec![]; + append_u32(&mut result, self.table.len() as u32); + + // Assume that it's sorted by id + for (s, _id) in &self.table { + let bytes = s.as_bytes(); + append_u32(&mut result, bytes.len() as u32); + result.append(&mut bytes.to_vec()); + } + + result + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct NodeRef(pub usize); + +/// Represents an offset to a node whose schema hasn't been committed yet +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct PendingNodeRef(pub NodeRef); + +#[derive(Debug)] +pub struct BoolPos(pub usize); +#[derive(Debug)] +pub struct FieldPos(pub usize); +#[derive(Debug)] +pub struct FieldArrPos(pub usize); +#[derive(Debug)] +pub struct StrPos(pub usize); +#[derive(Debug)] +pub struct UndefPos(pub usize); +#[derive(Debug)] +pub struct NullPos(pub usize); + +#[derive(Debug)] +pub enum NodePos { + Bool(BoolPos), + #[allow(dead_code)] + Field(FieldPos), + #[allow(dead_code)] + FieldArr(FieldArrPos), + Str(StrPos), + Undef(UndefPos), + #[allow(dead_code)] + Null(NullPos), +} + +pub trait AstBufSerializer +where + K: Into + Display, + P: Into + Display, +{ + fn header(&mut self, kind: K, parent: NodeRef, span: &Span) + -> PendingNodeRef; + fn ref_field(&mut self, prop: P) -> FieldPos; + fn ref_vec_field(&mut self, prop: P, len: usize) -> FieldArrPos; + fn str_field(&mut self, prop: P) -> StrPos; + fn bool_field(&mut self, prop: P) -> BoolPos; + fn undefined_field(&mut self, prop: P) -> UndefPos; + #[allow(dead_code)] + fn null_field(&mut self, prop: P) -> NullPos; + fn commit_schema(&mut self, offset: PendingNodeRef) -> NodeRef; + + fn write_ref(&mut self, pos: FieldPos, value: NodeRef); + fn write_maybe_ref(&mut self, pos: FieldPos, value: Option); + fn write_refs(&mut self, pos: FieldArrPos, value: Vec); + fn write_str(&mut self, pos: StrPos, value: &str); + fn write_bool(&mut self, pos: BoolPos, value: bool); + + fn serialize(&mut self) -> Vec; +} + +#[derive(Debug)] +pub struct SerializeCtx { + buf: Vec, + start_buf: NodeRef, + str_table: StringTable, + kind_map: Vec, + prop_map: Vec, + field_count: u8, +} + +/// This is the internal context used to allocate and fill the buffer. The point +/// is to be able to write absolute offsets directly in place. 
+/// This is the internal context used to allocate and fill the buffer. The point
+/// is to be able to write absolute offsets directly in place.
+///
+/// The typical workflow is to reserve all necessary space for the current
+/// node with placeholders for the offsets of the child nodes. Once child
+/// nodes have been traversed, we know their offsets and can replace the
+/// placeholder values with the actual ones.
+impl SerializeCtx {
+  pub fn new(kind_len: u8, prop_len: u8) -> Self {
+    let kind_size = kind_len as usize;
+    let prop_size = prop_len as usize;
+    let mut ctx = Self {
+      start_buf: NodeRef(0),
+      buf: vec![],
+      str_table: StringTable::new(),
+      kind_map: vec![0; kind_size],
+      prop_map: vec![0; prop_size],
+      field_count: 0,
+    };
+
+    let empty_str = ctx.str_table.insert("");
+
+    // Placeholder node is always 0
+    ctx.append_node(0, NodeRef(0), &DUMMY_SP, 0);
+    ctx.kind_map[0] = empty_str;
+    ctx.start_buf = NodeRef(ctx.buf.len());
+
+    // Insert default props that are always present
+    let type_str = ctx.str_table.insert("type");
+    let parent_str = ctx.str_table.insert("parent");
+    let range_str = ctx.str_table.insert("range");
+    let length_str = ctx.str_table.insert("length");
+
+    // These values are expected to be in this order on the JS side
+    ctx.prop_map[0] = empty_str;
+    ctx.prop_map[1] = type_str;
+    ctx.prop_map[2] = parent_str;
+    ctx.prop_map[3] = range_str;
+    ctx.prop_map[4] = length_str;
+
+    ctx
+  }
+
+  /// Allocate a field's header: the property id byte followed by a flag byte
+  fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_count += 1;
+
+    let offset = self.buf.len();
+
+    let n: u8 = prop.clone().into();
+    self.buf.push(n);
+
+    if let Some(v) = self.prop_map.get::<usize>(n.into()) {
+      if *v == 0 {
+        let id = self.str_table.insert(&format!("{prop}"));
+        self.prop_map[n as usize] = id;
+      }
+    }
+
+    let flags: u8 = prop_flags.into();
+    self.buf.push(flags);
+
+    offset
+  }
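Editor's note for readers tracking the byte layout: every property starts with the two header bytes written above, and the payload depends on the flag. The allocators that follow produce these layouts:

// Field layouts produced by this builder (derived from the methods below):
//   Ref:       <prop u8> <flags u8> <node offset u32>
//   RefArr:    <prop u8> <flags u8> <count u32> <node offset u32> * count
//   String:    <prop u8> <flags u8> <string id u32>
//   Bool:      <prop u8> <flags u8> <value u8>
//   Undefined: <prop u8> <flags u8>            (no payload)
//   Null:      <prop u8> <flags u8>            (no payload)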
+  /// Allocate a property pointing to another node.
+  fn field<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    let offset = self.field_header(prop, prop_flags);
+
+    append_usize(&mut self.buf, 0);
+
+    offset
+  }
+
+  fn append_node(
+    &mut self,
+    kind: u8,
+    parent: NodeRef,
+    span: &Span,
+    prop_count: usize,
+  ) -> PendingNodeRef {
+    let offset = self.buf.len();
+
+    // Node type fits in a u8
+    self.buf.push(kind);
+
+    // Offset to the parent node. Will be 0 if none exists
+    append_usize(&mut self.buf, parent.0);
+
+    // Span, the start and end location of this node
+    append_u32(&mut self.buf, span.lo.0);
+    append_u32(&mut self.buf, span.hi.0);
+
+    // No node has more than 10 properties
+    debug_assert!(prop_count < 10);
+    self.buf.push(prop_count as u8);
+
+    PendingNodeRef(NodeRef(offset))
+  }
+
+  pub fn commit_schema(&mut self, node_ref: PendingNodeRef) -> NodeRef {
+    let mut offset = node_ref.0 .0;
+
+    // type + parentId + span lo + span hi
+    offset += 1 + 4 + 4 + 4;
+
+    self.buf[offset] = self.field_count;
+    self.field_count = 0;
+
+    node_ref.0
+  }
+
+  /// Allocate the node header. It's always the same for every node.
+  ///   <type u8>
+  ///   <parent offset u32>
+  ///   <span lo u32>
+  ///   <span hi u32>
+  ///   <property count u8> (There is no node with more than 10 properties)
+  pub fn header<N>(
+    &mut self,
+    kind: N,
+    parent: NodeRef,
+    span: &Span,
+  ) -> PendingNodeRef
+  where
+    N: Into<u8> + Display + Clone,
+  {
+    let n: u8 = kind.clone().into();
+
+    if let Some(v) = self.kind_map.get::<usize>(n.into()) {
+      if *v == 0 {
+        let id = self.str_table.insert(&format!("{kind}"));
+        self.kind_map[n as usize] = id;
+      }
+    }
+
+    // Prop count will be filled with the actual value when the
+    // schema is committed.
+    self.append_node(n, parent, span, 0)
+  }
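This header/commit_schema split is the two-phase protocol every serializer function in swc.rs below follows. A condensed sketch (editor's example; the `AstNode`/`AstProp` variants live in ts_estree.rs, and `node`, `parent`, `span` are stand-ins):

// 1) Reserve: write the node header, then one placeholder per property.
let raw = ctx.header(AstNode::IfStatement, parent, &span);
let test_pos = ctx.ref_field(AstProp::Test);
// 2) Commit: the field count is now known and patched into the header.
let pos = ctx.commit_schema(raw);
// 3) Recurse: children serialize themselves and return their offsets.
let test = serialize_expr(ctx, &node.test, pos);
// 4) Patch: overwrite the placeholder with the child's absolute offset.
ctx.write_ref(test_pos, test);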
+  /// Allocate a reference property that will hold the offset of
+  /// another node.
+  pub fn ref_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field(prop, PropFlags::Ref)
+  }
+  /// Allocate a property that is a vec of node offsets pointing to other
+  /// nodes.
+  pub fn ref_vec_field<P>(&mut self, prop: P, len: usize) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    let offset = self.field(prop, PropFlags::RefArr);
+
+    for _ in 0..len {
+      append_u32(&mut self.buf, 0);
+    }
+
+    offset
+  }
+  /// Allocate a property representing a string. Strings are deduplicated
+  /// in the message and the property will only contain the string id.
+  pub fn str_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field(prop, PropFlags::String)
+  }
+  /// Allocate a bool field
+  pub fn bool_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    let offset = self.field_header(prop, PropFlags::Bool);
+    self.buf.push(0);
+    offset
+  }
+  /// Allocate an undefined field
+  pub fn undefined_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::Undefined)
+  }
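Editor's sketch of the decode direction, the inverse of the allocators above (u32s are big-endian, as written by append_u32; not part of the diff):

fn field_payload_len(buf: &[u8], offset: usize) -> usize {
  // buf[offset] is the prop id; buf[offset + 1] is the flag byte.
  let flags = PropFlags::try_from(buf[offset + 1]).unwrap();
  match flags {
    PropFlags::Ref | PropFlags::String => 4,
    PropFlags::RefArr => {
      let count =
        u32::from_be_bytes(buf[offset + 2..offset + 6].try_into().unwrap());
      4 + 4 * count as usize
    }
    PropFlags::Bool => 1,
    PropFlags::Null | PropFlags::Undefined => 0,
  }
}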
+  /// Allocate a null field
+  #[allow(dead_code)]
+  pub fn null_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::Null)
+  }
+
+  /// Replace the placeholder of a reference field with the actual offset
+  /// to the node we want to point to.
+  pub fn write_ref(&mut self, field_offset: usize, value: NodeRef) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
+        panic!("Trying to write a ref into a non-ref field")
+      }
+    }
+
+    write_usize(&mut self.buf, value.0, field_offset + 2);
+  }
+
+  /// Helper for writing optional node offsets
+  pub fn write_maybe_ref(
+    &mut self,
+    field_offset: usize,
+    value: Option<NodeRef>,
+  ) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
+        panic!("Trying to write a ref into a non-ref field")
+      }
+    }
+
+    let ref_value = if let Some(v) = value { v } else { NodeRef(0) };
+    write_usize(&mut self.buf, ref_value.0, field_offset + 2);
+  }
+
+  /// Write a vec of node offsets into the property. The necessary space
+  /// has been reserved earlier.
+  pub fn write_refs(&mut self, field_offset: usize, value: Vec<NodeRef>) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::RefArr {
+        panic!("Trying to write refs into a non-ref array field")
+      }
+    }
+
+    let mut offset = field_offset + 2;
+    write_usize(&mut self.buf, value.len(), offset);
+    offset += 4;
+
+    for item in value {
+      write_usize(&mut self.buf, item.0, offset);
+      offset += 4;
+    }
+  }
+
+  /// Store the string in our string table and save the id of the string
+  /// in the current field.
+  pub fn write_str(&mut self, field_offset: usize, value: &str) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::String {
+        panic!("Trying to write a string into a non-string field")
+      }
+    }
+
+    let id = self.str_table.insert(value);
+    write_usize(&mut self.buf, id, field_offset + 2);
+  }
+
+  /// Write a bool to a field.
+  pub fn write_bool(&mut self, field_offset: usize, value: bool) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Bool {
+        panic!("Trying to write a bool into a non-bool field")
+      }
+    }
+
+    self.buf[field_offset + 2] = if value { 1 } else { 0 };
+  }
+
+  /// Serialize all information we have into a buffer that can be sent to JS.
+  /// It has the following structure:
+  ///
+  ///   <...ast>
+  ///   <string table>
+  ///   <node kind map>  <- node kind id maps to string id
+  ///   <node prop map>  <- node property id maps to string id
+  ///   <offset kind map>
+  ///   <offset prop map>
+  ///   <offset str table>
+  ///   <offset ast>
+  pub fn serialize(&mut self) -> Vec<u8> {
+    let mut buf: Vec<u8> = vec![];
+
+    // The buffer starts with the serialized AST first, because that
+    // contains absolute offsets. By putting this at the start of the
+    // message we don't have to waste time updating any offsets.
+    buf.append(&mut self.buf);
+
+    // Next follows the string table. We'll keep track of the offset
+    // in the message of where the string table begins
+    let offset_str_table = buf.len();
+
+    // Serialize string table
+    buf.append(&mut self.str_table.serialize());
+
+    // Next, serialize the mappings of kind -> string of encountered
+    // nodes in the AST. We use this additional lookup table to compress
+    // the message so that we can save space by using a u8. All nodes of
+    // JS, TS and JSX together are <200.
+    let offset_kind_map = buf.len();
+
+    // Write the total number of entries in the kind -> str mapping table
+    // TODO: make this a u8
+    append_usize(&mut buf, self.kind_map.len());
+    for v in &self.kind_map {
+      append_usize(&mut buf, *v);
+    }
+
+    // Store offset to prop -> string map. It's the same as with node kinds,
+    // as the total number of properties is <120, which allows us to store it
+    // as a u8.
+    let offset_prop_map = buf.len();
+    // Write the total number of entries in the prop -> str mapping table
+    append_usize(&mut buf, self.prop_map.len());
+    for v in &self.prop_map {
+      append_usize(&mut buf, *v);
+    }
+
+    // Putting offsets of relevant parts of the buffer at the end. This
+    // allows us to hop to the relevant part by merely looking at the last
+    // four values in the message. Each value represents an offset into the
+    // buffer.
+    append_usize(&mut buf, offset_kind_map);
+    append_usize(&mut buf, offset_prop_map);
+    append_usize(&mut buf, offset_str_table);
+    append_usize(&mut buf, self.start_buf.0);
+
+    buf
+  }
+}
diff --git a/cli/tools/lint/ast_buffer/mod.rs b/cli/tools/lint/ast_buffer/mod.rs
new file mode 100644
index 0000000000..8838bcc5f2
--- /dev/null
+++ b/cli/tools/lint/ast_buffer/mod.rs
@@ -0,0 +1,13 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use deno_ast::ParsedSource;
+use swc::serialize_swc_to_buffer;
+
+mod buffer;
+mod swc;
+mod ts_estree;
+
+pub fn serialize_ast_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
+  // TODO: We could support multiple languages here
+  serialize_swc_to_buffer(parsed_source)
+}
diff --git a/cli/tools/lint/ast_buffer/swc.rs b/cli/tools/lint/ast_buffer/swc.rs
new file mode 100644
index 0000000000..b26c213105
--- /dev/null
+++ b/cli/tools/lint/ast_buffer/swc.rs
@@ -0,0 +1,3134 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
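Editor's sketch of the end-to-end call, and of how a consumer can locate the sections of the message (not part of the diff):

fn demo(parsed_source: &deno_ast::ParsedSource) {
  let buf = serialize_ast_to_buffer(parsed_source);

  // The message ends with four big-endian u32 offsets, in this order:
  // kind map, prop map, string table, first AST node (see serialize() above).
  let tail = buf.len() - 16;
  let offset_kind_map =
    u32::from_be_bytes(buf[tail..tail + 4].try_into().unwrap()) as usize;
  assert!(offset_kind_map < buf.len());
}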
+ +use deno_ast::swc::ast::AssignTarget; +use deno_ast::swc::ast::AssignTargetPat; +use deno_ast::swc::ast::BlockStmtOrExpr; +use deno_ast::swc::ast::Callee; +use deno_ast::swc::ast::ClassMember; +use deno_ast::swc::ast::Decl; +use deno_ast::swc::ast::ExportSpecifier; +use deno_ast::swc::ast::Expr; +use deno_ast::swc::ast::ExprOrSpread; +use deno_ast::swc::ast::FnExpr; +use deno_ast::swc::ast::ForHead; +use deno_ast::swc::ast::Function; +use deno_ast::swc::ast::Ident; +use deno_ast::swc::ast::IdentName; +use deno_ast::swc::ast::JSXAttrName; +use deno_ast::swc::ast::JSXAttrOrSpread; +use deno_ast::swc::ast::JSXAttrValue; +use deno_ast::swc::ast::JSXElement; +use deno_ast::swc::ast::JSXElementChild; +use deno_ast::swc::ast::JSXElementName; +use deno_ast::swc::ast::JSXEmptyExpr; +use deno_ast::swc::ast::JSXExpr; +use deno_ast::swc::ast::JSXExprContainer; +use deno_ast::swc::ast::JSXFragment; +use deno_ast::swc::ast::JSXMemberExpr; +use deno_ast::swc::ast::JSXNamespacedName; +use deno_ast::swc::ast::JSXObject; +use deno_ast::swc::ast::JSXOpeningElement; +use deno_ast::swc::ast::Lit; +use deno_ast::swc::ast::MemberExpr; +use deno_ast::swc::ast::MemberProp; +use deno_ast::swc::ast::ModuleDecl; +use deno_ast::swc::ast::ModuleExportName; +use deno_ast::swc::ast::ModuleItem; +use deno_ast::swc::ast::ObjectPatProp; +use deno_ast::swc::ast::OptChainBase; +use deno_ast::swc::ast::Param; +use deno_ast::swc::ast::ParamOrTsParamProp; +use deno_ast::swc::ast::Pat; +use deno_ast::swc::ast::PrivateName; +use deno_ast::swc::ast::Program; +use deno_ast::swc::ast::Prop; +use deno_ast::swc::ast::PropName; +use deno_ast::swc::ast::PropOrSpread; +use deno_ast::swc::ast::SimpleAssignTarget; +use deno_ast::swc::ast::Stmt; +use deno_ast::swc::ast::SuperProp; +use deno_ast::swc::ast::Tpl; +use deno_ast::swc::ast::TsEntityName; +use deno_ast::swc::ast::TsEnumMemberId; +use deno_ast::swc::ast::TsFnOrConstructorType; +use deno_ast::swc::ast::TsFnParam; +use deno_ast::swc::ast::TsIndexSignature; +use deno_ast::swc::ast::TsLit; +use deno_ast::swc::ast::TsLitType; +use deno_ast::swc::ast::TsThisTypeOrIdent; +use deno_ast::swc::ast::TsType; +use deno_ast::swc::ast::TsTypeAnn; +use deno_ast::swc::ast::TsTypeElement; +use deno_ast::swc::ast::TsTypeParam; +use deno_ast::swc::ast::TsTypeParamDecl; +use deno_ast::swc::ast::TsTypeParamInstantiation; +use deno_ast::swc::ast::TsTypeQueryExpr; +use deno_ast::swc::ast::TsUnionOrIntersectionType; +use deno_ast::swc::ast::VarDeclOrExpr; +use deno_ast::swc::common::Span; +use deno_ast::swc::common::Spanned; +use deno_ast::swc::common::SyntaxContext; +use deno_ast::view::Accessibility; +use deno_ast::view::AssignOp; +use deno_ast::view::BinaryOp; +use deno_ast::view::TruePlusMinus; +use deno_ast::view::TsKeywordTypeKind; +use deno_ast::view::TsTypeOperatorOp; +use deno_ast::view::UnaryOp; +use deno_ast::view::UpdateOp; +use deno_ast::view::VarDeclKind; +use deno_ast::ParsedSource; + +use super::buffer::AstBufSerializer; +use super::buffer::BoolPos; +use super::buffer::NodePos; +use super::buffer::NodeRef; +use super::buffer::StrPos; +use super::ts_estree::AstNode; +use super::ts_estree::AstProp; +use super::ts_estree::TsEsTreeBuilder; + +pub fn serialize_swc_to_buffer(parsed_source: &ParsedSource) -> Vec { + let mut ctx = TsEsTreeBuilder::new(); + + let program = &parsed_source.program(); + + let raw = ctx.header(AstNode::Program, NodeRef(0), &program.span()); + let source_type_pos = ctx.str_field(AstProp::SourceType); + + match program.as_ref() { + Program::Module(module) => { + let 
body_pos = ctx.ref_vec_field(AstProp::Body, module.body.len()); + let pos = ctx.commit_schema(raw); + + let children = module + .body + .iter() + .map(|item| match item { + ModuleItem::ModuleDecl(module_decl) => { + serialize_module_decl(&mut ctx, module_decl, pos) + } + ModuleItem::Stmt(stmt) => serialize_stmt(&mut ctx, stmt, pos), + }) + .collect::>(); + + ctx.write_str(source_type_pos, "module"); + ctx.write_refs(body_pos, children); + } + Program::Script(script) => { + let body_pos = ctx.ref_vec_field(AstProp::Body, script.body.len()); + let pos = ctx.commit_schema(raw); + + let children = script + .body + .iter() + .map(|stmt| serialize_stmt(&mut ctx, stmt, pos)) + .collect::>(); + + ctx.write_str(source_type_pos, "script"); + ctx.write_refs(body_pos, children); + } + } + + ctx.serialize() +} + +fn serialize_module_decl( + ctx: &mut TsEsTreeBuilder, + module_decl: &ModuleDecl, + parent: NodeRef, +) -> NodeRef { + match module_decl { + ModuleDecl::Import(node) => { + let raw = ctx.header(AstNode::ImportExpression, parent, &node.span); + ctx.commit_schema(raw) + } + ModuleDecl::ExportDecl(node) => { + let raw = ctx.header(AstNode::ExportNamedDeclaration, parent, &node.span); + let decl_pos = ctx.ref_field(AstProp::Declarations); + let pos = ctx.commit_schema(raw); + + let decl = serialize_decl(ctx, &node.decl, pos); + + ctx.write_ref(decl_pos, decl); + + pos + } + ModuleDecl::ExportNamed(node) => { + let raw = ctx.header(AstNode::ExportNamedDeclaration, parent, &node.span); + let src_pos = ctx.ref_field(AstProp::Source); + let spec_pos = + ctx.ref_vec_field(AstProp::Specifiers, node.specifiers.len()); + let id = ctx.commit_schema(raw); + + // FIXME: Flags + // let mut flags = FlagValue::new(); + // flags.set(Flag::ExportType); + + let src_id = node + .src + .as_ref() + .map(|src| serialize_lit(ctx, &Lit::Str(*src.clone()), id)); + + let spec_ids = node + .specifiers + .iter() + .map(|spec| { + match spec { + ExportSpecifier::Named(child) => { + let raw = ctx.header(AstNode::ExportSpecifier, id, &child.span); + let local_pos = ctx.ref_field(AstProp::Local); + let exp_pos = ctx.ref_field(AstProp::Exported); + let spec_pos = ctx.commit_schema(raw); + + // let mut flags = FlagValue::new(); + // flags.set(Flag::ExportType); + + let local = + serialize_module_exported_name(ctx, &child.orig, spec_pos); + + let exported = child.exported.as_ref().map(|exported| { + serialize_module_exported_name(ctx, exported, spec_pos) + }); + + // ctx.write_flags(&flags); + ctx.write_ref(local_pos, local); + ctx.write_maybe_ref(exp_pos, exported); + + spec_pos + } + + // These two aren't syntactically valid + ExportSpecifier::Namespace(_) => todo!(), + ExportSpecifier::Default(_) => todo!(), + } + }) + .collect::>(); + + // ctx.write_flags(&flags); + ctx.write_maybe_ref(src_pos, src_id); + ctx.write_refs(spec_pos, spec_ids); + + id + } + ModuleDecl::ExportDefaultDecl(node) => { + let raw = + ctx.header(AstNode::ExportDefaultDeclaration, parent, &node.span); + ctx.commit_schema(raw) + } + ModuleDecl::ExportDefaultExpr(node) => { + let raw = + ctx.header(AstNode::ExportDefaultDeclaration, parent, &node.span); + ctx.commit_schema(raw) + } + ModuleDecl::ExportAll(node) => { + let raw = ctx.header(AstNode::ExportAllDeclaration, parent, &node.span); + ctx.commit_schema(raw) + } + ModuleDecl::TsImportEquals(node) => { + let raw = ctx.header(AstNode::TsImportEquals, parent, &node.span); + ctx.commit_schema(raw) + } + ModuleDecl::TsExportAssignment(node) => { + let raw = ctx.header(AstNode::TsExportAssignment, parent, 
&node.span); + ctx.commit_schema(raw) + } + ModuleDecl::TsNamespaceExport(node) => { + let raw = ctx.header(AstNode::TsNamespaceExport, parent, &node.span); + ctx.commit_schema(raw) + } + } +} + +fn serialize_stmt( + ctx: &mut TsEsTreeBuilder, + stmt: &Stmt, + parent: NodeRef, +) -> NodeRef { + match stmt { + Stmt::Block(node) => { + let raw = ctx.header(AstNode::BlockStatement, parent, &node.span); + let body_pos = ctx.ref_vec_field(AstProp::Body, node.stmts.len()); + let pos = ctx.commit_schema(raw); + + let children = node + .stmts + .iter() + .map(|stmt| serialize_stmt(ctx, stmt, pos)) + .collect::>(); + + ctx.write_refs(body_pos, children); + + pos + } + Stmt::Empty(_) => NodeRef(0), + Stmt::Debugger(node) => { + let raw = ctx.header(AstNode::DebuggerStatement, parent, &node.span); + ctx.commit_schema(raw) + } + Stmt::With(_) => todo!(), + Stmt::Return(node) => { + let raw = ctx.header(AstNode::ReturnStatement, parent, &node.span); + let arg_pos = ctx.ref_field(AstProp::Argument); + let pos = ctx.commit_schema(raw); + + let arg = node.arg.as_ref().map(|arg| serialize_expr(ctx, arg, pos)); + ctx.write_maybe_ref(arg_pos, arg); + + pos + } + Stmt::Labeled(node) => { + let raw = ctx.header(AstNode::LabeledStatement, parent, &node.span); + let label_pos = ctx.ref_field(AstProp::Label); + let body_pos = ctx.ref_field(AstProp::Body); + let pos = ctx.commit_schema(raw); + + let ident = serialize_ident(ctx, &node.label, pos); + let stmt = serialize_stmt(ctx, &node.body, pos); + + ctx.write_ref(label_pos, ident); + ctx.write_ref(body_pos, stmt); + + pos + } + Stmt::Break(node) => { + let raw = ctx.header(AstNode::BreakStatement, parent, &node.span); + let label_pos = ctx.ref_field(AstProp::Label); + let pos = ctx.commit_schema(raw); + + let arg = node + .label + .as_ref() + .map(|label| serialize_ident(ctx, label, pos)); + + ctx.write_maybe_ref(label_pos, arg); + + pos + } + Stmt::Continue(node) => { + let raw = ctx.header(AstNode::ContinueStatement, parent, &node.span); + let label_pos = ctx.ref_field(AstProp::Label); + let pos = ctx.commit_schema(raw); + + let arg = node + .label + .as_ref() + .map(|label| serialize_ident(ctx, label, pos)); + + ctx.write_maybe_ref(label_pos, arg); + + pos + } + Stmt::If(node) => { + let raw = ctx.header(AstNode::IfStatement, parent, &node.span); + let test_pos = ctx.ref_field(AstProp::Test); + let cons_pos = ctx.ref_field(AstProp::Consequent); + let alt_pos = ctx.ref_field(AstProp::Alternate); + let pos = ctx.commit_schema(raw); + + let test = serialize_expr(ctx, node.test.as_ref(), pos); + let cons = serialize_stmt(ctx, node.cons.as_ref(), pos); + let alt = node.alt.as_ref().map(|alt| serialize_stmt(ctx, alt, pos)); + + ctx.write_ref(test_pos, test); + ctx.write_ref(cons_pos, cons); + ctx.write_maybe_ref(alt_pos, alt); + + pos + } + Stmt::Switch(node) => { + let raw = ctx.header(AstNode::SwitchStatement, parent, &node.span); + let disc_pos = ctx.ref_field(AstProp::Discriminant); + let cases_pos = ctx.ref_vec_field(AstProp::Cases, node.cases.len()); + let pos = ctx.commit_schema(raw); + + let disc = serialize_expr(ctx, &node.discriminant, pos); + + let cases = node + .cases + .iter() + .map(|case| { + let raw = ctx.header(AstNode::SwitchCase, pos, &case.span); + let test_pos = ctx.ref_field(AstProp::Test); + let cons_pos = + ctx.ref_vec_field(AstProp::Consequent, case.cons.len()); + let case_pos = ctx.commit_schema(raw); + + let test = case + .test + .as_ref() + .map(|test| serialize_expr(ctx, test, case_pos)); + + let cons = case + .cons + .iter() + 
.map(|cons| serialize_stmt(ctx, cons, case_pos)) + .collect::>(); + + ctx.write_maybe_ref(test_pos, test); + ctx.write_refs(cons_pos, cons); + + case_pos + }) + .collect::>(); + + ctx.write_ref(disc_pos, disc); + ctx.write_refs(cases_pos, cases); + + pos + } + Stmt::Throw(node) => { + let raw = ctx.header(AstNode::ThrowStatement, parent, &node.span); + let arg_pos = ctx.ref_field(AstProp::Argument); + let pos = ctx.commit_schema(raw); + + let arg = serialize_expr(ctx, &node.arg, pos); + ctx.write_ref(arg_pos, arg); + + pos + } + Stmt::Try(node) => { + let raw = ctx.header(AstNode::TryStatement, parent, &node.span); + let block_pos = ctx.ref_field(AstProp::Block); + let handler_pos = ctx.ref_field(AstProp::Handler); + let finalizer_pos = ctx.ref_field(AstProp::Finalizer); + let pos = ctx.commit_schema(raw); + + let block = serialize_stmt(ctx, &Stmt::Block(node.block.clone()), pos); + + let handler = node.handler.as_ref().map(|catch| { + let raw = ctx.header(AstNode::CatchClause, pos, &catch.span); + let param_pos = ctx.ref_field(AstProp::Param); + let body_pos = ctx.ref_field(AstProp::Body); + let clause_pos = ctx.commit_schema(raw); + + let param = catch + .param + .as_ref() + .map(|param| serialize_pat(ctx, param, clause_pos)); + + let body = + serialize_stmt(ctx, &Stmt::Block(catch.body.clone()), clause_pos); + + ctx.write_maybe_ref(param_pos, param); + ctx.write_ref(body_pos, body); + + clause_pos + }); + + let finalizer = node.finalizer.as_ref().map(|finalizer| { + serialize_stmt(ctx, &Stmt::Block(finalizer.clone()), pos) + }); + + ctx.write_ref(block_pos, block); + ctx.write_maybe_ref(handler_pos, handler); + ctx.write_maybe_ref(finalizer_pos, finalizer); + + pos + } + Stmt::While(node) => { + let raw = ctx.header(AstNode::WhileStatement, parent, &node.span); + let test_pos = ctx.ref_field(AstProp::Test); + let body_pos = ctx.ref_field(AstProp::Body); + let pos = ctx.commit_schema(raw); + + let test = serialize_expr(ctx, node.test.as_ref(), pos); + let stmt = serialize_stmt(ctx, node.body.as_ref(), pos); + + ctx.write_ref(test_pos, test); + ctx.write_ref(body_pos, stmt); + + pos + } + Stmt::DoWhile(node) => { + let raw = ctx.header(AstNode::DoWhileStatement, parent, &node.span); + let test_pos = ctx.ref_field(AstProp::Test); + let body_pos = ctx.ref_field(AstProp::Body); + let pos = ctx.commit_schema(raw); + + let expr = serialize_expr(ctx, node.test.as_ref(), pos); + let stmt = serialize_stmt(ctx, node.body.as_ref(), pos); + + ctx.write_ref(test_pos, expr); + ctx.write_ref(body_pos, stmt); + + pos + } + Stmt::For(node) => { + let raw = ctx.header(AstNode::ForStatement, parent, &node.span); + let init_pos = ctx.ref_field(AstProp::Init); + let test_pos = ctx.ref_field(AstProp::Test); + let update_pos = ctx.ref_field(AstProp::Update); + let body_pos = ctx.ref_field(AstProp::Body); + let pos = ctx.commit_schema(raw); + + let init = node.init.as_ref().map(|init| match init { + VarDeclOrExpr::VarDecl(var_decl) => { + serialize_stmt(ctx, &Stmt::Decl(Decl::Var(var_decl.clone())), pos) + } + VarDeclOrExpr::Expr(expr) => serialize_expr(ctx, expr, pos), + }); + + let test = node + .test + .as_ref() + .map(|expr| serialize_expr(ctx, expr, pos)); + let update = node + .update + .as_ref() + .map(|expr| serialize_expr(ctx, expr, pos)); + let body = serialize_stmt(ctx, node.body.as_ref(), pos); + + ctx.write_maybe_ref(init_pos, init); + ctx.write_maybe_ref(test_pos, test); + ctx.write_maybe_ref(update_pos, update); + ctx.write_ref(body_pos, body); + + pos + } + Stmt::ForIn(node) => { + let raw = 
ctx.header(AstNode::ForInStatement, parent, &node.span);
+      let left_pos = ctx.ref_field(AstProp::Left);
+      let right_pos = ctx.ref_field(AstProp::Right);
+      let body_pos = ctx.ref_field(AstProp::Body);
+      let pos = ctx.commit_schema(raw);
+
+      let left = serialize_for_head(ctx, &node.left, pos);
+      let right = serialize_expr(ctx, node.right.as_ref(), pos);
+      let body = serialize_stmt(ctx, node.body.as_ref(), pos);
+
+      ctx.write_ref(left_pos, left);
+      ctx.write_ref(right_pos, right);
+      ctx.write_ref(body_pos, body);
+
+      pos
+    }
+    Stmt::ForOf(node) => {
+      let raw = ctx.header(AstNode::ForOfStatement, parent, &node.span);
+      let await_pos = ctx.bool_field(AstProp::Await);
+      let left_pos = ctx.ref_field(AstProp::Left);
+      let right_pos = ctx.ref_field(AstProp::Right);
+      let body_pos = ctx.ref_field(AstProp::Body);
+      let pos = ctx.commit_schema(raw);
+
+      let left = serialize_for_head(ctx, &node.left, pos);
+      let right = serialize_expr(ctx, node.right.as_ref(), pos);
+      let body = serialize_stmt(ctx, node.body.as_ref(), pos);
+
+      ctx.write_bool(await_pos, node.is_await);
+      ctx.write_ref(left_pos, left);
+      ctx.write_ref(right_pos, right);
+      ctx.write_ref(body_pos, body);
+
+      pos
+    }
+    Stmt::Decl(node) => serialize_decl(ctx, node, parent),
+    Stmt::Expr(node) => {
+      let raw = ctx.header(AstNode::ExpressionStatement, parent, &node.span);
+      let expr_pos = ctx.ref_field(AstProp::Expression);
+      let pos = ctx.commit_schema(raw);
+
+      let expr = serialize_expr(ctx, node.expr.as_ref(), pos);
+      ctx.write_ref(expr_pos, expr);
+
+      pos
+    }
+  }
+}
+
+fn serialize_expr(
+  ctx: &mut TsEsTreeBuilder,
+  expr: &Expr,
+  parent: NodeRef,
+) -> NodeRef {
+  match expr {
+    Expr::This(node) => {
+      let raw = ctx.header(AstNode::ThisExpression, parent, &node.span);
+      ctx.commit_schema(raw)
+    }
+    Expr::Array(node) => {
+      let raw = ctx.header(AstNode::ArrayExpression, parent, &node.span);
+      let elems_pos = ctx.ref_vec_field(AstProp::Elements, node.elems.len());
+      let pos = ctx.commit_schema(raw);
+
+      let elems = node
+        .elems
+        .iter()
+        .map(|item| {
+          item
+            .as_ref()
+            .map_or(NodeRef(0), |item| serialize_expr_or_spread(ctx, item, pos))
+        })
+        .collect::<Vec<_>>();
+
+      ctx.write_refs(elems_pos, elems);
+
+      pos
+    }
+    Expr::Object(node) => {
+      let raw = ctx.header(AstNode::ObjectExpression, parent, &node.span);
+      let props_pos = ctx.ref_vec_field(AstProp::Properties, node.props.len());
+      let pos = ctx.commit_schema(raw);
+
+      let prop_ids = node
+        .props
+        .iter()
+        .map(|prop| serialize_prop_or_spread(ctx, prop, pos))
+        .collect::<Vec<_>>();
+
+      ctx.write_refs(props_pos, prop_ids);
+
+      pos
+    }
+    Expr::Fn(node) => {
+      let fn_obj = node.function.as_ref();
+
+      let raw = ctx.header(AstNode::FunctionExpression, parent, &fn_obj.span);
+
+      let async_pos = ctx.bool_field(AstProp::Async);
+      let gen_pos = ctx.bool_field(AstProp::Generator);
+      let id_pos = ctx.ref_field(AstProp::Id);
+      let tparams_pos = ctx.ref_field(AstProp::TypeParameters);
+      let params_pos = ctx.ref_vec_field(AstProp::Params, fn_obj.params.len());
+      let return_pos = ctx.ref_field(AstProp::ReturnType);
+      let body_pos = ctx.ref_field(AstProp::Body);
+      let pos = ctx.commit_schema(raw);
+
+      let ident = node
+        .ident
+        .as_ref()
+        .map(|ident| serialize_ident(ctx, ident, pos));
+
+      let type_params =
+        maybe_serialize_ts_type_param(ctx, &fn_obj.type_params, pos);
+
+      let params = fn_obj
+        .params
+        .iter()
+        .map(|param| serialize_pat(ctx, &param.pat, pos))
+        .collect::<Vec<_>>();
+
+      let return_id =
+        maybe_serialize_ts_type_ann(ctx, &fn_obj.return_type, pos);
+      let body = fn_obj
+        .body
+        .as_ref()
+        .map(|block| serialize_stmt(ctx, &Stmt::Block(block.clone()), pos));
+
+      ctx.write_bool(async_pos, fn_obj.is_async);
+      ctx.write_bool(gen_pos, fn_obj.is_generator);
+      ctx.write_maybe_ref(id_pos, ident);
+      ctx.write_maybe_ref(tparams_pos, type_params);
+      ctx.write_refs(params_pos, params);
+      ctx.write_maybe_ref(return_pos, return_id);
+      ctx.write_maybe_ref(body_pos, body);
+
+      pos
+    }
+    Expr::Unary(node) => {
+      let raw = ctx.header(AstNode::UnaryExpression, parent, &node.span);
+      let flag_pos = ctx.str_field(AstProp::Operator);
+      let arg_pos = ctx.ref_field(AstProp::Argument);
+      let pos = ctx.commit_schema(raw);
+
+      let arg = serialize_expr(ctx, &node.arg, pos);
+
+      ctx.write_str(
+        flag_pos,
+        match node.op {
+          UnaryOp::Minus => "-",
+          UnaryOp::Plus => "+",
+          UnaryOp::Bang => "!",
+          UnaryOp::Tilde => "~",
+          UnaryOp::TypeOf => "typeof",
+          UnaryOp::Void => "void",
+          UnaryOp::Delete => "delete",
+        },
+      );
+      ctx.write_ref(arg_pos, arg);
+
+      pos
+    }
+    Expr::Update(node) => {
+      let raw = ctx.header(AstNode::UpdateExpression, parent, &node.span);
+      let prefix_pos = ctx.bool_field(AstProp::Prefix);
+      let arg_pos = ctx.ref_field(AstProp::Argument);
+      let op_ops = ctx.str_field(AstProp::Operator);
+      let pos = ctx.commit_schema(raw);
+
+      let arg = serialize_expr(ctx, node.arg.as_ref(), pos);
+
+      ctx.write_bool(prefix_pos, node.prefix);
+      ctx.write_ref(arg_pos, arg);
+      ctx.write_str(
+        op_ops,
+        match node.op {
+          UpdateOp::PlusPlus => "++",
+          UpdateOp::MinusMinus => "--",
+        },
+      );
+
+      pos
+    }
+    Expr::Bin(node) => {
+      let (node_type, flag_str) = match node.op {
+        BinaryOp::LogicalAnd => (AstNode::LogicalExpression, "&&"),
+        BinaryOp::LogicalOr => (AstNode::LogicalExpression, "||"),
+        BinaryOp::NullishCoalescing => (AstNode::LogicalExpression, "??"),
+        BinaryOp::EqEq => (AstNode::BinaryExpression, "=="),
+        BinaryOp::NotEq => (AstNode::BinaryExpression, "!="),
+        BinaryOp::EqEqEq => (AstNode::BinaryExpression, "==="),
+        BinaryOp::NotEqEq => (AstNode::BinaryExpression, "!=="),
+        BinaryOp::Lt => (AstNode::BinaryExpression, "<"),
+        BinaryOp::LtEq => (AstNode::BinaryExpression, "<="),
+        BinaryOp::Gt => (AstNode::BinaryExpression, ">"),
+        BinaryOp::GtEq => (AstNode::BinaryExpression, ">="),
+        BinaryOp::LShift => (AstNode::BinaryExpression, "<<"),
+        BinaryOp::RShift => (AstNode::BinaryExpression, ">>"),
+        BinaryOp::ZeroFillRShift => (AstNode::BinaryExpression, ">>>"),
+        BinaryOp::Add => (AstNode::BinaryExpression, "+"),
+        BinaryOp::Sub => (AstNode::BinaryExpression, "-"),
+        BinaryOp::Mul => (AstNode::BinaryExpression, "*"),
+        BinaryOp::Div => (AstNode::BinaryExpression, "/"),
+        BinaryOp::Mod => (AstNode::BinaryExpression, "%"),
+        BinaryOp::BitOr => (AstNode::BinaryExpression, "|"),
+        BinaryOp::BitXor => (AstNode::BinaryExpression, "^"),
+        BinaryOp::BitAnd => (AstNode::BinaryExpression, "&"),
+        BinaryOp::In => (AstNode::BinaryExpression, "in"),
+        BinaryOp::InstanceOf => (AstNode::BinaryExpression, "instanceof"),
+        BinaryOp::Exp => (AstNode::BinaryExpression, "**"),
+      };
+
+      let raw = ctx.header(node_type, parent, &node.span);
+      let op_pos = ctx.str_field(AstProp::Operator);
+      let left_pos = ctx.ref_field(AstProp::Left);
+      let right_pos = ctx.ref_field(AstProp::Right);
+      let pos = ctx.commit_schema(raw);
+
+      let left_id = serialize_expr(ctx, node.left.as_ref(), pos);
+      let right_id = serialize_expr(ctx, node.right.as_ref(), pos);
+
+      ctx.write_str(op_pos, flag_str);
+      ctx.write_ref(left_pos, left_id);
+      ctx.write_ref(right_pos, right_id);
+
+      pos
+    }
+    Expr::Assign(node) => {
+      let raw =
ctx.header(AstNode::AssignmentExpression, parent, &node.span); + let op_pos = ctx.str_field(AstProp::Operator); + let left_pos = ctx.ref_field(AstProp::Left); + let right_pos = ctx.ref_field(AstProp::Right); + let pos = ctx.commit_schema(raw); + + let left = match &node.left { + AssignTarget::Simple(simple_assign_target) => { + match simple_assign_target { + SimpleAssignTarget::Ident(target) => { + serialize_ident(ctx, &target.id, pos) + } + SimpleAssignTarget::Member(target) => { + serialize_expr(ctx, &Expr::Member(target.clone()), pos) + } + SimpleAssignTarget::SuperProp(target) => { + serialize_expr(ctx, &Expr::SuperProp(target.clone()), pos) + } + SimpleAssignTarget::Paren(target) => { + serialize_expr(ctx, &target.expr, pos) + } + SimpleAssignTarget::OptChain(target) => { + serialize_expr(ctx, &Expr::OptChain(target.clone()), pos) + } + SimpleAssignTarget::TsAs(target) => { + serialize_expr(ctx, &Expr::TsAs(target.clone()), pos) + } + SimpleAssignTarget::TsSatisfies(target) => { + serialize_expr(ctx, &Expr::TsSatisfies(target.clone()), pos) + } + SimpleAssignTarget::TsNonNull(target) => { + serialize_expr(ctx, &Expr::TsNonNull(target.clone()), pos) + } + SimpleAssignTarget::TsTypeAssertion(target) => { + serialize_expr(ctx, &Expr::TsTypeAssertion(target.clone()), pos) + } + SimpleAssignTarget::TsInstantiation(target) => { + serialize_expr(ctx, &Expr::TsInstantiation(target.clone()), pos) + } + SimpleAssignTarget::Invalid(_) => unreachable!(), + } + } + AssignTarget::Pat(target) => match target { + AssignTargetPat::Array(array_pat) => { + serialize_pat(ctx, &Pat::Array(array_pat.clone()), pos) + } + AssignTargetPat::Object(object_pat) => { + serialize_pat(ctx, &Pat::Object(object_pat.clone()), pos) + } + AssignTargetPat::Invalid(_) => unreachable!(), + }, + }; + + let right = serialize_expr(ctx, node.right.as_ref(), pos); + + ctx.write_str( + op_pos, + match node.op { + AssignOp::Assign => "=", + AssignOp::AddAssign => "+=", + AssignOp::SubAssign => "-=", + AssignOp::MulAssign => "*=", + AssignOp::DivAssign => "/=", + AssignOp::ModAssign => "%=", + AssignOp::LShiftAssign => "<<=", + AssignOp::RShiftAssign => ">>=", + AssignOp::ZeroFillRShiftAssign => ">>>=", + AssignOp::BitOrAssign => "|=", + AssignOp::BitXorAssign => "^=", + AssignOp::BitAndAssign => "&=", + AssignOp::ExpAssign => "**=", + AssignOp::AndAssign => "&&=", + AssignOp::OrAssign => "||=", + AssignOp::NullishAssign => "??=", + }, + ); + ctx.write_ref(left_pos, left); + ctx.write_ref(right_pos, right); + + pos + } + Expr::Member(node) => serialize_member_expr(ctx, node, parent, false), + Expr::SuperProp(node) => { + let raw = ctx.header(AstNode::MemberExpression, parent, &node.span); + let computed_pos = ctx.bool_field(AstProp::Computed); + let obj_pos = ctx.ref_field(AstProp::Object); + let prop_pos = ctx.ref_field(AstProp::Property); + let pos = ctx.commit_schema(raw); + + let raw = ctx.header(AstNode::Super, pos, &node.obj.span); + let obj = ctx.commit_schema(raw); + + let mut computed = false; + let prop = match &node.prop { + SuperProp::Ident(ident_name) => { + serialize_ident_name(ctx, ident_name, pos) + } + SuperProp::Computed(prop) => { + computed = true; + serialize_expr(ctx, &prop.expr, pos) + } + }; + + ctx.write_bool(computed_pos, computed); + ctx.write_ref(obj_pos, obj); + ctx.write_ref(prop_pos, prop); + + pos + } + Expr::Cond(node) => { + let raw = ctx.header(AstNode::ConditionalExpression, parent, &node.span); + let test_pos = ctx.ref_field(AstProp::Test); + let cons_pos = ctx.ref_field(AstProp::Consequent); + 
let alt_pos = ctx.ref_field(AstProp::Alternate); + let pos = ctx.commit_schema(raw); + + let test = serialize_expr(ctx, node.test.as_ref(), pos); + let cons = serialize_expr(ctx, node.cons.as_ref(), pos); + let alt = serialize_expr(ctx, node.alt.as_ref(), pos); + + ctx.write_ref(test_pos, test); + ctx.write_ref(cons_pos, cons); + ctx.write_ref(alt_pos, alt); + + pos + } + Expr::Call(node) => { + let raw = ctx.header(AstNode::CallExpression, parent, &node.span); + let opt_pos = ctx.bool_field(AstProp::Optional); + let callee_pos = ctx.ref_field(AstProp::Callee); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + let args_pos = ctx.ref_vec_field(AstProp::Arguments, node.args.len()); + let pos = ctx.commit_schema(raw); + + let callee = match &node.callee { + Callee::Super(super_node) => { + let raw = ctx.header(AstNode::Super, pos, &super_node.span); + ctx.commit_schema(raw) + } + Callee::Import(_) => todo!(), + Callee::Expr(expr) => serialize_expr(ctx, expr, pos), + }; + + let type_arg = node.type_args.clone().map(|param_node| { + serialize_ts_param_inst(ctx, param_node.as_ref(), pos) + }); + + let args = node + .args + .iter() + .map(|arg| serialize_expr_or_spread(ctx, arg, pos)) + .collect::>(); + + ctx.write_bool(opt_pos, false); + ctx.write_ref(callee_pos, callee); + ctx.write_maybe_ref(type_args_pos, type_arg); + ctx.write_refs(args_pos, args); + + pos + } + Expr::New(node) => { + let raw = ctx.header(AstNode::NewExpression, parent, &node.span); + let callee_pos = ctx.ref_field(AstProp::Callee); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + let args_pos = ctx.ref_vec_field( + AstProp::Arguments, + node.args.as_ref().map_or(0, |v| v.len()), + ); + let pos = ctx.commit_schema(raw); + + let callee = serialize_expr(ctx, node.callee.as_ref(), pos); + + let args: Vec = node.args.as_ref().map_or(vec![], |args| { + args + .iter() + .map(|arg| serialize_expr_or_spread(ctx, arg, pos)) + .collect::>() + }); + + let type_args = node.type_args.clone().map(|param_node| { + serialize_ts_param_inst(ctx, param_node.as_ref(), pos) + }); + + ctx.write_ref(callee_pos, callee); + ctx.write_maybe_ref(type_args_pos, type_args); + ctx.write_refs(args_pos, args); + + pos + } + Expr::Seq(node) => { + let raw = ctx.header(AstNode::SequenceExpression, parent, &node.span); + let exprs_pos = ctx.ref_vec_field(AstProp::Expressions, node.exprs.len()); + let pos = ctx.commit_schema(raw); + + let children = node + .exprs + .iter() + .map(|expr| serialize_expr(ctx, expr, pos)) + .collect::>(); + + ctx.write_refs(exprs_pos, children); + + pos + } + Expr::Ident(node) => serialize_ident(ctx, node, parent), + Expr::Lit(node) => serialize_lit(ctx, node, parent), + Expr::Tpl(node) => { + let raw = ctx.header(AstNode::TemplateLiteral, parent, &node.span); + let quasis_pos = ctx.ref_vec_field(AstProp::Quasis, node.quasis.len()); + let exprs_pos = ctx.ref_vec_field(AstProp::Expressions, node.exprs.len()); + let pos = ctx.commit_schema(raw); + + let quasis = node + .quasis + .iter() + .map(|quasi| { + let raw = ctx.header(AstNode::TemplateElement, pos, &quasi.span); + let tail_pos = ctx.bool_field(AstProp::Tail); + let raw_pos = ctx.str_field(AstProp::Raw); + let cooked_pos = ctx.str_field(AstProp::Cooked); + let tpl_pos = ctx.commit_schema(raw); + + ctx.write_bool(tail_pos, quasi.tail); + ctx.write_str(raw_pos, &quasi.raw); + ctx.write_str( + cooked_pos, + &quasi + .cooked + .as_ref() + .map_or("".to_string(), |v| v.to_string()), + ); + + tpl_pos + }) + .collect::>(); + + let exprs = node + .exprs + 
.iter() + .map(|expr| serialize_expr(ctx, expr, pos)) + .collect::>(); + + ctx.write_refs(quasis_pos, quasis); + ctx.write_refs(exprs_pos, exprs); + + pos + } + Expr::TaggedTpl(node) => { + let raw = + ctx.header(AstNode::TaggedTemplateExpression, parent, &node.span); + let tag_pos = ctx.ref_field(AstProp::Tag); + let type_arg_pos = ctx.ref_field(AstProp::TypeArguments); + let quasi_pos = ctx.ref_field(AstProp::Quasi); + let pos = ctx.commit_schema(raw); + + let tag = serialize_expr(ctx, &node.tag, pos); + + let type_param_id = node + .type_params + .clone() + .map(|params| serialize_ts_param_inst(ctx, params.as_ref(), pos)); + let quasi = serialize_expr(ctx, &Expr::Tpl(*node.tpl.clone()), pos); + + ctx.write_ref(tag_pos, tag); + ctx.write_maybe_ref(type_arg_pos, type_param_id); + ctx.write_ref(quasi_pos, quasi); + + pos + } + Expr::Arrow(node) => { + let raw = + ctx.header(AstNode::ArrowFunctionExpression, parent, &node.span); + let async_pos = ctx.bool_field(AstProp::Async); + let gen_pos = ctx.bool_field(AstProp::Generator); + let type_param_pos = ctx.ref_field(AstProp::TypeParameters); + let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len()); + let body_pos = ctx.ref_field(AstProp::Body); + let return_type_pos = ctx.ref_field(AstProp::ReturnType); + let pos = ctx.commit_schema(raw); + + let type_param = + maybe_serialize_ts_type_param(ctx, &node.type_params, pos); + + let params = node + .params + .iter() + .map(|param| serialize_pat(ctx, param, pos)) + .collect::>(); + + let body = match node.body.as_ref() { + BlockStmtOrExpr::BlockStmt(block_stmt) => { + serialize_stmt(ctx, &Stmt::Block(block_stmt.clone()), pos) + } + BlockStmtOrExpr::Expr(expr) => serialize_expr(ctx, expr.as_ref(), pos), + }; + + let return_type = + maybe_serialize_ts_type_ann(ctx, &node.return_type, pos); + + ctx.write_bool(async_pos, node.is_async); + ctx.write_bool(gen_pos, node.is_generator); + ctx.write_maybe_ref(type_param_pos, type_param); + ctx.write_refs(params_pos, params); + ctx.write_ref(body_pos, body); + ctx.write_maybe_ref(return_type_pos, return_type); + + pos + } + Expr::Class(node) => { + // FIXME + let raw = ctx.header(AstNode::ClassExpression, parent, &node.class.span); + ctx.commit_schema(raw) + } + Expr::Yield(node) => { + let raw = ctx.header(AstNode::YieldExpression, parent, &node.span); + let delegate_pos = ctx.bool_field(AstProp::Delegate); + let arg_pos = ctx.ref_field(AstProp::Argument); + let pos = ctx.commit_schema(raw); + + let arg = node + .arg + .as_ref() + .map(|arg| serialize_expr(ctx, arg.as_ref(), pos)); + + ctx.write_bool(delegate_pos, node.delegate); + ctx.write_maybe_ref(arg_pos, arg); + + pos + } + Expr::MetaProp(node) => { + let raw = ctx.header(AstNode::MetaProp, parent, &node.span); + ctx.commit_schema(raw) + } + Expr::Await(node) => { + let raw = ctx.header(AstNode::AwaitExpression, parent, &node.span); + let arg_pos = ctx.ref_field(AstProp::Argument); + let pos = ctx.commit_schema(raw); + + let arg = serialize_expr(ctx, node.arg.as_ref(), pos); + + ctx.write_ref(arg_pos, arg); + + pos + } + Expr::Paren(node) => { + // Paren nodes are treated as a syntax only thing in TSEStree + // and are never materialized to actual AST nodes. 
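// Editor's note, expanding the comment above: for `(foo)` no node is
// emitted for the parentheses themselves; the parent's ref field points
// straight at the serialized inner expression. This matches TSESTree and
// ESTree, which have no ParenthesizedExpression node.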
+ serialize_expr(ctx, &node.expr, parent) + } + Expr::JSXMember(node) => serialize_jsx_member_expr(ctx, node, parent), + Expr::JSXNamespacedName(node) => { + serialize_jsx_namespaced_name(ctx, node, parent) + } + Expr::JSXEmpty(node) => serialize_jsx_empty_expr(ctx, node, parent), + Expr::JSXElement(node) => serialize_jsx_element(ctx, node, parent), + Expr::JSXFragment(node) => serialize_jsx_fragment(ctx, node, parent), + Expr::TsTypeAssertion(node) => { + let raw = ctx.header(AstNode::TSTypeAssertion, parent, &node.span); + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + let pos = ctx.commit_schema(raw); + + let expr = serialize_expr(ctx, &node.expr, parent); + let type_ann = serialize_ts_type(ctx, &node.type_ann, pos); + + ctx.write_ref(expr_pos, expr); + ctx.write_ref(type_ann_pos, type_ann); + + pos + } + Expr::TsConstAssertion(node) => { + let raw = ctx.header(AstNode::TsConstAssertion, parent, &node.span); + let arg_pos = ctx.ref_field(AstProp::Argument); + let pos = ctx.commit_schema(raw); + + let arg = serialize_expr(ctx, node.expr.as_ref(), pos); + + // FIXME + ctx.write_ref(arg_pos, arg); + + pos + } + Expr::TsNonNull(node) => { + let raw = ctx.header(AstNode::TSNonNullExpression, parent, &node.span); + let expr_pos = ctx.ref_field(AstProp::Expression); + let pos = ctx.commit_schema(raw); + + let expr_id = serialize_expr(ctx, node.expr.as_ref(), pos); + + ctx.write_ref(expr_pos, expr_id); + + pos + } + Expr::TsAs(node) => { + let raw = ctx.header(AstNode::TSAsExpression, parent, &node.span); + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + let pos = ctx.commit_schema(raw); + + let expr = serialize_expr(ctx, node.expr.as_ref(), pos); + let type_ann = serialize_ts_type(ctx, node.type_ann.as_ref(), pos); + + ctx.write_ref(expr_pos, expr); + ctx.write_ref(type_ann_pos, type_ann); + + pos + } + Expr::TsInstantiation(node) => { + let raw = ctx.header(AstNode::TsInstantiation, parent, &node.span); + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + let pos = ctx.commit_schema(raw); + + let expr = serialize_expr(ctx, node.expr.as_ref(), pos); + + let type_arg = serialize_ts_param_inst(ctx, node.type_args.as_ref(), pos); + + ctx.write_ref(expr_pos, expr); + ctx.write_ref(type_args_pos, type_arg); + + pos + } + Expr::TsSatisfies(node) => { + let raw = ctx.header(AstNode::TSSatisfiesExpression, parent, &node.span); + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + let pos = ctx.commit_schema(raw); + + let epxr = serialize_expr(ctx, node.expr.as_ref(), pos); + let type_ann = serialize_ts_type(ctx, node.type_ann.as_ref(), pos); + + ctx.write_ref(expr_pos, epxr); + ctx.write_ref(type_ann_pos, type_ann); + + pos + } + Expr::PrivateName(node) => serialize_private_name(ctx, node, parent), + Expr::OptChain(node) => { + let raw = ctx.header(AstNode::ChainExpression, parent, &node.span); + let arg_pos = ctx.ref_field(AstProp::Argument); + let pos = ctx.commit_schema(raw); + + let arg = match node.base.as_ref() { + OptChainBase::Member(member_expr) => { + serialize_member_expr(ctx, member_expr, pos, true) + } + OptChainBase::Call(opt_call) => { + let raw = ctx.header(AstNode::CallExpression, pos, &opt_call.span); + let opt_pos = ctx.bool_field(AstProp::Optional); + let callee_pos = ctx.ref_field(AstProp::Callee); + let type_args_pos = 
ctx.ref_field(AstProp::TypeArguments); + let args_pos = + ctx.ref_vec_field(AstProp::Arguments, opt_call.args.len()); + let call_pos = ctx.commit_schema(raw); + + let callee = serialize_expr(ctx, &opt_call.callee, pos); + + let type_param_id = opt_call.type_args.clone().map(|params| { + serialize_ts_param_inst(ctx, params.as_ref(), call_pos) + }); + + let args = opt_call + .args + .iter() + .map(|arg| serialize_expr_or_spread(ctx, arg, pos)) + .collect::>(); + + ctx.write_bool(opt_pos, true); + ctx.write_ref(callee_pos, callee); + ctx.write_maybe_ref(type_args_pos, type_param_id); + ctx.write_refs(args_pos, args); + + call_pos + } + }; + + ctx.write_ref(arg_pos, arg); + + pos + } + Expr::Invalid(_) => { + unreachable!() + } + } +} + +fn serialize_prop_or_spread( + ctx: &mut TsEsTreeBuilder, + prop: &PropOrSpread, + parent: NodeRef, +) -> NodeRef { + match prop { + PropOrSpread::Spread(spread_element) => serialize_spread( + ctx, + spread_element.expr.as_ref(), + &spread_element.dot3_token, + parent, + ), + PropOrSpread::Prop(prop) => { + let raw = ctx.header(AstNode::Property, parent, &prop.span()); + + let shorthand_pos = ctx.bool_field(AstProp::Shorthand); + let computed_pos = ctx.bool_field(AstProp::Computed); + let method_pos = ctx.bool_field(AstProp::Method); + let kind_pos = ctx.str_field(AstProp::Kind); + let key_pos = ctx.ref_field(AstProp::Key); + let value_pos = ctx.ref_field(AstProp::Value); + let pos = ctx.commit_schema(raw); + + let mut shorthand = false; + let mut computed = false; + let mut method = false; + let mut kind = "init"; + + // FIXME: optional + let (key_id, value_id) = match prop.as_ref() { + Prop::Shorthand(ident) => { + shorthand = true; + + let value = serialize_ident(ctx, ident, pos); + (value, value) + } + Prop::KeyValue(key_value_prop) => { + if let PropName::Computed(_) = key_value_prop.key { + computed = true; + } + + let key = serialize_prop_name(ctx, &key_value_prop.key, pos); + let value = serialize_expr(ctx, key_value_prop.value.as_ref(), pos); + + (key, value) + } + Prop::Assign(assign_prop) => { + let raw = + ctx.header(AstNode::AssignmentPattern, pos, &assign_prop.span); + let left_pos = ctx.ref_field(AstProp::Left); + let right_pos = ctx.ref_field(AstProp::Right); + let child_pos = ctx.commit_schema(raw); + + let left = serialize_ident(ctx, &assign_prop.key, child_pos); + let right = + serialize_expr(ctx, assign_prop.value.as_ref(), child_pos); + + ctx.write_ref(left_pos, left); + ctx.write_ref(right_pos, right); + + (left, child_pos) + } + Prop::Getter(getter_prop) => { + kind = "get"; + + let key = serialize_prop_name(ctx, &getter_prop.key, pos); + + let value = serialize_expr( + ctx, + &Expr::Fn(FnExpr { + ident: None, + function: Box::new(Function { + params: vec![], + decorators: vec![], + span: getter_prop.span, + ctxt: SyntaxContext::empty(), + body: getter_prop.body.clone(), + is_generator: false, + is_async: false, + type_params: None, // FIXME + return_type: None, + }), + }), + pos, + ); + + (key, value) + } + Prop::Setter(setter_prop) => { + kind = "set"; + + let key_id = serialize_prop_name(ctx, &setter_prop.key, pos); + + let param = Param::from(*setter_prop.param.clone()); + + let value_id = serialize_expr( + ctx, + &Expr::Fn(FnExpr { + ident: None, + function: Box::new(Function { + params: vec![param], + decorators: vec![], + span: setter_prop.span, + ctxt: SyntaxContext::empty(), + body: setter_prop.body.clone(), + is_generator: false, + is_async: false, + type_params: None, + return_type: None, + }), + }), + pos, + ); + + (key_id, 
value_id)
+        }
+        Prop::Method(method_prop) => {
+          method = true;
+
+          let key_id = serialize_prop_name(ctx, &method_prop.key, pos);
+
+          let value_id = serialize_expr(
+            ctx,
+            &Expr::Fn(FnExpr {
+              ident: None,
+              function: method_prop.function.clone(),
+            }),
+            pos,
+          );
+
+          (key_id, value_id)
+        }
+      };
+
+      ctx.write_bool(shorthand_pos, shorthand);
+      ctx.write_bool(computed_pos, computed);
+      ctx.write_bool(method_pos, method);
+      ctx.write_str(kind_pos, kind);
+      ctx.write_ref(key_pos, key_id);
+      ctx.write_ref(value_pos, value_id);
+
+      pos
+    }
+  }
+}
+
+fn serialize_member_expr(
+  ctx: &mut TsEsTreeBuilder,
+  node: &MemberExpr,
+  parent: NodeRef,
+  optional: bool,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::MemberExpression, parent, &node.span);
+  let opt_pos = ctx.bool_field(AstProp::Optional);
+  let computed_pos = ctx.bool_field(AstProp::Computed);
+  let obj_pos = ctx.ref_field(AstProp::Object);
+  let prop_pos = ctx.ref_field(AstProp::Property);
+  let pos = ctx.commit_schema(raw);
+
+  let obj = serialize_expr(ctx, node.obj.as_ref(), pos);
+
+  let mut computed = false;
+
+  let prop = match &node.prop {
+    MemberProp::Ident(ident_name) => serialize_ident_name(ctx, ident_name, pos),
+    MemberProp::PrivateName(private_name) => {
+      serialize_private_name(ctx, private_name, pos)
+    }
+    MemberProp::Computed(computed_prop_name) => {
+      computed = true;
+      serialize_expr(ctx, computed_prop_name.expr.as_ref(), pos)
+    }
+  };
+
+  ctx.write_bool(opt_pos, optional);
+  ctx.write_bool(computed_pos, computed);
+  ctx.write_ref(obj_pos, obj);
+  ctx.write_ref(prop_pos, prop);
+
+  pos
+}
+
+fn serialize_class_member(
+  ctx: &mut TsEsTreeBuilder,
+  member: &ClassMember,
+  parent: NodeRef,
+) -> NodeRef {
+  match member {
+    ClassMember::Constructor(constructor) => {
+      let raw =
+        ctx.header(AstNode::MethodDefinition, parent, &constructor.span);
+      let key_pos = ctx.ref_field(AstProp::Key);
+      let body_pos = ctx.ref_field(AstProp::Body);
+      let args_pos =
+        ctx.ref_vec_field(AstProp::Arguments, constructor.params.len());
+      let acc_pos = if constructor.accessibility.is_some() {
+        NodePos::Str(ctx.str_field(AstProp::Accessibility))
+      } else {
+        NodePos::Undef(ctx.undefined_field(AstProp::Accessibility))
+      };
+      let member_id = ctx.commit_schema(raw);
+
+      // FIXME flags
+
+      let key = serialize_prop_name(ctx, &constructor.key, member_id);
+      let body = constructor
+        .body
+        .as_ref()
+        .map(|body| serialize_stmt(ctx, &Stmt::Block(body.clone()), member_id));
+
+      let params = constructor
+        .params
+        .iter()
+        .map(|param| match param {
+          ParamOrTsParamProp::TsParamProp(_) => {
+            todo!()
+          }
+          ParamOrTsParamProp::Param(param) => {
+            serialize_pat(ctx, &param.pat, member_id)
+          }
+        })
+        .collect::<Vec<_>>();
+
+      if let Some(acc) = constructor.accessibility {
+        if let NodePos::Str(str_pos) = acc_pos {
+          ctx.write_str(str_pos, &accessibility_to_str(acc));
+        }
+      }
+
+      ctx.write_ref(key_pos, key);
+      ctx.write_maybe_ref(body_pos, body);
+      // FIXME
+      ctx.write_refs(args_pos, params);
+
+      member_id
+    }
+    ClassMember::Method(method) => {
+      let raw = ctx.header(AstNode::MethodDefinition, parent, &method.span);
+
+      let member_id = ctx.commit_schema(raw);
+
+      // let mut flags = FlagValue::new();
+      // flags.set(Flag::ClassMethod);
+      if method.function.is_async {
+        // FIXME
+      }
+
+      // accessibility_to_flag(&mut flags, method.accessibility);
+
+      let _key_id = serialize_prop_name(ctx, &method.key, member_id);
+
+      let _body_id =
+        method.function.body.as_ref().map(|body| {
+          serialize_stmt(ctx, &Stmt::Block(body.clone()), member_id)
+        });
+
+      let _params = method
+        .function
+        .params
+        .iter()
+        .map(|param| serialize_pat(ctx, &param.pat, member_id))
+        .collect::<Vec<_>>();
+
+      // ctx.write_node(member_id, );
+      // ctx.write_flags(&flags);
+      // ctx.write_id(key_id);
+      // ctx.write_id(body_id);
+      // ctx.write_ids(AstProp::Params, params);
+
+      member_id
+    }
+    ClassMember::PrivateMethod(_) => todo!(),
+    ClassMember::ClassProp(_) => todo!(),
+    ClassMember::PrivateProp(_) => todo!(),
+    ClassMember::TsIndexSignature(member) => {
+      serialize_ts_index_sig(ctx, member, parent)
+    }
+    ClassMember::Empty(_) => unreachable!(),
+    ClassMember::StaticBlock(_) => todo!(),
+    ClassMember::AutoAccessor(_) => todo!(),
+  }
+}
+
+fn serialize_expr_or_spread(
+  ctx: &mut TsEsTreeBuilder,
+  arg: &ExprOrSpread,
+  parent: NodeRef,
+) -> NodeRef {
+  if let Some(spread) = &arg.spread {
+    serialize_spread(ctx, &arg.expr, spread, parent)
+  } else {
+    serialize_expr(ctx, arg.expr.as_ref(), parent)
+  }
+}
+
+fn serialize_ident(
+  ctx: &mut TsEsTreeBuilder,
+  ident: &Ident,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::Identifier, parent, &ident.span);
+  let name_pos = ctx.str_field(AstProp::Name);
+  let pos = ctx.commit_schema(raw);
+
+  ctx.write_str(name_pos, ident.sym.as_str());
+
+  pos
+}
+
+fn serialize_module_exported_name(
+  ctx: &mut TsEsTreeBuilder,
+  name: &ModuleExportName,
+  parent: NodeRef,
+) -> NodeRef {
+  match &name {
+    ModuleExportName::Ident(ident) => serialize_ident(ctx, ident, parent),
+    ModuleExportName::Str(lit) => {
+      serialize_lit(ctx, &Lit::Str(lit.clone()), parent)
+    }
+  }
+}
+
+fn serialize_decl(
+  ctx: &mut TsEsTreeBuilder,
+  decl: &Decl,
+  parent: NodeRef,
+) -> NodeRef {
+  match decl {
+    Decl::Class(node) => {
+      let raw = ctx.header(AstNode::ClassDeclaration, parent, &node.class.span);
+      let declare_pos = ctx.bool_field(AstProp::Declare);
+      let abstract_pos = ctx.bool_field(AstProp::Abstract);
+      let id_pos = ctx.ref_field(AstProp::Id);
+      let body_pos = ctx.ref_field(AstProp::Body);
+      let type_params_pos = ctx.ref_field(AstProp::TypeParameters);
+      let super_pos = ctx.ref_field(AstProp::SuperClass);
+      let super_type_pos = ctx.ref_field(AstProp::SuperTypeArguments);
+      let impl_pos =
+        ctx.ref_vec_field(AstProp::Implements, node.class.implements.len());
+      let id = ctx.commit_schema(raw);
+
+      let body_raw = ctx.header(AstNode::ClassBody, id, &node.class.span);
+      let body_body_pos =
+        ctx.ref_vec_field(AstProp::Body, node.class.body.len());
+      let body_id = ctx.commit_schema(body_raw);
+
+      let ident = serialize_ident(ctx, &node.ident, id);
+      let type_params =
+        maybe_serialize_ts_type_param(ctx, &node.class.type_params, id);
+
+      let super_class = node
+        .class
+        .super_class
+        .as_ref()
+        .map(|super_class| serialize_expr(ctx, super_class, id));
+
+      let super_type_params = node
+        .class
+        .super_type_params
+        .as_ref()
+        .map(|super_params| serialize_ts_param_inst(ctx, super_params, id));
+
+      let implement_ids = node
+        .class
+        .implements
+        .iter()
+        .map(|implements| {
+          let raw =
+            ctx.header(AstNode::TSClassImplements, id, &implements.span);
+
+          let expr_pos = ctx.ref_field(AstProp::Expression);
+          let type_args_pos = ctx.ref_field(AstProp::TypeArguments);
+          let child_pos = ctx.commit_schema(raw);
+
+          let type_args = implements
+            .type_args
+            .clone()
+            .map(|args| serialize_ts_param_inst(ctx, &args, child_pos));
+
+          let expr = serialize_expr(ctx, &implements.expr, child_pos);
+
+          ctx.write_ref(expr_pos, expr);
+          ctx.write_maybe_ref(type_args_pos, type_args);
+
+          child_pos
+        })
+        .collect::<Vec<_>>();
+
+      let member_ids = node
+        .class
+        .body
+        .iter()
+        .map(|member| serialize_class_member(ctx, member, parent))
+        .collect::<Vec<_>>();
+
+      ctx.write_ref(body_pos, body_id);
+
+      ctx.write_bool(declare_pos, node.declare);
+      ctx.write_bool(abstract_pos, node.class.is_abstract);
+      ctx.write_ref(id_pos, ident);
+      ctx.write_maybe_ref(type_params_pos, type_params);
+      ctx.write_maybe_ref(super_pos, super_class);
+      ctx.write_maybe_ref(super_type_pos, super_type_params);
+      ctx.write_refs(impl_pos, implement_ids);
+
+      // body
+      ctx.write_refs(body_body_pos, member_ids);
+
+      id
+    }
+    Decl::Fn(node) => {
+      let raw =
+        ctx.header(AstNode::FunctionDeclaration, parent, &node.function.span);
+      let declare_pos = ctx.bool_field(AstProp::Declare);
+      let async_pos = ctx.bool_field(AstProp::Async);
+      let gen_pos = ctx.bool_field(AstProp::Generator);
+      let id_pos = ctx.ref_field(AstProp::Id);
+      let type_params_pos = ctx.ref_field(AstProp::TypeParameters);
+      let return_pos = ctx.ref_field(AstProp::ReturnType);
+      let body_pos = ctx.ref_field(AstProp::Body);
+      let params_pos =
+        ctx.ref_vec_field(AstProp::Params, node.function.params.len());
+      let pos = ctx.commit_schema(raw);
+
+      let ident_id = serialize_ident(ctx, &node.ident, parent);
+      let type_param_id =
+        maybe_serialize_ts_type_param(ctx, &node.function.type_params, pos);
+      let return_type =
+        maybe_serialize_ts_type_ann(ctx, &node.function.return_type, pos);
+
+      let body = node
+        .function
+        .body
+        .as_ref()
+        .map(|body| serialize_stmt(ctx, &Stmt::Block(body.clone()), pos));
+
+      let params = node
+        .function
+        .params
+        .iter()
+        .map(|param| serialize_pat(ctx, &param.pat, pos))
+        .collect::<Vec<_>>();
+
+      ctx.write_bool(declare_pos, node.declare);
+      ctx.write_bool(async_pos, node.function.is_async);
+      ctx.write_bool(gen_pos, node.function.is_generator);
+      ctx.write_ref(id_pos, ident_id);
+      ctx.write_maybe_ref(type_params_pos, type_param_id);
+      ctx.write_maybe_ref(return_pos, return_type);
+      ctx.write_maybe_ref(body_pos, body);
+      ctx.write_refs(params_pos, params);
+
+      pos
+    }
+    Decl::Var(node) => {
+      let raw = ctx.header(AstNode::VariableDeclaration, parent, &node.span);
+      let declare_pos = ctx.bool_field(AstProp::Declare);
+      let kind_pos = ctx.str_field(AstProp::Kind);
+      let decls_pos =
+        ctx.ref_vec_field(AstProp::Declarations, node.decls.len());
+      let id = ctx.commit_schema(raw);
+
+      let children = node
+        .decls
+        .iter()
+        .map(|decl| {
+          let raw = ctx.header(AstNode::VariableDeclarator, id, &decl.span);
+          let id_pos = ctx.ref_field(AstProp::Id);
+          let init_pos = ctx.ref_field(AstProp::Init);
+          let child_id = ctx.commit_schema(raw);
+
+          // FIXME: Definite?
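// Editor's note on the FIXME above: swc's `definite` flag marks TS definite
// assignment (e.g. `let x!: number`); typescript-estree surfaces it as a
// `definite` boolean on VariableDeclarator, so it would presumably slot in
// here as another bool_field.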
+
+          let ident = serialize_pat(ctx, &decl.name, child_id);
+
+          let init = decl
+            .init
+            .as_ref()
+            .map(|init| serialize_expr(ctx, init.as_ref(), child_id));
+
+          ctx.write_ref(id_pos, ident);
+          ctx.write_maybe_ref(init_pos, init);
+
+          child_id
+        })
+        .collect::<Vec<_>>();
+
+      ctx.write_bool(declare_pos, node.declare);
+      ctx.write_str(
+        kind_pos,
+        match node.kind {
+          VarDeclKind::Var => "var",
+          VarDeclKind::Let => "let",
+          VarDeclKind::Const => "const",
+        },
+      );
+      ctx.write_refs(decls_pos, children);
+
+      id
+    }
+    Decl::Using(_) => {
+      todo!();
+    }
+    Decl::TsInterface(node) => {
+      let raw = ctx.header(AstNode::TSInterface, parent, &node.span);
+      let declare_pos = ctx.bool_field(AstProp::Declare);
+      let id_pos = ctx.ref_field(AstProp::Id);
+      let extends_pos = ctx.ref_vec_field(AstProp::Extends, node.extends.len());
+      let type_param_pos = ctx.ref_field(AstProp::TypeParameters);
+      let body_pos = ctx.ref_field(AstProp::Body);
+      let pos = ctx.commit_schema(raw);
+
+      let body_raw = ctx.header(AstNode::TSInterfaceBody, pos, &node.body.span);
+      let body_body_pos =
+        ctx.ref_vec_field(AstProp::Body, node.body.body.len());
+      let body_id = ctx.commit_schema(body_raw);
+
+      let ident_id = serialize_ident(ctx, &node.id, pos);
+      let type_param =
+        maybe_serialize_ts_type_param(ctx, &node.type_params, pos);
+
+      let extend_ids = node
+        .extends
+        .iter()
+        .map(|item| {
+          let raw = ctx.header(AstNode::TSInterfaceHeritage, pos, &item.span);
+          let type_args_pos = ctx.ref_field(AstProp::TypeArguments);
+          let expr_pos = ctx.ref_field(AstProp::Expression);
+          let child_pos = ctx.commit_schema(raw);
+
+          let expr = serialize_expr(ctx, &item.expr, child_pos);
+          let type_args = item.type_args.clone().map(|params| {
+            serialize_ts_param_inst(ctx, params.as_ref(), child_pos)
+          });
+
+          ctx.write_ref(expr_pos, expr);
+          ctx.write_maybe_ref(type_args_pos, type_args);
+
+          child_pos
+        })
+        .collect::<Vec<_>>();
+
+      let body_elem_ids = node
+        .body
+        .body
+        .iter()
+        .map(|item| match item {
+          TsTypeElement::TsCallSignatureDecl(ts_call) => {
+            let raw = ctx.header(
+              AstNode::TsCallSignatureDeclaration,
+              pos,
+              &ts_call.span,
+            );
+            let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+            let params_pos =
+              ctx.ref_vec_field(AstProp::Params, ts_call.params.len());
+            let return_pos = ctx.ref_field(AstProp::ReturnType);
+            let item_id = ctx.commit_schema(raw);
+
+            let type_param =
+              maybe_serialize_ts_type_param(ctx, &ts_call.type_params, pos);
+            let return_type =
+              maybe_serialize_ts_type_ann(ctx, &ts_call.type_ann, pos);
+            let params = ts_call
+              .params
+              .iter()
+              .map(|param| serialize_ts_fn_param(ctx, param, pos))
+              .collect::<Vec<_>>();
+
+            ctx.write_maybe_ref(type_ann_pos, type_param);
+            ctx.write_refs(params_pos, params);
+            ctx.write_maybe_ref(return_pos, return_type);
+
+            item_id
+          }
+          TsTypeElement::TsConstructSignatureDecl(_) => todo!(),
+          TsTypeElement::TsPropertySignature(sig) => {
+            let raw = ctx.header(AstNode::TSPropertySignature, pos, &sig.span);
+
+            let computed_pos = ctx.bool_field(AstProp::Computed);
+            let optional_pos = ctx.bool_field(AstProp::Optional);
+            let readonly_pos = ctx.bool_field(AstProp::Readonly);
+            // TODO: where is this coming from?
+            let _static_pos = ctx.bool_field(AstProp::Static);
+            let key_pos = ctx.ref_field(AstProp::Key);
+            let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+            let item_pos = ctx.commit_schema(raw);
+
+            let key = serialize_expr(ctx, &sig.key, item_pos);
+            let type_ann =
+              maybe_serialize_ts_type_ann(ctx, &sig.type_ann, item_pos);
+
+            ctx.write_bool(computed_pos, sig.computed);
+            ctx.write_bool(optional_pos, sig.optional);
+            ctx.write_bool(readonly_pos, sig.readonly);
+            ctx.write_ref(key_pos, key);
+            ctx.write_maybe_ref(type_ann_pos, type_ann);
+
+            item_pos
+          }
+          TsTypeElement::TsGetterSignature(sig) => {
+            let raw = ctx.header(AstNode::TSMethodSignature, pos, &sig.span);
+            let computed_pos = ctx.bool_field(AstProp::Computed);
+            let optional_pos = ctx.bool_field(AstProp::Optional);
+            let readonly_pos = ctx.bool_field(AstProp::Readonly);
+            // TODO: where is this coming from?
+            let _static_pos = ctx.bool_field(AstProp::Static);
+            let kind_pos = ctx.str_field(AstProp::Kind);
+            let key_pos = ctx.ref_field(AstProp::Key);
+            let return_type_pos = ctx.ref_field(AstProp::ReturnType);
+            let item_pos = ctx.commit_schema(raw);
+
+            let key = serialize_expr(ctx, sig.key.as_ref(), item_pos);
+            let return_type =
+              maybe_serialize_ts_type_ann(ctx, &sig.type_ann, item_pos);
+
+            ctx.write_bool(computed_pos, false);
+            ctx.write_bool(optional_pos, false);
+            ctx.write_bool(readonly_pos, false);
+            ctx.write_str(kind_pos, "getter");
+            ctx.write_maybe_ref(return_type_pos, return_type);
+            ctx.write_ref(key_pos, key);
+
+            item_pos
+          }
+          TsTypeElement::TsSetterSignature(sig) => {
+            let raw = ctx.header(AstNode::TSMethodSignature, pos, &sig.span);
+            let computed_pos = ctx.bool_field(AstProp::Computed);
+            let optional_pos = ctx.bool_field(AstProp::Optional);
+            let readonly_pos = ctx.bool_field(AstProp::Readonly);
+            // TODO: where is this coming from?
+            let _static_pos = ctx.bool_field(AstProp::Static);
+            let kind_pos = ctx.str_field(AstProp::Kind);
+            let key_pos = ctx.ref_field(AstProp::Key);
+            let params_pos = ctx.ref_vec_field(AstProp::Params, 1);
+            let item_pos = ctx.commit_schema(raw);
+
+            let key = serialize_expr(ctx, sig.key.as_ref(), item_pos);
+            let params = serialize_ts_fn_param(ctx, &sig.param, item_pos);
+
+            ctx.write_bool(computed_pos, false);
+            ctx.write_bool(optional_pos, false);
+            ctx.write_bool(readonly_pos, false);
+            ctx.write_str(kind_pos, "setter");
+            ctx.write_ref(key_pos, key);
+            ctx.write_refs(params_pos, vec![params]);
+
+            item_pos
+          }
+          TsTypeElement::TsMethodSignature(sig) => {
+            let raw = ctx.header(AstNode::TSMethodSignature, pos, &sig.span);
+            let computed_pos = ctx.bool_field(AstProp::Computed);
+            let optional_pos = ctx.bool_field(AstProp::Optional);
+            let readonly_pos = ctx.bool_field(AstProp::Readonly);
+            // TODO: where is this coming from?
+            let _static_pos = ctx.bool_field(AstProp::Static);
+            let kind_pos = ctx.str_field(AstProp::Kind);
+            let key_pos = ctx.ref_field(AstProp::Key);
+            let params_pos =
+              ctx.ref_vec_field(AstProp::Params, sig.params.len());
+            let return_type_pos = ctx.ref_field(AstProp::ReturnType);
+            let item_pos = ctx.commit_schema(raw);
+
+            let key = serialize_expr(ctx, sig.key.as_ref(), item_pos);
+            let params = sig
+              .params
+              .iter()
+              .map(|param| serialize_ts_fn_param(ctx, param, item_pos))
+              .collect::<Vec<_>>();
+            let return_type =
+              maybe_serialize_ts_type_ann(ctx, &sig.type_ann, item_pos);
+
+            ctx.write_bool(computed_pos, false);
+            ctx.write_bool(optional_pos, false);
+            ctx.write_bool(readonly_pos, false);
+            ctx.write_str(kind_pos, "method");
+            ctx.write_ref(key_pos, key);
+            ctx.write_refs(params_pos, params);
+            ctx.write_maybe_ref(return_type_pos, return_type);
+
+            item_pos
+          }
+          TsTypeElement::TsIndexSignature(sig) => {
+            serialize_ts_index_sig(ctx, sig, pos)
+          }
+        })
+        .collect::<Vec<_>>();
+
+      ctx.write_bool(declare_pos, node.declare);
+      ctx.write_ref(id_pos, ident_id);
+      ctx.write_maybe_ref(type_param_pos, type_param);
+      ctx.write_refs(extends_pos, extend_ids);
+      ctx.write_ref(body_pos, body_id);
+
+      // Body
+      ctx.write_refs(body_body_pos, body_elem_ids);
+
+      pos
+    }
+    Decl::TsTypeAlias(node) => {
+      let raw = ctx.header(AstNode::TsTypeAlias, parent, &node.span);
+      let declare_pos = ctx.bool_field(AstProp::Declare);
+      let id_pos = ctx.ref_field(AstProp::Id);
+      let type_params_pos = ctx.ref_field(AstProp::TypeParameters);
+      let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+      let pos = ctx.commit_schema(raw);
+
+      let ident = serialize_ident(ctx, &node.id, pos);
+      let type_ann = serialize_ts_type(ctx, &node.type_ann, pos);
+      let type_param =
+        maybe_serialize_ts_type_param(ctx, &node.type_params, pos);
+
+      ctx.write_bool(declare_pos, node.declare);
+      ctx.write_ref(id_pos, ident);
+      ctx.write_maybe_ref(type_params_pos, type_param);
+      ctx.write_ref(type_ann_pos, type_ann);
+
+      pos
+    }
+    Decl::TsEnum(node) => {
+      let raw = ctx.header(AstNode::TSEnumDeclaration, parent, &node.span);
+      let declare_pos = ctx.bool_field(AstProp::Declare);
+      let const_pos = ctx.bool_field(AstProp::Const);
+      let id_pos = ctx.ref_field(AstProp::Id);
+      let body_pos = ctx.ref_field(AstProp::Body);
+      let pos = ctx.commit_schema(raw);
+
+      let body_raw = ctx.header(AstNode::TSEnumBody, pos, &node.span);
+      let members_pos = ctx.ref_vec_field(AstProp::Members, node.members.len());
+      let body = ctx.commit_schema(body_raw);
+
+      let ident_id = serialize_ident(ctx, &node.id, parent);
+
+      let members = node
+        .members
+        .iter()
+        .map(|member| {
+          let raw = ctx.header(AstNode::TSEnumMember, body, &member.span);
+          let id_pos = ctx.ref_field(AstProp::Id);
+          let init_pos = ctx.ref_field(AstProp::Initializer);
+          let member_id = ctx.commit_schema(raw);
+
+          let ident = match &member.id {
+            TsEnumMemberId::Ident(ident) => {
+              serialize_ident(ctx, ident, member_id)
+            }
+            TsEnumMemberId::Str(lit_str) => {
+              serialize_lit(ctx, &Lit::Str(lit_str.clone()), member_id)
+            }
+          };
+
+          let init = member
+            .init
+            .as_ref()
+            .map(|init| serialize_expr(ctx, init, member_id));
+
+          ctx.write_ref(id_pos, ident);
+          ctx.write_maybe_ref(init_pos, init);
+
+          member_id
+        })
+        .collect::<Vec<_>>();
+
+      ctx.write_refs(members_pos, members);
+
+      ctx.write_bool(declare_pos, node.declare);
+      ctx.write_bool(const_pos, node.is_const);
+      ctx.write_ref(id_pos, ident_id);
+      ctx.write_ref(body_pos, body);
+
+      pos
+    }
+    Decl::TsModule(ts_module_decl) => {
+      let raw =
+        ctx.header(AstNode::TsModule, parent, &ts_module_decl.span);
+      ctx.commit_schema(raw)
+    }
+  }
+}
+
+fn serialize_ts_index_sig(
+  ctx: &mut TsEsTreeBuilder,
+  node: &TsIndexSignature,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::TSIndexSignature, parent, &node.span);
+  let readonly_pos = ctx.bool_field(AstProp::Readonly);
+  // TODO: where is this coming from?
+  let static_pos = ctx.bool_field(AstProp::Static);
+  let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len());
+  let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+  let pos = ctx.commit_schema(raw);
+
+  let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos);
+
+  let params = node
+    .params
+    .iter()
+    .map(|param| serialize_ts_fn_param(ctx, param, pos))
+    .collect::<Vec<_>>();
+
+  ctx.write_bool(readonly_pos, node.readonly);
+  ctx.write_bool(static_pos, node.is_static);
+  ctx.write_refs(params_pos, params);
+  ctx.write_maybe_ref(type_ann_pos, type_ann);
+
+  pos
+}
+
+fn accessibility_to_str(accessibility: Accessibility) -> String {
+  match accessibility {
+    Accessibility::Public => "public".to_string(),
+    Accessibility::Protected => "protected".to_string(),
+    Accessibility::Private => "private".to_string(),
+  }
+}
+
+fn serialize_private_name(
+  ctx: &mut TsEsTreeBuilder,
+  node: &PrivateName,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::PrivateIdentifier, parent, &node.span);
+  let name_pos = ctx.str_field(AstProp::Name);
+  let pos = ctx.commit_schema(raw);
+
+  ctx.write_str(name_pos, node.name.as_str());
+
+  pos
+}
+
+fn serialize_jsx_element(
+  ctx: &mut TsEsTreeBuilder,
+  node: &JSXElement,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXElement, parent, &node.span);
+  let open_pos = ctx.ref_field(AstProp::OpeningElement);
+  let close_pos = ctx.ref_field(AstProp::ClosingElement);
+  let children_pos = ctx.ref_vec_field(AstProp::Children, node.children.len());
+  let pos = ctx.commit_schema(raw);
+
+  let open = serialize_jsx_opening_element(ctx, &node.opening, pos);
+
+  let close = node.closing.as_ref().map(|closing| {
+    let raw = ctx.header(AstNode::JSXClosingElement, pos, &closing.span);
+    let name_pos = ctx.ref_field(AstProp::Name);
+    let closing_pos = ctx.commit_schema(raw);
+
+    let name = serialize_jsx_element_name(ctx, &closing.name, closing_pos);
+    ctx.write_ref(name_pos, name);
+
+    closing_pos
+  });
+
+  let children = serialize_jsx_children(ctx, &node.children, pos);
+
+  ctx.write_ref(open_pos, open);
+  ctx.write_maybe_ref(close_pos, close);
+  ctx.write_refs(children_pos, children);
+
+  pos
+}
+
+fn serialize_jsx_fragment(
+  ctx: &mut TsEsTreeBuilder,
+  node: &JSXFragment,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXFragment, parent, &node.span);
+
+  let opening_pos = ctx.ref_field(AstProp::OpeningFragment);
+  let closing_pos = ctx.ref_field(AstProp::ClosingFragment);
+  let children_pos = ctx.ref_vec_field(AstProp::Children, node.children.len());
+  let pos = ctx.commit_schema(raw);
+
+  let raw = ctx.header(AstNode::JSXOpeningFragment, pos, &node.opening.span);
+  let opening_id = ctx.commit_schema(raw);
+
+  let raw = ctx.header(AstNode::JSXClosingFragment, pos, &node.closing.span);
+  let closing_id = ctx.commit_schema(raw);
+
+  let children = serialize_jsx_children(ctx, &node.children, pos);
+
+  ctx.write_ref(opening_pos, opening_id);
+  ctx.write_ref(closing_pos, closing_id);
+  ctx.write_refs(children_pos, children);
+
+  pos
+}
+
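+// Serializes the children of a JSX element or fragment. Text, nested
+// elements, fragments, and expression containers are supported;
+// `JSXSpreadChild` is rejected because no mainstream parser emits it.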
+fn serialize_jsx_children(
+  ctx: &mut TsEsTreeBuilder,
+  children: &[JSXElementChild],
+  parent: NodeRef,
+) -> Vec<NodeRef> {
+  children
+    .iter()
+    .map(|child| {
+      match child {
+        JSXElementChild::JSXText(text) => {
+          let raw = ctx.header(AstNode::JSXText, parent, &text.span);
+          let raw_pos = ctx.str_field(AstProp::Raw);
+          let value_pos = ctx.str_field(AstProp::Value);
+          let pos = ctx.commit_schema(raw);
+
+          ctx.write_str(raw_pos, &text.raw);
+          ctx.write_str(value_pos, &text.value);
+
+          pos
+        }
+        JSXElementChild::JSXExprContainer(container) => {
+          serialize_jsx_container_expr(ctx, container, parent)
+        }
+        JSXElementChild::JSXElement(el) => {
+          serialize_jsx_element(ctx, el, parent)
+        }
+        JSXElementChild::JSXFragment(frag) => {
+          serialize_jsx_fragment(ctx, frag, parent)
+        }
+        // No parser supports this
+        JSXElementChild::JSXSpreadChild(_) => unreachable!(),
+      }
+    })
+    .collect::<Vec<_>>()
+}
+
+fn serialize_jsx_member_expr(
+  ctx: &mut TsEsTreeBuilder,
+  node: &JSXMemberExpr,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXMemberExpression, parent, &node.span);
+  let obj_ref = ctx.ref_field(AstProp::Object);
+  let prop_ref = ctx.ref_field(AstProp::Property);
+  let pos = ctx.commit_schema(raw);
+
+  let obj = match &node.obj {
+    JSXObject::JSXMemberExpr(member) => {
+      serialize_jsx_member_expr(ctx, member, pos)
+    }
+    JSXObject::Ident(ident) => serialize_jsx_identifier(ctx, ident, parent),
+  };
+
+  let prop = serialize_ident_name_as_jsx_identifier(ctx, &node.prop, pos);
+
+  ctx.write_ref(obj_ref, obj);
+  ctx.write_ref(prop_ref, prop);
+
+  pos
+}
+
+fn serialize_jsx_element_name(
+  ctx: &mut TsEsTreeBuilder,
+  node: &JSXElementName,
+  parent: NodeRef,
+) -> NodeRef {
+  match &node {
+    JSXElementName::Ident(ident) => {
+      serialize_jsx_identifier(ctx, ident, parent)
+    }
+    JSXElementName::JSXMemberExpr(member) => {
+      serialize_jsx_member_expr(ctx, member, parent)
+    }
+    JSXElementName::JSXNamespacedName(ns) => {
+      serialize_jsx_namespaced_name(ctx, ns, parent)
+    }
+  }
+}
+
+fn serialize_jsx_opening_element(
+  ctx: &mut TsEsTreeBuilder,
+  node: &JSXOpeningElement,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXOpeningElement, parent, &node.span);
+  let sclose_pos = ctx.bool_field(AstProp::SelfClosing);
+  let name_pos = ctx.ref_field(AstProp::Name);
+  let attrs_pos = ctx.ref_vec_field(AstProp::Attributes, node.attrs.len());
+  let pos = ctx.commit_schema(raw);
+
+  let name = serialize_jsx_element_name(ctx, &node.name, pos);
+
+  // FIXME: type args
+
+  let attrs = node
+    .attrs
+    .iter()
+    .map(|attr| match attr {
+      JSXAttrOrSpread::JSXAttr(attr) => {
+        let raw = ctx.header(AstNode::JSXAttribute, pos, &attr.span);
+        let name_pos = ctx.ref_field(AstProp::Name);
+        let value_pos = ctx.ref_field(AstProp::Value);
+        let attr_pos = ctx.commit_schema(raw);
+
+        let name = match &attr.name {
+          JSXAttrName::Ident(name) => {
+            serialize_ident_name_as_jsx_identifier(ctx, name, attr_pos)
+          }
+          JSXAttrName::JSXNamespacedName(node) => {
+            serialize_jsx_namespaced_name(ctx, node, attr_pos)
+          }
+        };
+
+        let value = attr.value.as_ref().map(|value| match value {
+          JSXAttrValue::Lit(lit) => serialize_lit(ctx, lit, attr_pos),
+          JSXAttrValue::JSXExprContainer(container) => {
+            serialize_jsx_container_expr(ctx, container, attr_pos)
+          }
+          JSXAttrValue::JSXElement(el) => {
+            serialize_jsx_element(ctx, el, attr_pos)
+          }
+          JSXAttrValue::JSXFragment(frag) => {
+            serialize_jsx_fragment(ctx, frag, attr_pos)
+          }
+        });
+
+        ctx.write_ref(name_pos, name);
+        ctx.write_maybe_ref(value_pos, value);
+
+        attr_pos
+      }
+      JSXAttrOrSpread::SpreadElement(spread) => {
+        let raw =
+          ctx.header(AstNode::JSXSpreadAttribute, pos, &spread.dot3_token);
+        let arg_pos = ctx.ref_field(AstProp::Argument);
+        let attr_pos = ctx.commit_schema(raw);
+
+        let arg = serialize_expr(ctx, &spread.expr, attr_pos);
+
+        ctx.write_ref(arg_pos, arg);
+
+        attr_pos
+      }
+    })
+    .collect::<Vec<_>>();
+
+  ctx.write_bool(sclose_pos, node.self_closing);
+  ctx.write_ref(name_pos, name);
+  ctx.write_refs(attrs_pos, attrs);
+
+  pos
+}
+
+fn serialize_jsx_container_expr(
+  ctx: &mut TsEsTreeBuilder,
+  node: &JSXExprContainer,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXExpressionContainer, parent, &node.span);
+  let expr_pos = ctx.ref_field(AstProp::Expression);
+  let pos = ctx.commit_schema(raw);
+
+  let expr = match &node.expr {
+    JSXExpr::JSXEmptyExpr(expr) => serialize_jsx_empty_expr(ctx, expr, pos),
+    JSXExpr::Expr(expr) => serialize_expr(ctx, expr, pos),
+  };
+
+  ctx.write_ref(expr_pos, expr);
+
+  pos
+}
+
+fn serialize_jsx_empty_expr(
+  ctx: &mut TsEsTreeBuilder,
+  node: &JSXEmptyExpr,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXEmptyExpression, parent, &node.span);
+  ctx.commit_schema(raw)
+}
+
+fn serialize_jsx_namespaced_name(
+  ctx: &mut TsEsTreeBuilder,
+  node: &JSXNamespacedName,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXNamespacedName, parent, &node.span);
+  let ns_pos = ctx.ref_field(AstProp::Namespace);
+  let name_pos = ctx.ref_field(AstProp::Name);
+  let pos = ctx.commit_schema(raw);
+
+  let ns_id = serialize_ident_name_as_jsx_identifier(ctx, &node.ns, pos);
+  let name_id = serialize_ident_name_as_jsx_identifier(ctx, &node.name, pos);
+
+  ctx.write_ref(ns_pos, ns_id);
+  ctx.write_ref(name_pos, name_id);
+
+  pos
+}
+
+fn serialize_ident_name_as_jsx_identifier(
+  ctx: &mut TsEsTreeBuilder,
+  node: &IdentName,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXIdentifier, parent, &node.span);
+  let name_pos = ctx.str_field(AstProp::Name);
+  let pos = ctx.commit_schema(raw);
+
+  ctx.write_str(name_pos, &node.sym);
+
+  pos
+}
+
+fn serialize_jsx_identifier(
+  ctx: &mut TsEsTreeBuilder,
+  node: &Ident,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::JSXIdentifier, parent, &node.span);
+  let name_pos = ctx.str_field(AstProp::Name);
+  let pos = ctx.commit_schema(raw);
+
+  ctx.write_str(name_pos, &node.sym);
+
+  pos
+}
+
+fn serialize_pat(
+  ctx: &mut TsEsTreeBuilder,
+  pat: &Pat,
+  parent: NodeRef,
+) -> NodeRef {
+  match pat {
+    Pat::Ident(node) => serialize_ident(ctx, &node.id, parent),
+    Pat::Array(node) => {
+      let raw = ctx.header(AstNode::ArrayPattern, parent, &node.span);
+      let opt_pos = ctx.bool_field(AstProp::Optional);
+      let type_pos = ctx.ref_field(AstProp::TypeAnnotation);
+      let elems_pos = ctx.ref_vec_field(AstProp::Elements, node.elems.len());
+      let pos = ctx.commit_schema(raw);
+
+      let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos);
+
+      let children = node
+        .elems
+        .iter()
+        .map(|pat| {
+          pat
+            .as_ref()
+            .map_or(NodeRef(0), |v| serialize_pat(ctx, v, pos))
+        })
+        .collect::<Vec<_>>();
+
+      ctx.write_bool(opt_pos, node.optional);
+      ctx.write_maybe_ref(type_pos, type_ann);
+      ctx.write_refs(elems_pos, children);
+
+      pos
+    }
+    Pat::Rest(node) => {
+      let raw = ctx.header(AstNode::RestElement, parent, &node.span);
+      let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+      let arg_pos = ctx.ref_field(AstProp::Argument);
+      let pos = ctx.commit_schema(raw);
+
+      let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos);
+      let arg = serialize_pat(ctx, &node.arg, parent);
+
+      ctx.write_maybe_ref(type_ann_pos, type_ann);
+      ctx.write_ref(arg_pos, arg);
+
+      pos
+    }
+    Pat::Object(node) => {
+      let raw = ctx.header(AstNode::ObjectPattern, parent, &node.span);
+      let opt_pos = ctx.bool_field(AstProp::Optional);
+      let props_pos = ctx.ref_vec_field(AstProp::Properties, node.props.len());
+      let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+      let pos = ctx.commit_schema(raw);
+
+      let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos);
+
+      let children = node
+        .props
+        .iter()
+        .map(|prop| match prop {
+          ObjectPatProp::KeyValue(key_value_prop) => {
+            let raw =
+              ctx.header(AstNode::Property, pos, &key_value_prop.span());
+            let computed_pos = ctx.bool_field(AstProp::Computed);
+            let key_pos = ctx.ref_field(AstProp::Key);
+            let value_pos = ctx.ref_field(AstProp::Value);
+            let child_pos = ctx.commit_schema(raw);
+
+            let computed = matches!(key_value_prop.key, PropName::Computed(_));
+
+            let key = serialize_prop_name(ctx, &key_value_prop.key, child_pos);
+            let value =
+              serialize_pat(ctx, key_value_prop.value.as_ref(), child_pos);
+
+            ctx.write_bool(computed_pos, computed);
+            ctx.write_ref(key_pos, key);
+            ctx.write_ref(value_pos, value);
+
+            child_pos
+          }
+          ObjectPatProp::Assign(assign_pat_prop) => {
+            let raw = ctx.header(AstNode::Property, pos, &assign_pat_prop.span);
+            // TODO: Doesn't seem to be present in the SWC AST
+            let _computed_pos = ctx.bool_field(AstProp::Computed);
+            let key_pos = ctx.ref_field(AstProp::Key);
+            let value_pos = ctx.ref_field(AstProp::Value);
+            let child_pos = ctx.commit_schema(raw);
+
+            let ident = serialize_ident(ctx, &assign_pat_prop.key.id, parent);
+
+            let value = assign_pat_prop
+              .value
+              .as_ref()
+              .map(|value| serialize_expr(ctx, value, child_pos));
+
+            ctx.write_ref(key_pos, ident);
+            ctx.write_maybe_ref(value_pos, value);
+
+            child_pos
+          }
+          ObjectPatProp::Rest(rest_pat) => {
+            serialize_pat(ctx, &Pat::Rest(rest_pat.clone()), parent)
+          }
+        })
+        .collect::<Vec<_>>();
+
+      ctx.write_bool(opt_pos, node.optional);
+      ctx.write_maybe_ref(type_ann_pos, type_ann);
+      ctx.write_refs(props_pos, children);
+
+      pos
+    }
+    Pat::Assign(node) => {
+      let raw = ctx.header(AstNode::AssignmentPattern, parent, &node.span);
+      let left_pos = ctx.ref_field(AstProp::Left);
+      let right_pos = ctx.ref_field(AstProp::Right);
+      let pos = ctx.commit_schema(raw);
+
+      let left = serialize_pat(ctx, &node.left, pos);
+      let right = serialize_expr(ctx, &node.right, pos);
+
+      ctx.write_ref(left_pos, left);
+      ctx.write_ref(right_pos, right);
+
+      pos
+    }
+    Pat::Invalid(_) => unreachable!(),
+    Pat::Expr(node) => serialize_expr(ctx, node, parent),
+  }
+}
+
+fn serialize_for_head(
+  ctx: &mut TsEsTreeBuilder,
+  for_head: &ForHead,
+  parent: NodeRef,
+) -> NodeRef {
+  match for_head {
+    ForHead::VarDecl(var_decl) => {
+      serialize_decl(ctx, &Decl::Var(var_decl.clone()), parent)
+    }
+    ForHead::UsingDecl(using_decl) => {
+      serialize_decl(ctx, &Decl::Using(using_decl.clone()), parent)
+    }
+    ForHead::Pat(pat) => serialize_pat(ctx, pat, parent),
+  }
+}
+
+fn serialize_spread(
+  ctx: &mut TsEsTreeBuilder,
+  expr: &Expr,
+  span: &Span,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::SpreadElement, parent, span);
+  let arg_pos = ctx.ref_field(AstProp::Argument);
+  let pos = ctx.commit_schema(raw);
+
+  let expr_pos = serialize_expr(ctx, expr, parent);
+  ctx.write_ref(arg_pos, expr_pos);
+
+  pos
+}
+
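+// Identifier property keys become plain `Identifier` nodes, while
+// string, numeric, and bigint keys reuse the literal serializers
+// below, so consumers see the same node shapes as for ordinary
+// expressions.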
+fn serialize_ident_name(
+  ctx: &mut TsEsTreeBuilder,
+  ident_name: &IdentName,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw =
+    ctx.header(AstNode::Identifier, parent, &ident_name.span);
+  let name_pos = ctx.str_field(AstProp::Name);
+  let pos = ctx.commit_schema(raw);
+
+  ctx.write_str(name_pos, ident_name.sym.as_str());
+
+  pos
+}
+
+fn serialize_prop_name(
+  ctx: &mut TsEsTreeBuilder,
+  prop_name: &PropName,
+  parent: NodeRef,
+) -> NodeRef {
+  match prop_name {
+    PropName::Ident(ident_name) => {
+      serialize_ident_name(ctx, ident_name, parent)
+    }
+    PropName::Str(str_prop) => {
+      let raw = ctx.header(AstNode::StringLiteral, parent, &str_prop.span);
+      let value_pos = ctx.str_field(AstProp::Value);
+      ctx.write_str(value_pos, &str_prop.value);
+      ctx.commit_schema(raw)
+    }
+    PropName::Num(number) => {
+      serialize_lit(ctx, &Lit::Num(number.clone()), parent)
+    }
+    PropName::Computed(node) => serialize_expr(ctx, &node.expr, parent),
+    PropName::BigInt(big_int) => {
+      serialize_lit(ctx, &Lit::BigInt(big_int.clone()), parent)
+    }
+  }
+}
+
+fn serialize_lit(
+  ctx: &mut TsEsTreeBuilder,
+  lit: &Lit,
+  parent: NodeRef,
+) -> NodeRef {
+  match lit {
+    Lit::Str(node) => {
+      let raw = ctx.header(AstNode::StringLiteral, parent, &node.span);
+      let value_pos = ctx.str_field(AstProp::Value);
+      let pos = ctx.commit_schema(raw);
+
+      ctx.write_str(value_pos, &node.value);
+
+      pos
+    }
+    Lit::Bool(lit_bool) => {
+      let raw = ctx.header(AstNode::Bool, parent, &lit_bool.span);
+      let value_pos = ctx.bool_field(AstProp::Value);
+      let pos = ctx.commit_schema(raw);
+
+      ctx.write_bool(value_pos, lit_bool.value);
+
+      pos
+    }
+    Lit::Null(node) => {
+      let raw = ctx.header(AstNode::Null, parent, &node.span);
+      ctx.commit_schema(raw)
+    }
+    Lit::Num(node) => {
+      let raw = ctx.header(AstNode::NumericLiteral, parent, &node.span);
+      let value_pos = ctx.str_field(AstProp::Value);
+      let pos = ctx.commit_schema(raw);
+
+      let value = node.raw.as_ref().unwrap();
+      ctx.write_str(value_pos, value);
+
+      pos
+    }
+    Lit::BigInt(node) => {
+      let raw = ctx.header(AstNode::BigIntLiteral, parent, &node.span);
+      let value_pos = ctx.str_field(AstProp::Value);
+      let pos = ctx.commit_schema(raw);
+
+      ctx.write_str(value_pos, &node.value.to_string());
+
+      pos
+    }
+    Lit::Regex(node) => {
+      let raw = ctx.header(AstNode::RegExpLiteral, parent, &node.span);
+      let pattern_pos = ctx.str_field(AstProp::Pattern);
+      let flags_pos = ctx.str_field(AstProp::Flags);
+      let pos = ctx.commit_schema(raw);
+
+      ctx.write_str(pattern_pos, node.exp.as_str());
+      ctx.write_str(flags_pos, node.flags.as_str());
+
+      pos
+    }
+    Lit::JSXText(jsxtext) => {
+      let raw = ctx.header(AstNode::JSXText, parent, &jsxtext.span);
+      ctx.commit_schema(raw)
+    }
+  }
+}
+
+fn serialize_ts_param_inst(
+  ctx: &mut TsEsTreeBuilder,
+  node: &TsTypeParamInstantiation,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw =
+    ctx.header(AstNode::TSTypeParameterInstantiation, parent, &node.span);
+  let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len());
+  let pos = ctx.commit_schema(raw);
+
+  let params = node
+    .params
+    .iter()
+    .map(|param| serialize_ts_type(ctx, param, pos))
+    .collect::<Vec<_>>();
+
+  ctx.write_refs(params_pos, params);
+
+  pos
+}
+
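+// Maps an swc `TsType` onto its TSESTree counterpart. Keyword types
+// become dedicated zero-field nodes; composite types recurse through
+// this function for their children. Several rarer variants are still
+// `todo!()` and will abort serialization when encountered.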
+fn serialize_ts_type(
+  ctx: &mut TsEsTreeBuilder,
+  node: &TsType,
+  parent: NodeRef,
+) -> NodeRef {
+  match node {
+    TsType::TsKeywordType(node) => {
+      let kind = match node.kind {
+        TsKeywordTypeKind::TsAnyKeyword => AstNode::TSAnyKeyword,
+        TsKeywordTypeKind::TsUnknownKeyword => AstNode::TSUnknownKeyword,
+        TsKeywordTypeKind::TsNumberKeyword => AstNode::TSNumberKeyword,
+        TsKeywordTypeKind::TsObjectKeyword => AstNode::TSObjectKeyword,
+        TsKeywordTypeKind::TsBooleanKeyword => AstNode::TSBooleanKeyword,
+        TsKeywordTypeKind::TsBigIntKeyword => AstNode::TSBigIntKeyword,
+        TsKeywordTypeKind::TsStringKeyword => AstNode::TSStringKeyword,
+        TsKeywordTypeKind::TsSymbolKeyword => AstNode::TSSymbolKeyword,
+        TsKeywordTypeKind::TsVoidKeyword => AstNode::TSVoidKeyword,
+        TsKeywordTypeKind::TsUndefinedKeyword => AstNode::TSUndefinedKeyword,
+        TsKeywordTypeKind::TsNullKeyword => AstNode::TSNullKeyword,
+        TsKeywordTypeKind::TsNeverKeyword => AstNode::TSNeverKeyword,
+        TsKeywordTypeKind::TsIntrinsicKeyword => AstNode::TSIntrinsicKeyword,
+      };
+
+      let raw = ctx.header(kind, parent, &node.span);
+      ctx.commit_schema(raw)
+    }
+    TsType::TsThisType(node) => {
+      let raw = ctx.header(AstNode::TSThisType, parent, &node.span);
+      ctx.commit_schema(raw)
+    }
+    TsType::TsFnOrConstructorType(node) => match node {
+      TsFnOrConstructorType::TsFnType(node) => {
+        let raw = ctx.header(AstNode::TSFunctionType, parent, &node.span);
+        let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len());
+        let pos = ctx.commit_schema(raw);
+
+        let param_ids = node
+          .params
+          .iter()
+          .map(|param| serialize_ts_fn_param(ctx, param, pos))
+          .collect::<Vec<_>>();
+
+        ctx.write_refs(params_pos, param_ids);
+
+        pos
+      }
+      TsFnOrConstructorType::TsConstructorType(_) => {
+        todo!()
+      }
+    },
+    TsType::TsTypeRef(node) => {
+      let raw = ctx.header(AstNode::TSTypeReference, parent, &node.span);
+      let name_pos = ctx.ref_field(AstProp::TypeName);
+      let type_args_pos = ctx.ref_field(AstProp::TypeArguments);
+      let pos = ctx.commit_schema(raw);
+
+      let name = serialize_ts_entity_name(ctx, &node.type_name, pos);
+
+      let type_args = node
+        .type_params
+        .clone()
+        .map(|param| serialize_ts_param_inst(ctx, &param, pos));
+
+      ctx.write_ref(name_pos, name);
+      ctx.write_maybe_ref(type_args_pos, type_args);
+
+      pos
+    }
+    TsType::TsTypeQuery(node) => {
+      let raw = ctx.header(AstNode::TSTypeQuery, parent, &node.span);
+      let name_pos = ctx.ref_field(AstProp::ExprName);
+      let type_args_pos = ctx.ref_field(AstProp::TypeArguments);
+      let pos = ctx.commit_schema(raw);
+
+      let expr_name = match &node.expr_name {
+        TsTypeQueryExpr::TsEntityName(entity) => {
+          serialize_ts_entity_name(ctx, entity, pos)
+        }
+        TsTypeQueryExpr::Import(child) => {
+          serialize_ts_type(ctx, &TsType::TsImportType(child.clone()), pos)
+        }
+      };
+
+      let type_args = node
+        .type_args
+        .clone()
+        .map(|param| serialize_ts_param_inst(ctx, &param, pos));
+
+      ctx.write_ref(name_pos, expr_name);
+      ctx.write_maybe_ref(type_args_pos, type_args);
+
+      pos
+    }
+    TsType::TsTypeLit(_) => {
+      // TODO: Not sure what this is
+      todo!()
+    }
+    TsType::TsArrayType(node) => {
+      let raw = ctx.header(AstNode::TSArrayType, parent, &node.span);
+      let elem_pos = ctx.ref_field(AstProp::ElementType);
+      let pos = ctx.commit_schema(raw);
+
+      let elem = serialize_ts_type(ctx, &node.elem_type, pos);
+
+      ctx.write_ref(elem_pos, elem);
+
+      pos
+    }
+    TsType::TsTupleType(node) => {
+      let raw = ctx.header(AstNode::TSTupleType, parent, &node.span);
+      let children_pos =
+        ctx.ref_vec_field(AstProp::ElementTypes, node.elem_types.len());
+      let pos = ctx.commit_schema(raw);
+
+      let children = node
+        .elem_types
+        .iter()
+        .map(|elem| {
+          if let Some(label) = &elem.label {
+            let raw = ctx.header(AstNode::TSNamedTupleMember, pos, &elem.span);
+            let label_pos = ctx.ref_field(AstProp::Label);
+            let type_pos = ctx.ref_field(AstProp::ElementType);
+            let child_pos = ctx.commit_schema(raw);
+
+            let label_id = serialize_pat(ctx, label, child_pos);
+            let type_id =
+              serialize_ts_type(ctx, elem.ty.as_ref(), child_pos);
+
+            ctx.write_ref(label_pos, label_id);
+            ctx.write_ref(type_pos, type_id);
+
+            child_pos
+          } else {
+            serialize_ts_type(ctx, elem.ty.as_ref(), pos)
+          }
+        })
+        .collect::<Vec<_>>();
+
+      ctx.write_refs(children_pos, children);
+
+      pos
+    }
+    TsType::TsOptionalType(_) => todo!(),
+    TsType::TsRestType(node) => {
+      let raw = ctx.header(AstNode::TSRestType, parent, &node.span);
+      let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+      let pos = ctx.commit_schema(raw);
+
+      let type_ann = serialize_ts_type(ctx, &node.type_ann, pos);
+
+      ctx.write_ref(type_ann_pos, type_ann);
+
+      pos
+    }
+    TsType::TsUnionOrIntersectionType(node) => match node {
+      TsUnionOrIntersectionType::TsUnionType(node) => {
+        let raw = ctx.header(AstNode::TSUnionType, parent, &node.span);
+        let types_pos = ctx.ref_vec_field(AstProp::Types, node.types.len());
+        let pos = ctx.commit_schema(raw);
+
+        let children = node
+          .types
+          .iter()
+          .map(|item| serialize_ts_type(ctx, item, pos))
+          .collect::<Vec<_>>();
+
+        ctx.write_refs(types_pos, children);
+
+        pos
+      }
+      TsUnionOrIntersectionType::TsIntersectionType(node) => {
+        let raw = ctx.header(AstNode::TSIntersectionType, parent, &node.span);
+        let types_pos = ctx.ref_vec_field(AstProp::Types, node.types.len());
+        let pos = ctx.commit_schema(raw);
+
+        let children = node
+          .types
+          .iter()
+          .map(|item| serialize_ts_type(ctx, item, pos))
+          .collect::<Vec<_>>();
+
+        ctx.write_refs(types_pos, children);
+
+        pos
+      }
+    },
+    TsType::TsConditionalType(node) => {
+      let raw = ctx.header(AstNode::TSConditionalType, parent, &node.span);
+      let check_pos = ctx.ref_field(AstProp::CheckType);
+      let extends_pos = ctx.ref_field(AstProp::ExtendsType);
+      let true_pos = ctx.ref_field(AstProp::TrueType);
+      let false_pos = ctx.ref_field(AstProp::FalseType);
+      let pos = ctx.commit_schema(raw);
+
+      let check = serialize_ts_type(ctx, &node.check_type, pos);
+      let extends = serialize_ts_type(ctx, &node.extends_type, pos);
+      let v_true = serialize_ts_type(ctx, &node.true_type, pos);
+      let v_false = serialize_ts_type(ctx, &node.false_type, pos);
+
+      ctx.write_ref(check_pos, check);
+      ctx.write_ref(extends_pos, extends);
+      ctx.write_ref(true_pos, v_true);
+      ctx.write_ref(false_pos, v_false);
+
+      pos
+    }
+    TsType::TsInferType(node) => {
+      let raw = ctx.header(AstNode::TSInferType, parent, &node.span);
+      let param_pos = ctx.ref_field(AstProp::TypeParameter);
+      let pos = ctx.commit_schema(raw);
+
+      let param = serialize_ts_type_param(ctx, &node.type_param, parent);
+
+      ctx.write_ref(param_pos, param);
+
+      pos
+    }
+    TsType::TsParenthesizedType(_) => todo!(),
+    TsType::TsTypeOperator(node) => {
+      let raw = ctx.header(AstNode::TSTypeOperator, parent, &node.span);
+      let operator_pos = ctx.str_field(AstProp::Operator);
+      let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+      let pos = ctx.commit_schema(raw);
+
+      let type_ann = serialize_ts_type(ctx, &node.type_ann, pos);
+
+      ctx.write_str(
+        operator_pos,
+        match node.op {
+          TsTypeOperatorOp::KeyOf => "keyof",
+          TsTypeOperatorOp::Unique => "unique",
+          TsTypeOperatorOp::ReadOnly => "readonly",
+        },
+      );
+      ctx.write_ref(type_ann_pos, type_ann);
+
+      pos
+    }
+    TsType::TsIndexedAccessType(node) => {
+      let raw = ctx.header(AstNode::TSIndexedAccessType, parent, &node.span);
+      let index_type_pos = ctx.ref_field(AstProp::IndexType);
+      let obj_type_pos = ctx.ref_field(AstProp::ObjectType);
+      let pos = ctx.commit_schema(raw);
+
+      let index = serialize_ts_type(ctx, &node.index_type, pos);
+      let obj = serialize_ts_type(ctx, &node.obj_type, pos);
+
+      ctx.write_ref(index_type_pos, index);
+      ctx.write_ref(obj_type_pos, obj);
+
+      pos
+    }
+    TsType::TsMappedType(node) => {
+      let raw = ctx.header(AstNode::TSMappedType, parent, &node.span);
+      let name_pos = ctx.ref_field(AstProp::NameType);
+      let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+      let type_param_pos = ctx.ref_field(AstProp::TypeParameter);
+      let pos = ctx.commit_schema(raw);
+
+      let opt_pos =
+        create_true_plus_minus_field(ctx, AstProp::Optional, node.optional);
+      let readonly_pos =
+        create_true_plus_minus_field(ctx, AstProp::Readonly, node.readonly);
+
+      let name_id = maybe_serialize_ts_type(ctx, &node.name_type, pos);
+      let type_ann = maybe_serialize_ts_type(ctx, &node.type_ann, pos);
+      let type_param = serialize_ts_type_param(ctx, &node.type_param, pos);
+
+      write_true_plus_minus(ctx, opt_pos, node.optional);
+      write_true_plus_minus(ctx, readonly_pos, node.readonly);
+      ctx.write_maybe_ref(name_pos, name_id);
+      ctx.write_maybe_ref(type_ann_pos, type_ann);
+      ctx.write_ref(type_param_pos, type_param);
+
+      pos
+    }
+    TsType::TsLitType(node) => serialize_ts_lit_type(ctx, node, parent),
+    TsType::TsTypePredicate(node) => {
+      let raw = ctx.header(AstNode::TSTypePredicate, parent, &node.span);
+      let asserts_pos = ctx.bool_field(AstProp::Asserts);
+      let param_name_pos = ctx.ref_field(AstProp::ParameterName);
+      let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation);
+      let pos = ctx.commit_schema(raw);
+
+      let param_name = match &node.param_name {
+        TsThisTypeOrIdent::TsThisType(ts_this_type) => {
+          let raw = ctx.header(AstNode::TSThisType, pos, &ts_this_type.span);
+          ctx.commit_schema(raw)
+        }
+        TsThisTypeOrIdent::Ident(ident) => serialize_ident(ctx, ident, pos),
+      };
+
+      let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos);
+
+      ctx.write_bool(asserts_pos, node.asserts);
+      ctx.write_ref(param_name_pos, param_name);
+      ctx.write_maybe_ref(type_ann_pos, type_ann);
+
+      pos
+    }
+    TsType::TsImportType(node) => {
+      let raw = ctx.header(AstNode::TSImportType, parent, &node.span);
+      let arg_pos = ctx.ref_field(AstProp::Argument);
+      let type_args_pos = ctx.ref_field(AstProp::TypeArguments);
+      let qualifier_pos = ctx.ref_field(AstProp::Qualifier);
+      let pos = ctx.commit_schema(raw);
+
+      let arg = serialize_ts_lit_type(
+        ctx,
+        &TsLitType {
+          lit: TsLit::Str(node.arg.clone()),
+          span: node.arg.span,
+        },
+        pos,
+      );
+
+      let type_arg = node.type_args.clone().map(|param_node| {
+        serialize_ts_param_inst(ctx, param_node.as_ref(), pos)
+      });
+
+      let qualifier = node.qualifier.clone().map_or(NodeRef(0), |quali| {
+        serialize_ts_entity_name(ctx, &quali, pos)
+      });
+
+      ctx.write_ref(arg_pos, arg);
+      ctx.write_ref(qualifier_pos, qualifier);
+      ctx.write_maybe_ref(type_args_pos, type_arg);
+
+      pos
+    }
+  }
+}
+
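+// Literal types reuse the expression-level literal serializers, so a
+// `TSLiteralType` wraps the same node shapes as runtime literals.
+// Template-literal types are routed through the template expression
+// path with an empty expression list.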
+fn serialize_ts_lit_type(
+  ctx: &mut TsEsTreeBuilder,
+  node: &TsLitType,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::TSLiteralType, parent, &node.span);
+  let lit_pos = ctx.ref_field(AstProp::Literal);
+  let pos = ctx.commit_schema(raw);
+
+  let lit = match &node.lit {
+    TsLit::Number(lit) => serialize_lit(ctx, &Lit::Num(lit.clone()), pos),
+    TsLit::Str(lit) => serialize_lit(ctx, &Lit::Str(lit.clone()), pos),
+    TsLit::Bool(lit) => serialize_lit(ctx, &Lit::Bool(*lit), pos),
+    TsLit::BigInt(lit) => serialize_lit(ctx, &Lit::BigInt(lit.clone()), pos),
+    TsLit::Tpl(lit) => serialize_expr(
+      ctx,
+      &Expr::Tpl(Tpl {
+        span: lit.span,
+        exprs: vec![],
+        quasis: lit.quasis.clone(),
+      }),
+      pos,
+    ),
+  };
+
+  ctx.write_ref(lit_pos, lit);
+
+  pos
+}
+
+fn create_true_plus_minus_field(
+  ctx: &mut TsEsTreeBuilder,
+  prop: AstProp,
+  value: Option<TruePlusMinus>,
+) -> NodePos {
+  if let Some(v) = value {
+    match v {
+      TruePlusMinus::True => NodePos::Bool(ctx.bool_field(prop)),
+      TruePlusMinus::Plus | TruePlusMinus::Minus => {
+        NodePos::Str(ctx.str_field(prop))
+      }
+    }
+  } else {
+    NodePos::Undef(ctx.undefined_field(prop))
+  }
+}
+
+fn extract_pos(pos: NodePos) -> usize {
+  match pos {
+    NodePos::Bool(bool_pos) => bool_pos.0,
+    NodePos::Field(field_pos) => field_pos.0,
+    NodePos::FieldArr(field_arr_pos) => field_arr_pos.0,
+    NodePos::Str(str_pos) => str_pos.0,
+    NodePos::Undef(undef_pos) => undef_pos.0,
+    NodePos::Null(null_pos) => null_pos.0,
+  }
+}
+
+fn write_true_plus_minus(
+  ctx: &mut TsEsTreeBuilder,
+  pos: NodePos,
+  value: Option<TruePlusMinus>,
+) {
+  if let Some(v) = value {
+    match v {
+      TruePlusMinus::True => {
+        let bool_pos = BoolPos(extract_pos(pos));
+        ctx.write_bool(bool_pos, true);
+      }
+      TruePlusMinus::Plus => {
+        let str_pos = StrPos(extract_pos(pos));
+        ctx.write_str(str_pos, "+")
+      }
+      TruePlusMinus::Minus => {
+        let str_pos = StrPos(extract_pos(pos));
+        ctx.write_str(str_pos, "-")
+      }
+    }
+  }
+}
+
+fn serialize_ts_entity_name(
+  ctx: &mut TsEsTreeBuilder,
+  node: &TsEntityName,
+  parent: NodeRef,
+) -> NodeRef {
+  match &node {
+    TsEntityName::TsQualifiedName(_) => todo!(),
+    TsEntityName::Ident(ident) => serialize_ident(ctx, ident, parent),
+  }
+}
+
+fn maybe_serialize_ts_type_ann(
+  ctx: &mut TsEsTreeBuilder,
+  node: &Option<Box<TsTypeAnn>>,
+  parent: NodeRef,
+) -> Option<NodeRef> {
+  node
+    .as_ref()
+    .map(|type_ann| serialize_ts_type_ann(ctx, type_ann, parent))
+}
+
+fn serialize_ts_type_ann(
+  ctx: &mut TsEsTreeBuilder,
+  node: &TsTypeAnn,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::TSTypeAnnotation, parent, &node.span);
+  let type_pos = ctx.ref_field(AstProp::TypeAnnotation);
+  let pos = ctx.commit_schema(raw);
+
+  let v_type = serialize_ts_type(ctx, &node.type_ann, pos);
+
+  ctx.write_ref(type_pos, v_type);
+
+  pos
+}
+
+fn maybe_serialize_ts_type(
+  ctx: &mut TsEsTreeBuilder,
+  node: &Option<Box<TsType>>,
+  parent: NodeRef,
+) -> Option<NodeRef> {
+  node
+    .as_ref()
+    .map(|item| serialize_ts_type(ctx, item, parent))
+}
+
+fn serialize_ts_type_param(
+  ctx: &mut TsEsTreeBuilder,
+  node: &TsTypeParam,
+  parent: NodeRef,
+) -> NodeRef {
+  let raw = ctx.header(AstNode::TSTypeParameter, parent, &node.span);
+  let name_pos = ctx.ref_field(AstProp::Name);
+  let constraint_pos = ctx.ref_field(AstProp::Constraint);
+  let default_pos = ctx.ref_field(AstProp::Default);
+  let const_pos = ctx.bool_field(AstProp::Const);
+  let in_pos = ctx.bool_field(AstProp::In);
+  let out_pos = ctx.bool_field(AstProp::Out);
+  let pos = ctx.commit_schema(raw);
+
+  let name = serialize_ident(ctx, &node.name, pos);
+  let constraint = maybe_serialize_ts_type(ctx, &node.constraint, pos);
+  let default = maybe_serialize_ts_type(ctx, &node.default, pos);
+
+  ctx.write_bool(const_pos, node.is_const);
+  ctx.write_bool(in_pos, node.is_in);
+  ctx.write_bool(out_pos, node.is_out);
+  ctx.write_ref(name_pos, name);
+  ctx.write_maybe_ref(constraint_pos, constraint);
+  ctx.write_maybe_ref(default_pos, default);
+
+  pos
+}
+
+fn maybe_serialize_ts_type_param(
+  ctx: &mut TsEsTreeBuilder,
+  node: &Option<Box<TsTypeParamDecl>>,
+  parent: NodeRef,
+) -> Option<NodeRef> {
+  node.as_ref().map(|node| {
+    let raw =
+      ctx.header(AstNode::TSTypeParameterDeclaration, parent, &node.span);
+    let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len());
+    let pos = ctx.commit_schema(raw);
+
+    let params = node
+      .params
+      .iter()
+      .map(|param| serialize_ts_type_param(ctx, param, pos))
+      .collect::<Vec<_>>();
+
+    ctx.write_refs(params_pos, params);
+
+    pos
+  })
+}
+
+fn serialize_ts_fn_param(
+  ctx: &mut TsEsTreeBuilder,
+  node: &TsFnParam,
+  parent: NodeRef,
+) -> NodeRef {
+  match node {
+    TsFnParam::Ident(ident) => serialize_ident(ctx, ident, parent),
+    TsFnParam::Array(pat) => {
+      serialize_pat(ctx, &Pat::Array(pat.clone()), parent)
+    }
+    TsFnParam::Rest(pat) => serialize_pat(ctx, &Pat::Rest(pat.clone()), parent),
+    TsFnParam::Object(pat) => {
+      serialize_pat(ctx, &Pat::Object(pat.clone()), parent)
+    }
+  }
+}
diff --git a/cli/tools/lint/ast_buffer/ts_estree.rs b/cli/tools/lint/ast_buffer/ts_estree.rs
new file mode 100644
index 0000000000..29bdb0d378
--- /dev/null
+++ b/cli/tools/lint/ast_buffer/ts_estree.rs
@@ -0,0 +1,520 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::fmt;
+use std::fmt::Debug;
+use std::fmt::Display;
+
+use deno_ast::swc::common::Span;
+
+use super::buffer::AstBufSerializer;
+use super::buffer::BoolPos;
+use super::buffer::FieldArrPos;
+use super::buffer::FieldPos;
+use super::buffer::NodeRef;
+use super::buffer::NullPos;
+use super::buffer::PendingNodeRef;
+use super::buffer::SerializeCtx;
+use super::buffer::StrPos;
+use super::buffer::UndefPos;
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum AstNode {
+  // First node must always be the empty/invalid node
+  Invalid,
+  // Typically the
+  Program,
+
+  // Module declarations
+  ExportAllDeclaration,
+  ExportDefaultDeclaration,
+  ExportNamedDeclaration,
+  ImportDeclaration,
+  TsExportAssignment,
+  TsImportEquals,
+  TsNamespaceExport,
+
+  // Decls
+  ClassDeclaration,
+  FunctionDeclaration,
+  TSEnumDeclaration,
+  TSInterface,
+  TsModule,
+  TsTypeAlias,
+  Using,
+  VariableDeclaration,
+
+  // Statements
+  BlockStatement,
+  BreakStatement,
+  ContinueStatement,
+  DebuggerStatement,
+  DoWhileStatement,
+  EmptyStatement,
+  ExpressionStatement,
+  ForInStatement,
+  ForOfStatement,
+  ForStatement,
+  IfStatement,
+  LabeledStatement,
+  ReturnStatement,
+  SwitchCase,
+  SwitchStatement,
+  ThrowStatement,
+  TryStatement,
+  WhileStatement,
+  WithStatement,
+
+  // Expressions
+  ArrayExpression,
+  ArrowFunctionExpression,
+  AssignmentExpression,
+  AwaitExpression,
+  BinaryExpression,
+  CallExpression,
+  ChainExpression,
+  ClassExpression,
+  ConditionalExpression,
+  FunctionExpression,
+  Identifier,
+  ImportExpression,
+  LogicalExpression,
+  MemberExpression,
+  MetaProp,
+  NewExpression,
+  ObjectExpression,
+  PrivateIdentifier,
+  SequenceExpression,
+  Super,
+  TaggedTemplateExpression,
+  TemplateLiteral,
+  ThisExpression,
+  TSAsExpression,
+  TsConstAssertion,
+  TsInstantiation,
+  TSNonNullExpression,
+  TSSatisfiesExpression,
+  TSTypeAssertion,
+  UnaryExpression,
+  UpdateExpression,
+  YieldExpression,
+
+  // TODO: TSEsTree uses a single literal node
+  // Literals
+  StringLiteral,
+  Bool,
+  Null,
+  NumericLiteral,
+  BigIntLiteral,
+  RegExpLiteral,
+
+  EmptyExpr,
+  SpreadElement,
+  Property,
+  VariableDeclarator,
+  CatchClause,
+  RestElement,
+  ExportSpecifier,
+  TemplateElement,
+  MethodDefinition,
+  ClassBody,
+
+  // Patterns
+  ArrayPattern,
+  AssignmentPattern,
+  ObjectPattern,
+
+  // JSX
+  JSXAttribute,
+  JSXClosingElement,
+  JSXClosingFragment,
+  JSXElement,
+  JSXEmptyExpression,
+  JSXExpressionContainer,
+  JSXFragment,
+  JSXIdentifier,
+  JSXMemberExpression,
+  JSXNamespacedName,
+  JSXOpeningElement,
+  JSXOpeningFragment,
+  JSXSpreadAttribute,
+  JSXSpreadChild,
+  JSXText,
+
+  TSTypeAnnotation,
+  TSTypeParameterDeclaration,
+  TSTypeParameter,
+  TSTypeParameterInstantiation,
+  TSEnumMember,
+  TSInterfaceBody,
+  TSInterfaceHeritage,
+  TSTypeReference,
+  TSThisType,
+  TSLiteralType,
+  TSInferType,
+  TSConditionalType,
+  TSUnionType,
+  TSIntersectionType,
+  TSMappedType,
+  TSTypeQuery,
+  TSTupleType,
+  TSNamedTupleMember,
+  TSFunctionType,
+  TsCallSignatureDeclaration,
+  TSPropertySignature,
+  TSMethodSignature,
+  TSIndexSignature,
+  TSIndexedAccessType,
+  TSTypeOperator,
+  TSTypePredicate,
+  TSImportType,
+  TSRestType,
+  TSArrayType,
+  TSClassImplements,
+
+  TSAnyKeyword,
+  TSBigIntKeyword,
+  TSBooleanKeyword,
+  TSIntrinsicKeyword,
+  TSNeverKeyword,
+  TSNullKeyword,
+  TSNumberKeyword,
+  TSObjectKeyword,
+  TSStringKeyword,
+  TSSymbolKeyword,
+  TSUndefinedKeyword,
+  TSUnknownKeyword,
+  TSVoidKeyword,
+  TSEnumBody, // Last value is used for max value
+}
+
+impl Display for AstNode {
+  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    Debug::fmt(self, f)
+  }
+}
+
+impl From<AstNode> for u8 {
+  fn from(m: AstNode) -> u8 {
+    m as u8
+  }
+}
+
+#[derive(Debug, Clone)]
+pub enum AstProp {
+  // Base, these must be in sync with JS in the same order.
+  Invalid,
+  Type,
+  Parent,
+  Range,
+  Length, // Not used in AST, but can be used in attr selectors
+
+  // Starting from here the order doesn't matter.
+  // Following are all possible AST node properties.
+  Abstract,
+  Accessibility,
+  Alternate,
+  Argument,
+  Arguments,
+  Asserts,
+  Async,
+  Attributes,
+  Await,
+  Block,
+  Body,
+  Callee,
+  Cases,
+  Children,
+  CheckType,
+  ClosingElement,
+  ClosingFragment,
+  Computed,
+  Consequent,
+  Const,
+  Constraint,
+  Cooked,
+  Declaration,
+  Declarations,
+  Declare,
+  Default,
+  Definite,
+  Delegate,
+  Discriminant,
+  Elements,
+  ElementType,
+  ElementTypes,
+  ExprName,
+  Expression,
+  Expressions,
+  Exported,
+  Extends,
+  ExtendsType,
+  FalseType,
+  Finalizer,
+  Flags,
+  Generator,
+  Handler,
+  Id,
+  In,
+  IndexType,
+  Init,
+  Initializer,
+  Implements,
+  Key,
+  Kind,
+  Label,
+  Left,
+  Literal,
+  Local,
+  Members,
+  Meta,
+  Method,
+  Name,
+  Namespace,
+  NameType,
+  Object,
+  ObjectType,
+  OpeningElement,
+  OpeningFragment,
+  Operator,
+  Optional,
+  Out,
+  Param,
+  ParameterName,
+  Params,
+  Pattern,
+  Prefix,
+  Properties,
+  Property,
+  Qualifier,
+  Quasi,
+  Quasis,
+  Raw,
+  Readonly,
+  ReturnType,
+  Right,
+  SelfClosing,
+  Shorthand,
+  Source,
+  SourceType,
+  Specifiers,
+  Static,
+  SuperClass,
+  SuperTypeArguments,
+  Tag,
+  Tail,
+  Test,
+  TrueType,
+  TypeAnnotation,
+  TypeArguments,
+  TypeName,
+  TypeParameter,
+  TypeParameters,
+  Types,
+  Update,
+  Value, // Last value is used for max value
+}
+
+// TODO: Feels like there should be an easier way to iterate over an
+// enum in Rust and lowercase the first letter.
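+// `Display` renders each property as the camelCase key that appears
+// in the serialized TSESTree output.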
+impl Display for AstProp {
+  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    let s = match self {
+      AstProp::Invalid => "__invalid__", // unused
+      AstProp::Parent => "parent",
+      AstProp::Range => "range",
+      AstProp::Type => "type",
+      AstProp::Length => "length",
+      AstProp::Abstract => "abstract",
+      AstProp::Accessibility => "accessibility",
+      AstProp::Alternate => "alternate",
+      AstProp::Argument => "argument",
+      AstProp::Arguments => "arguments",
+      AstProp::Asserts => "asserts",
+      AstProp::Async => "async",
+      AstProp::Attributes => "attributes",
+      AstProp::Await => "await",
+      AstProp::Block => "block",
+      AstProp::Body => "body",
+      AstProp::Callee => "callee",
+      AstProp::Cases => "cases",
+      AstProp::Children => "children",
+      AstProp::CheckType => "checkType",
+      AstProp::ClosingElement => "closingElement",
+      AstProp::ClosingFragment => "closingFragment",
+      AstProp::Computed => "computed",
+      AstProp::Consequent => "consequent",
+      AstProp::Const => "const",
+      AstProp::Constraint => "constraint",
+      AstProp::Cooked => "cooked",
+      AstProp::Declaration => "declaration",
+      AstProp::Declarations => "declarations",
+      AstProp::Declare => "declare",
+      AstProp::Default => "default",
+      AstProp::Definite => "definite",
+      AstProp::Delegate => "delegate",
+      AstProp::Discriminant => "discriminant",
+      AstProp::Elements => "elements",
+      AstProp::ElementType => "elementType",
+      AstProp::ElementTypes => "elementTypes",
+      AstProp::ExprName => "exprName",
+      AstProp::Expression => "expression",
+      AstProp::Expressions => "expressions",
+      AstProp::Exported => "exported",
+      AstProp::Extends => "extends",
+      AstProp::ExtendsType => "extendsType",
+      AstProp::FalseType => "falseType",
+      AstProp::Finalizer => "finalizer",
+      AstProp::Flags => "flags",
+      AstProp::Generator => "generator",
+      AstProp::Handler => "handler",
+      AstProp::Id => "id",
+      AstProp::In => "in",
+      AstProp::IndexType => "indexType",
+      AstProp::Init => "init",
+      AstProp::Initializer => "initializer",
+      AstProp::Implements => "implements",
+      AstProp::Key => "key",
+      AstProp::Kind => "kind",
+      AstProp::Label => "label",
+      AstProp::Left => "left",
+      AstProp::Literal => "literal",
+      AstProp::Local => "local",
+      AstProp::Members => "members",
+      AstProp::Meta => "meta",
+      AstProp::Method => "method",
+      AstProp::Name => "name",
+      AstProp::Namespace => "namespace",
+      AstProp::NameType => "nameType",
+      AstProp::Object => "object",
+      AstProp::ObjectType => "objectType",
+      AstProp::OpeningElement => "openingElement",
+      AstProp::OpeningFragment => "openingFragment",
+      AstProp::Operator => "operator",
+      AstProp::Optional => "optional",
+      AstProp::Out => "out",
+      AstProp::Param => "param",
+      AstProp::ParameterName => "parameterName",
+      AstProp::Params => "params",
+      AstProp::Pattern => "pattern",
+      AstProp::Prefix => "prefix",
+      AstProp::Properties => "properties",
+      AstProp::Property => "property",
+      AstProp::Qualifier => "qualifier",
+      AstProp::Quasi => "quasi",
+      AstProp::Quasis => "quasis",
+      AstProp::Raw => "raw",
+      AstProp::Readonly => "readonly",
+      AstProp::ReturnType => "returnType",
+      AstProp::Right => "right",
+      AstProp::SelfClosing => "selfClosing",
+      AstProp::Shorthand => "shorthand",
+      AstProp::Source => "source",
+      AstProp::SourceType => "sourceType",
+      AstProp::Specifiers => "specifiers",
+      AstProp::Static => "static",
+      AstProp::SuperClass => "superClass",
+      AstProp::SuperTypeArguments => "superTypeArguments",
+      AstProp::Tag => "tag",
+      AstProp::Tail => "tail",
+      AstProp::Test => "test",
+      AstProp::TrueType => "trueType",
+      AstProp::TypeAnnotation => "typeAnnotation",
+      AstProp::TypeArguments => "typeArguments",
+      AstProp::TypeName => "typeName",
+      AstProp::TypeParameter => "typeParameter",
+      AstProp::TypeParameters => "typeParameters",
+      AstProp::Types => "types",
+      AstProp::Update => "update",
+      AstProp::Value => "value",
+    };
+
+    write!(f, "{}", s)
+  }
+}
+
+impl From<AstProp> for u8 {
+  fn from(m: AstProp) -> u8 {
+    m as u8
+  }
+}
+
+pub struct TsEsTreeBuilder {
+  ctx: SerializeCtx,
+}
+
+// TODO: Add a builder API to make it easier to convert from different source
+// ast formats.
+impl TsEsTreeBuilder {
+  pub fn new() -> Self {
+    // Max values
+    // TODO: Maybe there is a rust macro to grab the last enum value?
+    let kind_max_count: u8 = u8::from(AstNode::TSEnumBody) + 1;
+    let prop_max_count: u8 = u8::from(AstProp::Value) + 1;
+    Self {
+      ctx: SerializeCtx::new(kind_max_count, prop_max_count),
+    }
+  }
+}
+
+impl AstBufSerializer<AstNode, AstProp> for TsEsTreeBuilder {
+  fn header(
+    &mut self,
+    kind: AstNode,
+    parent: NodeRef,
+    span: &Span,
+  ) -> PendingNodeRef {
+    self.ctx.header(kind, parent, span)
+  }
+
+  fn commit_schema(&mut self, offset: PendingNodeRef) -> NodeRef {
+    self.ctx.commit_schema(offset)
+  }
+
+  fn ref_field(&mut self, prop: AstProp) -> FieldPos {
+    FieldPos(self.ctx.ref_field(prop))
+  }
+
+  fn ref_vec_field(&mut self, prop: AstProp, len: usize) -> FieldArrPos {
+    FieldArrPos(self.ctx.ref_vec_field(prop, len))
+  }
+
+  fn str_field(&mut self, prop: AstProp) -> StrPos {
+    StrPos(self.ctx.str_field(prop))
+  }
+
+  fn bool_field(&mut self, prop: AstProp) -> BoolPos {
+    BoolPos(self.ctx.bool_field(prop))
+  }
+
+  fn undefined_field(&mut self, prop: AstProp) -> UndefPos {
+    UndefPos(self.ctx.undefined_field(prop))
+  }
+
+  fn null_field(&mut self, prop: AstProp) -> NullPos {
+    NullPos(self.ctx.null_field(prop))
+  }
+
+  fn write_ref(&mut self, pos: FieldPos, value: NodeRef) {
+    self.ctx.write_ref(pos.0, value);
+  }
+
+  fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>) {
+    self.ctx.write_maybe_ref(pos.0, value);
+  }
+
+  fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>) {
+    self.ctx.write_refs(pos.0, value);
+  }
+
+  fn write_str(&mut self, pos: StrPos, value: &str) {
+    self.ctx.write_str(pos.0, value);
+  }
+
+  fn write_bool(&mut self, pos: BoolPos, value: bool) {
+    self.ctx.write_bool(pos.0, value);
+  }
+
+  fn serialize(&mut self) -> Vec<u8> {
+    self.ctx.serialize()
+  }
+}
diff --git a/cli/tools/lint/linter.rs b/cli/tools/lint/linter.rs
index 2c2bc43acb..6bb3c628fa 100644
--- a/cli/tools/lint/linter.rs
+++ b/cli/tools/lint/linter.rs
@@ -15,8 +15,9 @@
 use deno_lint::linter::LintConfig as DenoLintConfig;
 use deno_lint::linter::LintFileOptions;
 use deno_lint::linter::Linter as DenoLintLinter;
 use deno_lint::linter::LinterOptions;
+use deno_path_util::fs::atomic_write_file_with_retries;
 
-use crate::util::fs::atomic_write_file_with_retries;
+use crate::sys::CliSys;
 use crate::util::fs::specifier_from_file_path;
 
 use super::rules::FileOrPackageLintRule;
@@ -176,8 +177,9 @@ impl CliLinter {
     if fix_iterations > 0 {
       // everything looks good and the file still parses, so write it out
       atomic_write_file_with_retries(
+        &CliSys::default(),
         file_path,
-        source.text().as_ref(),
+        source.text().as_bytes(),
         crate::cache::CACHE_PERM,
       )
       .context("Failed writing fix to file.")?;
diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs
index 596359bdc0..6d3997ac3b 100644
--- a/cli/tools/lint/mod.rs
+++ b/cli/tools/lint/mod.rs
@@ -20,7 +20,7 @@
 use deno_core::unsync::future::SharedLocal;
 use deno_graph::ModuleGraph;
 use deno_lint::diagnostic::LintDiagnostic;
-use deno_lint::linter::LintConfig;
+use deno_lint::linter::LintConfig as DenoLintConfig;
 use log::debug;
 use reporters::create_reporter;
 use reporters::LintReporter;
@@ -29,7 +29,6 @@ use std::collections::HashSet;
 use std::fs;
 use std::io::stdin;
 use std::io::Read;
-use std::path::Path;
 use std::path::PathBuf;
 use std::rc::Rc;
 use std::sync::Arc;
@@ -44,17 +43,22 @@ use crate::cache::IncrementalCache;
 use crate::colors;
 use crate::factory::CliFactory;
 use crate::graph_util::ModuleGraphCreator;
+use crate::sys::CliSys;
 use crate::tools::fmt::run_parallelized;
 use crate::util::display;
 use crate::util::file_watcher;
+use crate::util::file_watcher::WatcherCommunicator;
 use crate::util::fs::canonicalize_path;
 use crate::util::path::is_script_ext;
 use crate::util::sync::AtomicFlag;
 
+mod ast_buffer;
 mod linter;
 mod reporters;
 mod rules;
 
+// TODO(bartlomieju): remove once we wire plugins through the CLI linter
+pub use ast_buffer::serialize_ast_to_buffer;
 pub use linter::CliLinter;
 pub use linter::CliLinterOptions;
 pub use rules::collect_no_slow_type_diagnostics;
@@ -69,136 +73,139 @@ pub async fn lint(
   flags: Arc<Flags>,
   lint_flags: LintFlags,
 ) -> Result<(), AnyError> {
-  if let Some(watch_flags) = &lint_flags.watch {
+  if lint_flags.watch.is_some() {
     if lint_flags.is_stdin() {
       return Err(generic_error(
         "Lint watch on standard input is not supported.",
       ));
     }
-    file_watcher::watch_func(
-      flags,
-      file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
-      move |flags, watcher_communicator, changed_paths| {
-        let lint_flags = lint_flags.clone();
-        watcher_communicator.show_path_changed(changed_paths.clone());
-        Ok(async move {
-          let factory = CliFactory::from_flags(flags);
-          let cli_options = factory.cli_options()?;
-          let lint_config = cli_options.resolve_deno_lint_config()?;
-          let mut paths_with_options_batches =
-            resolve_paths_with_options_batches(cli_options, &lint_flags)?;
-          for paths_with_options in &mut paths_with_options_batches {
-            _ = watcher_communicator
-              .watch_paths(paths_with_options.paths.clone());
-            let files = std::mem::take(&mut paths_with_options.paths);
-            paths_with_options.paths = if let Some(paths) = &changed_paths {
-              // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
-              files
-                .iter()
-                .any(|path| {
-                  canonicalize_path(path)
-                    .map(|p| paths.contains(&p))
-                    .unwrap_or(false)
-                })
-                .then_some(files)
-                .unwrap_or_else(|| [].to_vec())
-            } else {
-              files
-            };
-          }
+    return lint_with_watch(flags, lint_flags).await;
+  }
 
-          let mut linter = WorkspaceLinter::new(
-            factory.caches()?.clone(),
-            factory.lint_rule_provider().await?,
-            factory.module_graph_creator().await?.clone(),
-            cli_options.start_dir.clone(),
-            &cli_options.resolve_workspace_lint_options(&lint_flags)?,
-          );
-          for paths_with_options in paths_with_options_batches {
-            linter
-              .lint_files(
                cli_options,
-                paths_with_options.options,
-                lint_config.clone(),
-                paths_with_options.dir,
-                paths_with_options.paths,
-              )
-              .await?;
-          }
-
-          linter.finish();
-
-          Ok(())
-        })
-      },
-    )
-    .await?;
+  let factory = CliFactory::from_flags(flags);
+  let cli_options = factory.cli_options()?;
+  let lint_rule_provider = factory.lint_rule_provider().await?;
+  let is_stdin = lint_flags.is_stdin();
+  let deno_lint_config = cli_options.resolve_deno_lint_config()?;
+  let workspace_lint_options =
+    cli_options.resolve_workspace_lint_options(&lint_flags)?;
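+  // The stdin path lints a single synthesized file, while the workspace
+  // path batches files per member directory; both feed the shared
+  // exit-code handling below.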
lint_stdin( + cli_options, + lint_rule_provider, + workspace_lint_options, + lint_flags, + deno_lint_config, + )? } else { - let factory = CliFactory::from_flags(flags); - let cli_options = factory.cli_options()?; - let is_stdin = lint_flags.is_stdin(); - let deno_lint_config = cli_options.resolve_deno_lint_config()?; - let workspace_lint_options = - cli_options.resolve_workspace_lint_options(&lint_flags)?; - let success = if is_stdin { - let start_dir = &cli_options.start_dir; - let reporter_lock = Arc::new(Mutex::new(create_reporter( - workspace_lint_options.reporter_kind, - ))); - let lint_config = start_dir - .to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?; - let lint_options = LintOptions::resolve(lint_config, &lint_flags); - let lint_rules = factory - .lint_rule_provider() - .await? - .resolve_lint_rules_err_empty( - lint_options.rules, - start_dir.maybe_deno_json().map(|c| c.as_ref()), - )?; - let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME); - if let Some(ext) = cli_options.ext_flag() { - file_path.set_extension(ext); - } - let r = lint_stdin(&file_path, lint_rules, deno_lint_config); - let success = handle_lint_result( - &file_path.to_string_lossy(), - r, - reporter_lock.clone(), - ); - reporter_lock.lock().close(1); - success - } else { - let mut linter = WorkspaceLinter::new( - factory.caches()?.clone(), - factory.lint_rule_provider().await?, - factory.module_graph_creator().await?.clone(), - cli_options.start_dir.clone(), - &workspace_lint_options, - ); - let paths_with_options_batches = - resolve_paths_with_options_batches(cli_options, &lint_flags)?; - for paths_with_options in paths_with_options_batches { - linter - .lint_files( - cli_options, - paths_with_options.options, - deno_lint_config.clone(), - paths_with_options.dir, - paths_with_options.paths, - ) - .await?; - } - linter.finish() - }; - if !success { - deno_runtime::exit(1); + let mut linter = WorkspaceLinter::new( + factory.caches()?.clone(), + lint_rule_provider, + factory.module_graph_creator().await?.clone(), + cli_options.start_dir.clone(), + &workspace_lint_options, + ); + let paths_with_options_batches = + resolve_paths_with_options_batches(cli_options, &lint_flags)?; + for paths_with_options in paths_with_options_batches { + linter + .lint_files( + cli_options, + paths_with_options.options, + deno_lint_config.clone(), + paths_with_options.dir, + paths_with_options.paths, + ) + .await?; } + linter.finish() + }; + if !success { + deno_runtime::exit(1); } Ok(()) } +async fn lint_with_watch_inner( + flags: Arc, + lint_flags: LintFlags, + watcher_communicator: Arc, + changed_paths: Option>, +) -> Result<(), AnyError> { + let factory = CliFactory::from_flags(flags); + let cli_options = factory.cli_options()?; + let lint_config = cli_options.resolve_deno_lint_config()?; + let mut paths_with_options_batches = + resolve_paths_with_options_batches(cli_options, &lint_flags)?; + for paths_with_options in &mut paths_with_options_batches { + _ = watcher_communicator.watch_paths(paths_with_options.paths.clone()); + + let files = std::mem::take(&mut paths_with_options.paths); + paths_with_options.paths = if let Some(paths) = &changed_paths { + // lint all files on any changed (https://github.com/denoland/deno/issues/12446) + files + .iter() + .any(|path| { + canonicalize_path(path) + .map(|p| paths.contains(&p)) + .unwrap_or(false) + }) + .then_some(files) + .unwrap_or_else(|| [].to_vec()) + } else { + files + }; + } + + let mut linter = WorkspaceLinter::new( + 
factory.caches()?.clone(), + factory.lint_rule_provider().await?, + factory.module_graph_creator().await?.clone(), + cli_options.start_dir.clone(), + &cli_options.resolve_workspace_lint_options(&lint_flags)?, + ); + for paths_with_options in paths_with_options_batches { + linter + .lint_files( + cli_options, + paths_with_options.options, + lint_config.clone(), + paths_with_options.dir, + paths_with_options.paths, + ) + .await?; + } + + linter.finish(); + + Ok(()) +} + +async fn lint_with_watch( + flags: Arc, + lint_flags: LintFlags, +) -> Result<(), AnyError> { + let watch_flags = lint_flags.watch.as_ref().unwrap(); + + file_watcher::watch_func( + flags, + file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen), + move |flags, watcher_communicator, changed_paths| { + let lint_flags = lint_flags.clone(); + watcher_communicator.show_path_changed(changed_paths.clone()); + Ok(lint_with_watch_inner( + flags, + lint_flags, + watcher_communicator, + changed_paths, + )) + }, + ) + .await +} + struct PathsWithOptions { dir: WorkspaceDirectory, paths: Vec, @@ -269,7 +276,7 @@ impl WorkspaceLinter { &mut self, cli_options: &Arc, lint_options: LintOptions, - lint_config: LintConfig, + lint_config: DenoLintConfig, member_dir: WorkspaceDirectory, paths: Vec, ) -> Result<(), AnyError> { @@ -294,112 +301,63 @@ impl WorkspaceLinter { deno_lint_config: lint_config, })); + let has_error = self.has_error.clone(); + let reporter_lock = self.reporter_lock.clone(); + let mut futures = Vec::with_capacity(2); if linter.has_package_rules() { - if self.workspace_module_graph.is_none() { - let module_graph_creator = self.module_graph_creator.clone(); - let packages = self.workspace_dir.jsr_packages_for_publish(); - self.workspace_module_graph = Some( - async move { - module_graph_creator - .create_and_validate_publish_graph(&packages, true) - .await - .map(Rc::new) - .map_err(Rc::new) - } - .boxed_local() - .shared_local(), - ); - } - let workspace_module_graph_future = - self.workspace_module_graph.as_ref().unwrap().clone(); - let publish_config = member_dir.maybe_package_config(); - if let Some(publish_config) = publish_config { - let has_error = self.has_error.clone(); - let reporter_lock = self.reporter_lock.clone(); - let linter = linter.clone(); - let path_urls = paths - .iter() - .filter_map(|p| ModuleSpecifier::from_file_path(p).ok()) - .collect::>(); - futures.push( - async move { - let graph = workspace_module_graph_future - .await - .map_err(|err| anyhow!("{:#}", err))?; - let export_urls = - publish_config.config_file.resolve_export_value_urls()?; - if !export_urls.iter().any(|url| path_urls.contains(url)) { - return Ok(()); // entrypoint is not specified, so skip - } - let diagnostics = linter.lint_package(&graph, &export_urls); - if !diagnostics.is_empty() { - has_error.raise(); - let mut reporter = reporter_lock.lock(); - for diagnostic in &diagnostics { - reporter.visit_diagnostic(diagnostic); - } - } - Ok(()) - } - .boxed_local(), - ); + if let Some(fut) = self.run_package_rules(&linter, &member_dir, &paths) { + futures.push(fut); } } - futures.push({ - let has_error = self.has_error.clone(); - let reporter_lock = self.reporter_lock.clone(); - let maybe_incremental_cache = maybe_incremental_cache.clone(); - let linter = linter.clone(); - let cli_options = cli_options.clone(); - async move { - run_parallelized(paths, { - move |file_path| { - let file_text = - deno_ast::strip_bom(fs::read_to_string(&file_path)?); + let maybe_incremental_cache_ = maybe_incremental_cache.clone(); + let 
linter = linter.clone(); + let cli_options = cli_options.clone(); + let fut = async move { + let operation = move |file_path: PathBuf| { + let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?); - // don't bother rechecking this file if it didn't have any diagnostics before - if let Some(incremental_cache) = &maybe_incremental_cache { - if incremental_cache.is_file_same(&file_path, &file_text) { - return Ok(()); - } - } - - let r = linter.lint_file( - &file_path, - file_text, - cli_options.ext_flag().as_deref(), - ); - if let Ok((file_source, file_diagnostics)) = &r { - if let Some(incremental_cache) = &maybe_incremental_cache { - if file_diagnostics.is_empty() { - // update the incremental cache if there were no diagnostics - incremental_cache.update_file( - &file_path, - // ensure the returned text is used here as it may have been modified via --fix - file_source.text(), - ) - } - } - } - - let success = handle_lint_result( - &file_path.to_string_lossy(), - r, - reporter_lock.clone(), - ); - if !success { - has_error.raise(); - } - - Ok(()) + // don't bother rechecking this file if it didn't have any diagnostics before + if let Some(incremental_cache) = &maybe_incremental_cache_ { + if incremental_cache.is_file_same(&file_path, &file_text) { + return Ok(()); } - }) - .await - } - .boxed_local() - }); + } + + let r = linter.lint_file( + &file_path, + file_text, + cli_options.ext_flag().as_deref(), + ); + if let Ok((file_source, file_diagnostics)) = &r { + if let Some(incremental_cache) = &maybe_incremental_cache_ { + if file_diagnostics.is_empty() { + // update the incremental cache if there were no diagnostics + incremental_cache.update_file( + &file_path, + // ensure the returned text is used here as it may have been modified via --fix + file_source.text(), + ) + } + } + } + + let success = handle_lint_result( + &file_path.to_string_lossy(), + r, + reporter_lock.clone(), + ); + if !success { + has_error.raise(); + } + + Ok(()) + }; + run_parallelized(paths, operation).await + } + .boxed_local(); + futures.push(fut); if lint_options.fix { // run sequentially when using `--fix` to lower the chances of weird @@ -419,6 +377,63 @@ impl WorkspaceLinter { Ok(()) } + fn run_package_rules( + &mut self, + linter: &Arc, + member_dir: &WorkspaceDirectory, + paths: &[PathBuf], + ) -> Option>> { + if self.workspace_module_graph.is_none() { + let module_graph_creator = self.module_graph_creator.clone(); + let packages = self.workspace_dir.jsr_packages_for_publish(); + self.workspace_module_graph = Some( + async move { + module_graph_creator + .create_and_validate_publish_graph(&packages, true) + .await + .map(Rc::new) + .map_err(Rc::new) + } + .boxed_local() + .shared_local(), + ); + } + + let workspace_module_graph_future = + self.workspace_module_graph.as_ref().unwrap().clone(); + let maybe_publish_config = member_dir.maybe_package_config(); + let publish_config = maybe_publish_config?; + + let has_error = self.has_error.clone(); + let reporter_lock = self.reporter_lock.clone(); + let linter = linter.clone(); + let path_urls = paths + .iter() + .filter_map(|p| ModuleSpecifier::from_file_path(p).ok()) + .collect::>(); + let fut = async move { + let graph = workspace_module_graph_future + .await + .map_err(|err| anyhow!("{:#}", err))?; + let export_urls = + publish_config.config_file.resolve_export_value_urls()?; + if !export_urls.iter().any(|url| path_urls.contains(url)) { + return Ok(()); // entrypoint is not specified, so skip + } + let diagnostics = linter.lint_package(&graph, 
&export_urls); + if !diagnostics.is_empty() { + has_error.raise(); + let mut reporter = reporter_lock.lock(); + for diagnostic in &diagnostics { + reporter.visit_diagnostic(diagnostic); + } + } + Ok(()) + } + .boxed_local(); + Some(fut) + } + pub fn finish(self) -> bool { debug!("Found {} files", self.file_count); self.reporter_lock.lock().close(self.file_count); @@ -438,7 +453,7 @@ fn collect_lint_files( .ignore_node_modules() .use_gitignore() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files) + .collect_file_patterns(&CliSys::default(), files) } #[allow(clippy::print_stdout)] @@ -494,10 +509,27 @@ pub fn print_rules_list(json: bool, maybe_rules_tags: Option>) { /// Treats input as TypeScript. /// Compatible with `--json` flag. fn lint_stdin( - file_path: &Path, - configured_rules: ConfiguredRules, - deno_lint_config: LintConfig, -) -> Result<(ParsedSource, Vec), AnyError> { + cli_options: &Arc, + lint_rule_provider: LintRuleProvider, + workspace_lint_options: WorkspaceLintOptions, + lint_flags: LintFlags, + deno_lint_config: DenoLintConfig, +) -> Result { + let start_dir = &cli_options.start_dir; + let reporter_lock = Arc::new(Mutex::new(create_reporter( + workspace_lint_options.reporter_kind, + ))); + let lint_config = start_dir + .to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?; + let lint_options = LintOptions::resolve(lint_config, &lint_flags); + let configured_rules = lint_rule_provider.resolve_lint_rules_err_empty( + lint_options.rules, + start_dir.maybe_deno_json().map(|c| c.as_ref()), + )?; + let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME); + if let Some(ext) = cli_options.ext_flag() { + file_path.set_extension(ext); + } let mut source_code = String::new(); if stdin().read_to_string(&mut source_code).is_err() { return Err(generic_error("Failed to read from stdin")); @@ -509,9 +541,14 @@ fn lint_stdin( deno_lint_config, }); - linter - .lint_file(file_path, deno_ast::strip_bom(source_code), None) - .map_err(AnyError::from) + let r = linter + .lint_file(&file_path, deno_ast::strip_bom(source_code), None) + .map_err(AnyError::from); + + let success = + handle_lint_result(&file_path.to_string_lossy(), r, reporter_lock.clone()); + reporter_lock.lock().close(1); + Ok(success) } fn handle_lint_result( diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs index 001e401459..45a040d236 100644 --- a/cli/tools/registry/mod.rs +++ b/cli/tools/registry/mod.rs @@ -26,6 +26,7 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::url::Url; +use deno_runtime::deno_fetch; use deno_terminal::colors; use http_body_util::BodyExt; use serde::Deserialize; @@ -911,9 +912,7 @@ async fn publish_package( package.config ); - let body = http_body_util::Full::new(package.tarball.bytes.clone()) - .map_err(|never| match never {}) - .boxed(); + let body = deno_fetch::ReqBody::full(package.tarball.bytes.clone()); let response = http_client .post(url.parse()?, body)? 
.header( diff --git a/cli/tools/registry/paths.rs b/cli/tools/registry/paths.rs index 8b6c05fc01..1c675982df 100644 --- a/cli/tools/registry/paths.rs +++ b/cli/tools/registry/paths.rs @@ -6,6 +6,7 @@ use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; +use crate::sys::CliSys; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_config::glob::FileCollector; @@ -323,11 +324,11 @@ fn collect_paths( file_patterns: FilePatterns, ) -> Result, AnyError> { FileCollector::new(|e| { - if !e.metadata.is_file { + if !e.metadata.file_type().is_file() { if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) { diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType { specifier, - kind: if e.metadata.is_symlink { + kind: if e.metadata.file_type().is_symlink() { "symlink".to_string() } else { "Unknown".to_string() @@ -345,5 +346,5 @@ fn collect_paths( .ignore_node_modules() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .use_gitignore() - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, file_patterns) + .collect_file_patterns(&CliSys::default(), file_patterns) } diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs index 6f89ec7aae..ab4d92762f 100644 --- a/cli/tools/registry/pm.rs +++ b/cli/tools/registry/pm.rs @@ -4,6 +4,7 @@ use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use deno_cache_dir::file_fetcher::CacheSetting; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; @@ -14,6 +15,7 @@ use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use deno_semver::StackString; use deno_semver::Version; use deno_semver::VersionReq; use deps::KeyPath; @@ -23,12 +25,11 @@ use jsonc_parser::cst::CstRootNode; use jsonc_parser::json; use crate::args::AddFlags; -use crate::args::CacheSetting; use crate::args::CliOptions; use crate::args::Flags; use crate::args::RemoveFlags; use crate::factory::CliFactory; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; use crate::jsr::JsrFetchResolver; use crate::npm::NpmFetchResolver; @@ -283,7 +284,7 @@ fn package_json_dependency_entry( (npm_package.into(), selected.version_req) } else { ( - selected.import_name, + selected.import_name.into_string(), format!("npm:{}@{}", npm_package, selected.version_req), ) } @@ -292,7 +293,7 @@ fn package_json_dependency_entry( let scope_replaced = jsr_package.replace('/', "__"); let version_req = format!("npm:@jsr/{scope_replaced}@{}", selected.version_req); - (selected.import_name, version_req) + (selected.import_name.into_string(), version_req) } else { (selected.package_name, selected.version_req) } @@ -411,18 +412,19 @@ pub async fn add( let http_client = cli_factory.http_client_provider(); let deps_http_cache = cli_factory.global_http_cache()?; - let mut deps_file_fetcher = FileFetcher::new( + let deps_file_fetcher = CliFileFetcher::new( deps_http_cache.clone(), - CacheSetting::ReloadAll, - true, http_client.clone(), + cli_factory.sys(), Default::default(), None, + true, + CacheSetting::ReloadAll, + log::Level::Trace, ); let npmrc = cli_factory.cli_options().unwrap().npmrc(); - deps_file_fetcher.set_download_log_level(log::Level::Trace); let deps_file_fetcher = Arc::new(deps_file_fetcher); let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone())); let npm_resolver = @@ -549,10 +551,10 @@ pub async fn add( } struct 
SelectedPackage { - import_name: String, + import_name: StackString, package_name: String, version_req: String, - selected_version: String, + selected_version: StackString, } enum NotFoundHelp { @@ -683,7 +685,7 @@ async fn find_package_and_select_version_for_req( import_name: add_package_req.alias, package_name: prefixed_name, version_req: format!("{}{}", range_symbol, &nv.version), - selected_version: nv.version.to_string(), + selected_version: nv.version.to_custom_string::(), })) } @@ -705,7 +707,7 @@ enum AddRmPackageReqValue { #[derive(Debug, PartialEq, Eq)] pub struct AddRmPackageReq { - alias: String, + alias: StackString, value: AddRmPackageReqValue, } @@ -753,7 +755,11 @@ impl AddRmPackageReq { return Ok(Err(PackageReq::from_str(entry_text)?)); } - (maybe_prefix.unwrap(), Some(alias.to_string()), entry_text) + ( + maybe_prefix.unwrap(), + Some(StackString::from(alias)), + entry_text, + ) } None => return Ok(Err(PackageReq::from_str(entry_text)?)), }, @@ -765,7 +771,7 @@ impl AddRmPackageReq { JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?; let package_req = req_ref.into_inner().req; Ok(Ok(AddRmPackageReq { - alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()), + alias: maybe_alias.unwrap_or_else(|| package_req.name.clone()), value: AddRmPackageReqValue::Jsr(package_req), })) } @@ -785,7 +791,7 @@ impl AddRmPackageReq { ); } Ok(Ok(AddRmPackageReq { - alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()), + alias: maybe_alias.unwrap_or_else(|| package_req.name.clone()), value: AddRmPackageReqValue::Npm(package_req), })) } @@ -878,14 +884,14 @@ mod test { assert_eq!( AddRmPackageReq::parse("jsr:foo").unwrap().unwrap(), AddRmPackageReq { - alias: "foo".to_string(), + alias: "foo".into(), value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); assert_eq!( AddRmPackageReq::parse("alias@jsr:foo").unwrap().unwrap(), AddRmPackageReq { - alias: "alias".to_string(), + alias: "alias".into(), value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); @@ -894,7 +900,7 @@ mod test { .unwrap() .unwrap(), AddRmPackageReq { - alias: "@alias/pkg".to_string(), + alias: "@alias/pkg".into(), value: AddRmPackageReqValue::Npm( PackageReq::from_str("foo@latest").unwrap() ) @@ -905,7 +911,7 @@ mod test { .unwrap() .unwrap(), AddRmPackageReq { - alias: "@alias/pkg".to_string(), + alias: "@alias/pkg".into(), value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); @@ -914,7 +920,7 @@ mod test { .unwrap() .unwrap(), AddRmPackageReq { - alias: "alias".to_string(), + alias: "alias".into(), value: AddRmPackageReqValue::Jsr( PackageReq::from_str("foo@^1.5.0").unwrap() ) diff --git a/cli/tools/registry/pm/deps.rs b/cli/tools/registry/pm/deps.rs index e4c38276f7..ffa53417e9 100644 --- a/cli/tools/registry/pm/deps.rs +++ b/cli/tools/registry/pm/deps.rs @@ -3,7 +3,6 @@ use std::borrow::Cow; use std::collections::HashMap; use std::path::PathBuf; -use std::sync::atomic::AtomicBool; use std::sync::Arc; use deno_ast::ModuleSpecifier; @@ -28,6 +27,8 @@ use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; +use deno_semver::StackString; +use deno_semver::Version; use deno_semver::VersionReq; use import_map::ImportMap; use import_map::ImportMapWithDiagnostics; @@ -42,6 +43,7 @@ use crate::jsr::JsrFetchResolver; use crate::module_loader::ModuleLoadPreparer; use crate::npm::CliNpmResolver; use 
crate::npm::NpmFetchResolver; +use crate::util::sync::AtomicFlag; use super::ConfigUpdater; @@ -138,13 +140,7 @@ pub enum KeyPart { Scopes, Dependencies, DevDependencies, - String(String), -} - -impl From for KeyPart { - fn from(value: String) -> Self { - KeyPart::String(value) - } + String(StackString), } impl From for KeyPart { @@ -163,7 +159,7 @@ impl KeyPart { KeyPart::Scopes => "scopes", KeyPart::Dependencies => "dependencies", KeyPart::DevDependencies => "devDependencies", - KeyPart::String(s) => s, + KeyPart::String(s) => s.as_str(), } } } @@ -216,12 +212,12 @@ fn import_map_entries( .chain(import_map.scopes().flat_map(|scope| { let path = KeyPath::from_parts([ KeyPart::Scopes, - scope.raw_key.to_string().into(), + KeyPart::String(scope.raw_key.into()), ]); scope.imports.entries().map(move |entry| { let mut full_path = path.clone(); - full_path.push(KeyPart::String(entry.raw_key.to_string())); + full_path.push(KeyPart::String(entry.raw_key.into())); (full_path, entry) }) })) @@ -337,7 +333,7 @@ fn add_deps_from_package_json( package_json: &PackageJsonRc, mut filter: impl DepFilter, package_dep_kind: PackageJsonDepKind, - package_json_deps: PackageJsonDepsMap, + package_json_deps: &PackageJsonDepsMap, deps: &mut Vec, ) { for (k, v) in package_json_deps { @@ -352,7 +348,7 @@ fn add_deps_from_package_json( deno_package_json::PackageJsonDepValue::Req(req) => { let alias = k.as_str(); let alias = (alias != req.name).then(|| alias.to_string()); - if !filter.should_include(alias.as_deref(), &req, DepKind::Npm) { + if !filter.should_include(alias.as_deref(), req, DepKind::Npm) { continue; } let id = DepId(deps.len()); @@ -361,9 +357,12 @@ fn add_deps_from_package_json( kind: DepKind::Npm, location: DepLocation::PackageJson( package_json.clone(), - KeyPath::from_parts([package_dep_kind.into(), k.into()]), + KeyPath::from_parts([ + package_dep_kind.into(), + KeyPart::String(k.clone()), + ]), ), - req, + req: req.clone(), alias, }) } @@ -376,14 +375,14 @@ fn add_deps_from_package_json( package_json, filter, PackageJsonDepKind::Normal, - package_json_deps.dependencies, + &package_json_deps.dependencies, deps, ); iterate( package_json, filter, PackageJsonDepKind::Dev, - package_json_deps.dev_dependencies, + &package_json_deps.dev_dependencies, deps, ); } @@ -447,7 +446,7 @@ pub struct DepManager { pending_changes: Vec, - dependencies_resolved: AtomicBool, + dependencies_resolved: AtomicFlag, module_load_preparer: Arc, // TODO(nathanwhit): probably shouldn't be pub pub(crate) jsr_fetch_resolver: Arc, @@ -489,7 +488,7 @@ impl DepManager { resolved_versions: Vec::new(), latest_versions: Vec::new(), jsr_fetch_resolver, - dependencies_resolved: AtomicBool::new(false), + dependencies_resolved: AtomicFlag::lowered(), module_load_preparer, npm_fetch_resolver, npm_resolver, @@ -530,10 +529,7 @@ impl DepManager { } async fn run_dependency_resolution(&self) -> Result<(), AnyError> { - if self - .dependencies_resolved - .load(std::sync::atomic::Ordering::Relaxed) - { + if self.dependencies_resolved.is_raised() { return Ok(()); } @@ -556,9 +552,7 @@ impl DepManager { } DepKind::Jsr => graph.packages.mappings().contains_key(&dep.req), }) { - self - .dependencies_resolved - .store(true, std::sync::atomic::Ordering::Relaxed); + self.dependencies_resolved.raise(); graph_permit.commit(); return Ok(()); } @@ -613,6 +607,7 @@ impl DepManager { ) .await?; + self.dependencies_resolved.raise(); graph_permit.commit(); Ok(()) @@ -655,10 +650,6 @@ impl DepManager { if self.latest_versions.len() == self.deps.len() { 
return Ok(self.latest_versions.clone());
     }
-    let latest_tag_req = deno_semver::VersionReq::from_raw_text_and_inner(
-      "latest".into(),
-      deno_semver::RangeSetOrTag::Tag("latest".into()),
-    );
     let mut latest_versions = Vec::with_capacity(self.deps.len());
 
     let npm_sema = Semaphore::new(32);
@@ -670,14 +661,25 @@ impl DepManager {
       DepKind::Npm => futs.push_back(
        async {
           let semver_req = &dep.req;
-          let latest_req = PackageReq {
-            name: dep.req.name.clone(),
-            version_req: latest_tag_req.clone(),
-          };
           let _permit = npm_sema.acquire().await;
           let semver_compatible =
             self.npm_fetch_resolver.req_to_nv(semver_req).await;
-          let latest = self.npm_fetch_resolver.req_to_nv(&latest_req).await;
+          let info =
+            self.npm_fetch_resolver.package_info(&semver_req.name).await;
+          let latest = info
+            .and_then(|info| {
+              let latest_tag = info.dist_tags.get("latest")?;
+              let lower_bound = &semver_compatible.as_ref()?.version;
+              if latest_tag > lower_bound {
+                Some(latest_tag.clone())
+              } else {
+                latest_version(Some(latest_tag), info.versions.keys())
+              }
+            })
+            .map(|version| PackageNv {
+              name: semver_req.name.clone(),
+              version,
+            });
           PackageLatestVersion {
             latest,
             semver_compatible,
@@ -688,14 +690,29 @@
       DepKind::Jsr => futs.push_back(
         async {
           let semver_req = &dep.req;
-          let latest_req = PackageReq {
-            name: dep.req.name.clone(),
-            version_req: deno_semver::WILDCARD_VERSION_REQ.clone(),
-          };
           let _permit = jsr_sema.acquire().await;
           let semver_compatible =
             self.jsr_fetch_resolver.req_to_nv(semver_req).await;
-          let latest = self.jsr_fetch_resolver.req_to_nv(&latest_req).await;
+          let info =
+            self.jsr_fetch_resolver.package_info(&semver_req.name).await;
+          let latest = info
+            .and_then(|info| {
+              let lower_bound = &semver_compatible.as_ref()?.version;
+              latest_version(
+                Some(lower_bound),
+                info.versions.iter().filter_map(|(version, version_info)| {
+                  if !version_info.yanked {
+                    Some(version)
+                  } else {
+                    None
+                  }
+                }),
+              )
+            })
+            .map(|version| PackageNv {
+              name: semver_req.name.clone(),
+              version,
+            });
           PackageLatestVersion {
             latest,
             semver_compatible,
@@ -893,3 +910,18 @@ fn parse_req_reference(
     DepKind::Jsr => JsrPackageReqReference::from_str(input)?.into_inner(),
   })
 }
+
+fn latest_version<'a>(
+  start: Option<&Version>,
+  versions: impl IntoIterator<Item = &'a Version>,
+) -> Option<Version> {
+  let mut best = start;
+  for version in versions {
+    match best {
+      Some(best_version) if version > best_version => best = Some(version),
+      None => best = Some(version),
+      _ => {}
+    }
+  }
+  best.cloned()
+}
diff --git a/cli/tools/registry/pm/outdated.rs b/cli/tools/registry/pm/outdated.rs
index aef65a5de0..bb4c60fde8 100644
--- a/cli/tools/registry/pm/outdated.rs
+++ b/cli/tools/registry/pm/outdated.rs
@@ -3,19 +3,20 @@
 use std::collections::HashSet;
 use std::sync::Arc;
 
+use deno_cache_dir::file_fetcher::CacheSetting;
 use deno_core::anyhow::bail;
 use deno_core::error::AnyError;
 use deno_semver::package::PackageNv;
 use deno_semver::package::PackageReq;
+use deno_semver::StackString;
 use deno_semver::VersionReq;
 use deno_terminal::colors;
 
-use crate::args::CacheSetting;
 use crate::args::CliOptions;
 use crate::args::Flags;
 use crate::args::OutdatedFlags;
 use crate::factory::CliFactory;
-use crate::file_fetcher::FileFetcher;
+use crate::file_fetcher::CliFileFetcher;
 use crate::jsr::JsrFetchResolver;
 use crate::npm::NpmFetchResolver;
 use crate::tools::registry::pm::deps::DepKind;
@@ -31,7 +32,7 @@
 struct OutdatedPackage {
   latest: String,
   semver_compatible: String,
   current: String,
-  name: String,
+  name: StackString,
 }
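Note on the `latest_version` helper added above: it replaces a second registry lookup (a synthetic `latest` tag requirement) with a local scan of the already-fetched package info, seeded with an optional lower bound so the reported latest is never older than the semver-compatible resolution. A minimal standalone sketch of the same selection logic, with a toy version type standing in for `deno_semver::Version` (the toy type and `main` are illustrative only):

```rust
// Toy stand-in for deno_semver::Version; derived Ord compares
// (major, minor, patch) lexicographically, like semver precedence
// without prerelease handling.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Version(u64, u64, u64);

// Same shape as the helper in cli/tools/registry/pm/deps.rs above:
// fold over candidate versions, keeping the maximum, starting from
// an optional lower bound.
fn latest_version<'a>(
  start: Option<&'a Version>,
  versions: impl IntoIterator<Item = &'a Version>,
) -> Option<Version> {
  let mut best = start;
  for version in versions {
    match best {
      Some(best_version) if version > best_version => best = Some(version),
      None => best = Some(version),
      _ => {}
    }
  }
  best.cloned()
}

fn main() {
  let published = [Version(1, 0, 0), Version(1, 2, 0), Version(1, 1, 3)];
  let current = Version(1, 1, 3);
  // Seeding with the current resolution guarantees the answer is
  // never older than what is already resolved.
  assert_eq!(
    latest_version(Some(&current), published.iter()),
    Some(Version(1, 2, 0))
  );
}
```

Seeding the fold also covers registries whose `latest` dist-tag lags behind the version already resolved, which is exactly what the `latest_tag > lower_bound` check in the npm arm above guards against.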
#[allow(clippy::print_stdout)] @@ -181,15 +182,16 @@ pub async fn outdated( let workspace = cli_options.workspace(); let http_client = factory.http_client_provider(); let deps_http_cache = factory.global_http_cache()?; - let mut file_fetcher = FileFetcher::new( + let file_fetcher = CliFileFetcher::new( deps_http_cache.clone(), - CacheSetting::RespectHeaders, - true, http_client.clone(), + factory.sys(), Default::default(), None, + true, + CacheSetting::RespectHeaders, + log::Level::Trace, ); - file_fetcher.set_download_log_level(log::Level::Trace); let file_fetcher = Arc::new(file_fetcher); let npm_fetch_resolver = Arc::new(NpmFetchResolver::new( file_fetcher.clone(), diff --git a/cli/tools/registry/unfurl.rs b/cli/tools/registry/unfurl.rs index bf6aaaf50d..989a6e1ed4 100644 --- a/cli/tools/registry/unfurl.rs +++ b/cli/tools/registry/unfurl.rs @@ -658,12 +658,12 @@ mod tests { use crate::resolver::SloppyImportsCachedFs; use super::*; + use crate::sys::CliSys; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_config::workspace::ResolverWorkspaceJsrPackage; use deno_core::serde_json::json; use deno_core::url::Url; - use deno_runtime::deno_fs::RealFs; use deno_runtime::deno_node::PackageJson; use deno_semver::Version; use import_map::ImportMapWithDiagnostics; @@ -722,10 +722,9 @@ mod tests { vec![Arc::new(package_json)], deno_config::workspace::PackageJsonDepResolution::Enabled, ); - let fs = Arc::new(RealFs); let unfurler = SpecifierUnfurler::new( Some(Arc::new(CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new(fs), + SloppyImportsCachedFs::new(CliSys::default()), ))), Arc::new(workspace_resolver), true, @@ -863,10 +862,10 @@ const warn2 = await import(`${expr}`); ], deno_config::workspace::PackageJsonDepResolution::Enabled, ); - let fs = Arc::new(RealFs); + let sys = CliSys::default(); let unfurler = SpecifierUnfurler::new( Some(Arc::new(CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new(fs), + SloppyImportsCachedFs::new(sys), ))), Arc::new(workspace_resolver), true, diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index a303046879..9fb4624fa4 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -11,7 +11,8 @@ use crate::args::ReplFlags; use crate::cdp; use crate::colors; use crate::factory::CliFactory; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; +use crate::file_fetcher::TextDecodedFile; use deno_core::error::AnyError; use deno_core::futures::StreamExt; use deno_core::serde_json; @@ -143,7 +144,7 @@ async fn read_line_and_poll( async fn read_eval_file( cli_options: &CliOptions, - file_fetcher: &FileFetcher, + file_fetcher: &CliFileFetcher, eval_file: &str, ) -> Result, AnyError> { let specifier = @@ -151,7 +152,7 @@ async fn read_eval_file( let file = file_fetcher.fetch_bypass_permissions(&specifier).await?; - Ok(file.into_text_decoded()?.source) + Ok(TextDecodedFile::decode(file)?.source) } #[allow(clippy::print_stdout)] diff --git a/cli/tools/run/mod.rs b/cli/tools/run/mod.rs index d3f7b093d4..cd7d1dd6c4 100644 --- a/cli/tools/run/mod.rs +++ b/cli/tools/run/mod.rs @@ -3,6 +3,7 @@ use std::io::Read; use std::sync::Arc; +use deno_cache_dir::file_fetcher::File; use deno_config::deno_json::NodeModulesDirMode; use deno_core::error::AnyError; use deno_runtime::WorkerExecutionMode; @@ -11,7 +12,6 @@ use crate::args::EvalFlags; use crate::args::Flags; use crate::args::WatchFlagsWithPaths; use crate::factory::CliFactory; -use crate::file_fetcher::File; use crate::util; use 
crate::util::file_watcher::WatcherRestartMode; @@ -97,7 +97,7 @@ pub async fn run_from_stdin(flags: Arc) -> Result { // Save a fake file into file fetcher cache // to allow module access by TS compiler file_fetcher.insert_memory_files(File { - specifier: main_module.clone(), + url: main_module.clone(), maybe_headers: None, source: source.into(), }); @@ -184,7 +184,7 @@ pub async fn eval_command( // Save a fake file into file fetcher cache // to allow module access by TS compiler. file_fetcher.insert_memory_files(File { - specifier: main_module.clone(), + url: main_module.clone(), maybe_headers: None, source: source_code.into_bytes().into(), }); diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 25d1d66710..4d83cc98fd 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -25,7 +25,6 @@ use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; use deno_core::url::Url; use deno_path_util::normalize_path; -use deno_runtime::deno_node::NodeResolver; use deno_task_shell::KillSignal; use deno_task_shell::ShellCommand; use indexmap::IndexMap; @@ -36,6 +35,7 @@ use crate::args::Flags; use crate::args::TaskFlags; use crate::colors; use crate::factory::CliFactory; +use crate::node::CliNodeResolver; use crate::npm::CliNpmResolver; use crate::task_runner; use crate::task_runner::run_future_forwarding_signals; @@ -94,7 +94,7 @@ pub async fn execute_script( return Ok(0); }; - let task_regex = arg_to_regex(task_name)?; + let task_name_filter = arg_to_task_name_filter(task_name)?; let mut packages_task_info: Vec = vec![]; for folder in workspace.config_folders() { @@ -137,12 +137,20 @@ pub async fn execute_script( // Match tasks in deno.json for name in tasks_config.task_names() { - if task_regex.is_match(name) && !visited.contains(name) { + let matches_filter = match &task_name_filter { + TaskNameFilter::Exact(n) => *n == name, + TaskNameFilter::Regex(re) => re.is_match(name), + }; + if matches_filter && !visited.contains(name) { matched.insert(name.to_string()); visit_task(&tasks_config, &mut visited, name); } } + if matched.is_empty() { + continue; + } + packages_task_info.push(PackageTaskInfo { matched_tasks: matched .iter() @@ -223,7 +231,7 @@ pub async fn execute_script( &Url::from_directory_path(cli_options.initial_cwd()).unwrap(), "", &TaskDefinition { - command: task_flags.task.as_ref().unwrap().to_string(), + command: Some(task_flags.task.as_ref().unwrap().to_string()), dependencies: vec![], description: None, }, @@ -259,7 +267,7 @@ struct RunSingleOptions<'a> { struct TaskRunner<'a> { task_flags: &'a TaskFlags, npm_resolver: &'a dyn CliNpmResolver, - node_resolver: &'a NodeResolver, + node_resolver: &'a CliNodeResolver, env_vars: HashMap, cli_options: &'a CliOptions, concurrency: usize, @@ -440,6 +448,16 @@ impl<'a> TaskRunner<'a> { kill_signal: KillSignal, argv: &'a [String], ) -> Result { + let Some(command) = &definition.command else { + log::info!( + "{} {} {}", + colors::green("Task"), + colors::cyan(task_name), + colors::gray("(no command)") + ); + return Ok(0); + }; + if let Some(npm_resolver) = self.npm_resolver.as_managed() { npm_resolver.ensure_top_level_package_json_install().await?; npm_resolver @@ -461,7 +479,7 @@ impl<'a> TaskRunner<'a> { self .run_single(RunSingleOptions { task_name, - script: &definition.command, + script: command, cwd: &cwd, custom_commands, kill_signal, @@ -829,7 +847,7 @@ fn print_available_tasks( is_deno: false, name: name.to_string(), task: deno_config::deno_json::TaskDefinition { - command: script.to_string(), + command: 
Some(script.to_string()), dependencies: vec![], description: None, }, @@ -865,11 +883,13 @@ fn print_available_tasks( )?; } } - writeln!( - writer, - " {}", - strip_ansi_codes_and_escape_control_chars(&desc.task.command) - )?; + if let Some(command) = &desc.task.command { + writeln!( + writer, + " {}", + strip_ansi_codes_and_escape_control_chars(command) + )?; + }; if !desc.task.dependencies.is_empty() { let dependencies = desc .task @@ -902,3 +922,41 @@ fn strip_ansi_codes_and_escape_control_chars(s: &str) -> String { }) .collect() } + +fn arg_to_task_name_filter(input: &str) -> Result { + if !input.contains("*") { + return Ok(TaskNameFilter::Exact(input)); + } + + let mut regex_str = regex::escape(input); + regex_str = regex_str.replace("\\*", ".*"); + let re = Regex::new(®ex_str)?; + Ok(TaskNameFilter::Regex(re)) +} + +#[derive(Debug)] +enum TaskNameFilter<'s> { + Exact(&'s str), + Regex(regex::Regex), +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_arg_to_task_name_filter() { + assert!(matches!( + arg_to_task_name_filter("test").unwrap(), + TaskNameFilter::Exact("test") + )); + assert!(matches!( + arg_to_task_name_filter("test-").unwrap(), + TaskNameFilter::Exact("test-") + )); + assert!(matches!( + arg_to_task_name_filter("test*").unwrap(), + TaskNameFilter::Regex(_) + )); + } +} diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs index 2e46bdd4da..3745d7c7ec 100644 --- a/cli/tools/test/mod.rs +++ b/cli/tools/test/mod.rs @@ -7,10 +7,10 @@ use crate::args::TestReporterConfig; use crate::colors; use crate::display; use crate::factory::CliFactory; -use crate::file_fetcher::File; -use crate::file_fetcher::FileFetcher; +use crate::file_fetcher::CliFileFetcher; use crate::graph_util::has_graph_root_local_dependent_changed; use crate::ops; +use crate::sys::CliSys; use crate::util::extract::extract_doc_tests; use crate::util::file_watcher; use crate::util::fs::collect_specifiers; @@ -21,6 +21,7 @@ use crate::worker::CliMainWorkerFactory; use crate::worker::CoverageCollector; use deno_ast::MediaType; +use deno_cache_dir::file_fetcher::File; use deno_config::glob::FilePatterns; use deno_config::glob::WalkEntry; use deno_core::anyhow; @@ -616,7 +617,10 @@ async fn configure_main_worker( WorkerExecutionMode::Test, specifier.clone(), permissions_container, - vec![ops::testing::deno_test::init_ops(worker_sender.sender)], + vec![ + ops::testing::deno_test::init_ops(worker_sender.sender), + ops::lint::deno_lint::init_ops(), + ], Stdio { stdin: StdioPipe::inherit(), stdout: StdioPipe::file(worker_sender.stdout), @@ -1191,7 +1195,7 @@ static HAS_TEST_RUN_SIGINT_HANDLER: AtomicBool = AtomicBool::new(false); async fn test_specifiers( worker_factory: Arc, permissions: &Permissions, - permission_desc_parser: &Arc, + permission_desc_parser: &Arc>, specifiers: Vec, options: TestSpecifiersOptions, ) -> Result<(), AnyError> { @@ -1514,7 +1518,7 @@ fn collect_specifiers_with_test_mode( /// as well. async fn fetch_specifiers_with_test_mode( cli_options: &CliOptions, - file_fetcher: &FileFetcher, + file_fetcher: &CliFileFetcher, member_patterns: impl Iterator, doc: &bool, ) -> Result, AnyError> { @@ -1822,7 +1826,7 @@ pub async fn run_tests_with_watch( /// Extracts doc tests from files specified by the given specifiers. 
async fn get_doc_tests( specifiers_with_mode: &[(Url, TestMode)], - file_fetcher: &FileFetcher, + file_fetcher: &CliFileFetcher, ) -> Result, AnyError> { let specifiers_needing_extraction = specifiers_with_mode .iter() @@ -1847,7 +1851,7 @@ fn get_target_specifiers( specifiers_with_mode .into_iter() .filter_map(|(s, mode)| mode.needs_test_run().then_some(s)) - .chain(doc_tests.iter().map(|d| d.specifier.clone())) + .chain(doc_tests.iter().map(|d| d.url.clone())) .collect() } diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index cb85859f7a..b3d7618be9 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -21,6 +21,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::unsync::spawn; use deno_core::url::Url; +use deno_semver::SmallStackString; use deno_semver::Version; use once_cell::sync::Lazy; use std::borrow::Cow; @@ -255,7 +256,7 @@ async fn print_release_notes( let is_deno_2_rc = new_semver.major == 2 && new_semver.minor == 0 && new_semver.patch == 0 - && new_semver.pre.first() == Some(&"rc".to_string()); + && new_semver.pre.first().map(|s| s.as_str()) == Some("rc"); if is_deno_2_rc || is_switching_from_deno1_to_deno2 { log::info!( @@ -674,7 +675,7 @@ impl RequestedVersion { ); }; - if semver.pre.contains(&"rc".to_string()) { + if semver.pre.contains(&SmallStackString::from_static("rc")) { (ReleaseChannel::Rc, passed_version) } else { (ReleaseChannel::Stable, passed_version) diff --git a/cli/tsc/00_typescript.js b/cli/tsc/00_typescript.js index 7d20f92367..b7626fe082 100644 --- a/cli/tsc/00_typescript.js +++ b/cli/tsc/00_typescript.js @@ -136063,6 +136063,7 @@ var unprefixedNodeCoreModuleList = [ "https", "http2", "inspector", + "inspector/promises", "module", "net", "os", diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index 7e8a407cf9..f7862c95e4 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -41,6 +41,13 @@ delete Object.prototype.__proto__; "listen", "listenDatagram", "openKv", + "connectQuic", + "listenQuic", + "QuicBidirectionalStream", + "QuicConn", + "QuicListener", + "QuicReceiveStream", + "QuicSendStream", ]); const unstableMsgSuggestion = "If not, try changing the 'lib' compiler option to include 'deno.unstable' " + diff --git a/cli/tsc/diagnostics.rs b/cli/tsc/diagnostics.rs index d3795706eb..e4cc80723f 100644 --- a/cli/tsc/diagnostics.rs +++ b/cli/tsc/diagnostics.rs @@ -133,6 +133,12 @@ pub struct Diagnostic { pub file_name: Option, #[serde(skip_serializing_if = "Option::is_none")] pub related_information: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub reports_deprecated: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reports_unnecessary: Option, + #[serde(flatten)] + pub other: deno_core::serde_json::Map, } impl Diagnostic { diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index a8e8d73b68..4c84050b5e 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -4,9 +4,10 @@ use crate::args::TsConfig; use crate::args::TypeCheckMode; use crate::cache::FastInsecureHasher; use crate::cache::ModuleInfoCache; -use crate::node; +use crate::node::CliNodeResolver; use crate::npm::CliNpmResolver; use crate::resolver::CjsTracker; +use crate::sys::CliSys; use crate::util::checksum; use crate::util::path::mapped_specifier_for_tsc; use crate::worker::create_isolate_create_params; @@ -35,12 +36,11 @@ use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; use deno_resolver::npm::ResolvePkgFolderFromDenoReqError; -use 
deno_runtime::deno_fs; -use deno_runtime::deno_node::NodeResolver; use deno_semver::npm::NpmPackageReqReference; use node_resolver::errors::NodeJsErrorCode; use node_resolver::errors::NodeJsErrorCoded; use node_resolver::errors::PackageSubpathResolveError; +use node_resolver::resolve_specifier_into_node_modules; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; use once_cell::sync::Lazy; @@ -380,7 +380,7 @@ impl TypeCheckingCjsTracker { #[derive(Debug)] pub struct RequestNpmState { pub cjs_tracker: Arc, - pub node_resolver: Arc, + pub node_resolver: Arc, pub npm_resolver: Arc, } @@ -660,9 +660,9 @@ fn op_load_inner( None } else { // means it's Deno code importing an npm module - let specifier = node::resolve_specifier_into_node_modules( + let specifier = resolve_specifier_into_node_modules( + &CliSys::default(), &module.specifier, - &deno_fs::RealFs, ); Some(Cow::Owned(load_from_node_modules( &specifier, @@ -924,9 +924,9 @@ fn resolve_graph_specifier_types( Some(Module::External(module)) => { // we currently only use "External" for when the module is in an npm package Ok(state.maybe_npm.as_ref().map(|_| { - let specifier = node::resolve_specifier_into_node_modules( + let specifier = resolve_specifier_into_node_modules( + &CliSys::default(), &module.specifier, - &deno_fs::RealFs, ); into_specifier_and_media_type(Some(specifier)) })) @@ -1444,6 +1444,9 @@ mod tests { source_line: None, file_name: None, related_information: None, + reports_deprecated: None, + reports_unnecessary: None, + other: Default::default(), }]), stats: Stats(vec![("a".to_string(), 12)]) }) diff --git a/cli/util/extract.rs b/cli/util/extract.rs index be68202aa1..c4562060d8 100644 --- a/cli/util/extract.rs +++ b/cli/util/extract.rs @@ -13,6 +13,7 @@ use deno_ast::swc::visit::VisitMut; use deno_ast::swc::visit::VisitWith as _; use deno_ast::MediaType; use deno_ast::SourceRangedForSpanned as _; +use deno_cache_dir::file_fetcher::File; use deno_core::error::AnyError; use deno_core::ModuleSpecifier; use regex::Regex; @@ -20,7 +21,7 @@ use std::collections::BTreeSet; use std::fmt::Write as _; use std::sync::Arc; -use crate::file_fetcher::File; +use crate::file_fetcher::TextDecodedFile; use crate::util::path::mapped_specifier_for_tsc; /// Extracts doc tests from a given file, transforms them into pseudo test @@ -52,7 +53,7 @@ fn extract_inner( file: File, wrap_kind: WrapKind, ) -> Result, AnyError> { - let file = file.into_text_decoded()?; + let file = TextDecodedFile::decode(file)?; let exports = match deno_ast::parse_program(deno_ast::ParseParams { specifier: file.specifier.clone(), @@ -230,7 +231,7 @@ fn extract_files_from_regex_blocks( .unwrap_or(file_specifier); Some(File { - specifier: file_specifier, + url: file_specifier, maybe_headers: None, source: file_source.into_bytes().into(), }) @@ -558,7 +559,7 @@ fn generate_pseudo_file( exports: &ExportCollector, wrap_kind: WrapKind, ) -> Result { - let file = file.into_text_decoded()?; + let file = TextDecodedFile::decode(file)?; let parsed = deno_ast::parse_program(deno_ast::ParseParams { specifier: file.specifier.clone(), @@ -594,7 +595,7 @@ fn generate_pseudo_file( log::debug!("{}:\n{}", file.specifier, source); Ok(File { - specifier: file.specifier, + url: file.specifier, maybe_headers: None, source: source.into_bytes().into(), }) @@ -1199,14 +1200,14 @@ Deno.test("file:///main.ts$3-7.ts", async ()=>{ for test in tests { let file = File { - specifier: ModuleSpecifier::parse(test.input.specifier).unwrap(), + url: 
ModuleSpecifier::parse(test.input.specifier).unwrap(), maybe_headers: None, source: test.input.source.as_bytes().into(), }; let got_decoded = extract_doc_tests(file) .unwrap() .into_iter() - .map(|f| f.into_text_decoded().unwrap()) + .map(|f| TextDecodedFile::decode(f).unwrap()) .collect::>(); let expected = test .expected @@ -1435,14 +1436,14 @@ add('1', '2'); for test in tests { let file = File { - specifier: ModuleSpecifier::parse(test.input.specifier).unwrap(), + url: ModuleSpecifier::parse(test.input.specifier).unwrap(), maybe_headers: None, source: test.input.source.as_bytes().into(), }; let got_decoded = extract_snippet_files(file) .unwrap() .into_iter() - .map(|f| f.into_text_decoded().unwrap()) + .map(|f| TextDecodedFile::decode(f).unwrap()) .collect::>(); let expected = test .expected diff --git a/cli/util/fs.rs b/cli/util/fs.rs index ba84a0e8f3..e0b9a6f4ee 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -1,9 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use std::fs::OpenOptions; use std::io::Error; use std::io::ErrorKind; -use std::io::Write; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; @@ -19,185 +17,12 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::unsync::spawn_blocking; use deno_core::ModuleSpecifier; -use deno_runtime::deno_fs::FileSystem; -use crate::util::path::get_atomic_file_path; +use crate::sys::CliSys; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressMessagePrompt; -/// Writes the file to the file system at a temporary path, then -/// renames it to the destination in a single sys call in order -/// to never leave the file system in a corrupted state. -/// -/// This also handles creating the directory if a NotFound error -/// occurs. 
-pub fn atomic_write_file_with_retries>( - file_path: &Path, - data: T, - mode: u32, -) -> std::io::Result<()> { - struct RealAtomicWriteFileFs { - mode: u32, - } - - impl AtomicWriteFileFs for RealAtomicWriteFileFs { - fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()> { - write_file(path, bytes, self.mode) - } - fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()> { - std::fs::rename(from, to) - } - fn remove_file(&self, path: &Path) -> std::io::Result<()> { - std::fs::remove_file(path) - } - fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()> { - std::fs::create_dir_all(dir_path) - } - fn path_exists(&self, path: &Path) -> bool { - path.exists() - } - } - - atomic_write_file_with_retries_and_fs( - &RealAtomicWriteFileFs { mode }, - file_path, - data.as_ref(), - ) -} - -pub trait AtomicWriteFileFs { - fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()>; - fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()>; - fn remove_file(&self, path: &Path) -> std::io::Result<()>; - fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()>; - fn path_exists(&self, path: &Path) -> bool; -} - -pub struct AtomicWriteFileFsAdapter<'a> { - pub fs: &'a dyn FileSystem, - pub write_mode: u32, -} - -impl<'a> AtomicWriteFileFs for AtomicWriteFileFsAdapter<'a> { - fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()> { - self - .fs - .write_file_sync( - path, - deno_runtime::deno_fs::OpenOptions::write( - true, - false, - false, - Some(self.write_mode), - ), - None, - bytes, - ) - .map_err(|e| e.into_io_error()) - } - - fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()> { - self.fs.rename_sync(from, to).map_err(|e| e.into_io_error()) - } - - fn remove_file(&self, path: &Path) -> std::io::Result<()> { - self - .fs - .remove_sync(path, false) - .map_err(|e| e.into_io_error()) - } - - fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()> { - self - .fs - .mkdir_sync(dir_path, /* recursive */ true, None) - .map_err(|e| e.into_io_error()) - } - - fn path_exists(&self, path: &Path) -> bool { - self.fs.exists_sync(path) - } -} - -pub fn atomic_write_file_with_retries_and_fs>( - fs: &impl AtomicWriteFileFs, - file_path: &Path, - data: T, -) -> std::io::Result<()> { - let mut count = 0; - loop { - match atomic_write_file(fs, file_path, data.as_ref()) { - Ok(()) => return Ok(()), - Err(err) => { - if count >= 5 { - // too many retries, return the error - return Err(err); - } - count += 1; - let sleep_ms = std::cmp::min(50, 10 * count); - std::thread::sleep(std::time::Duration::from_millis(sleep_ms)); - } - } - } -} - -/// Writes the file to the file system at a temporary path, then -/// renames it to the destination in a single sys call in order -/// to never leave the file system in a corrupted state. -/// -/// This also handles creating the directory if a NotFound error -/// occurs. 
-fn atomic_write_file( - fs: &impl AtomicWriteFileFs, - file_path: &Path, - data: &[u8], -) -> std::io::Result<()> { - fn atomic_write_file_raw( - fs: &impl AtomicWriteFileFs, - temp_file_path: &Path, - file_path: &Path, - data: &[u8], - ) -> std::io::Result<()> { - fs.write_file(temp_file_path, data)?; - fs.rename_file(temp_file_path, file_path) - .inspect_err(|_err| { - // clean up the created temp file on error - let _ = fs.remove_file(temp_file_path); - }) - } - - let temp_file_path = get_atomic_file_path(file_path); - - if let Err(write_err) = - atomic_write_file_raw(fs, &temp_file_path, file_path, data) - { - if write_err.kind() == ErrorKind::NotFound { - let parent_dir_path = file_path.parent().unwrap(); - match fs.create_dir_all(parent_dir_path) { - Ok(()) => { - return atomic_write_file_raw(fs, &temp_file_path, file_path, data) - .map_err(|err| add_file_context_to_err(file_path, err)); - } - Err(create_err) => { - if !fs.path_exists(parent_dir_path) { - return Err(Error::new( - create_err.kind(), - format!( - "{:#} (for '{}')\nCheck the permission of the directory.", - create_err, - parent_dir_path.display() - ), - )); - } - } - } - } - return Err(add_file_context_to_err(file_path, write_err)); - } - Ok(()) -} - /// Creates a std::fs::File handling if the parent does not exist. pub fn create_file(file_path: &Path) -> std::io::Result { match std::fs::File::create(file_path) { @@ -236,45 +61,6 @@ fn add_file_context_to_err(file_path: &Path, err: Error) -> Error { ) } -pub fn write_file>( - filename: &Path, - data: T, - mode: u32, -) -> std::io::Result<()> { - write_file_2(filename, data, true, mode, true, false) -} - -pub fn write_file_2>( - filename: &Path, - data: T, - update_mode: bool, - mode: u32, - is_create: bool, - is_append: bool, -) -> std::io::Result<()> { - let mut file = OpenOptions::new() - .read(false) - .write(true) - .append(is_append) - .truncate(!is_append) - .create(is_create) - .open(filename)?; - - if update_mode { - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let mode = mode & 0o777; - let permissions = PermissionsExt::from_mode(mode); - file.set_permissions(permissions)?; - } - #[cfg(not(unix))] - let _ = mode; - } - - file.write_all(data.as_ref()) -} - /// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. pub fn canonicalize_path(path: &Path) -> Result { Ok(deno_path_util::strip_unc_prefix(path.canonicalize()?)) @@ -289,16 +75,10 @@ pub fn canonicalize_path(path: &Path) -> Result { pub fn canonicalize_path_maybe_not_exists( path: &Path, ) -> Result { - deno_path_util::canonicalize_path_maybe_not_exists(path, &canonicalize_path) -} - -pub fn canonicalize_path_maybe_not_exists_with_fs( - path: &Path, - fs: &dyn FileSystem, -) -> Result { - deno_path_util::canonicalize_path_maybe_not_exists(path, &|path| { - fs.realpath_sync(path).map_err(|err| err.into_io_error()) - }) + deno_path_util::fs::canonicalize_path_maybe_not_exists( + &CliSys::default(), + path, + ) } /// Collects module specifiers that satisfy the given predicate as a file path, by recursively walking `include`. 
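The helpers removed here moved behind `deno_path_util::fs::atomic_write_file_with_retries` (the lint fixer earlier in this diff already imports it). The underlying pattern is unchanged: write to a sibling temp file, then rename it over the destination, so a crash or concurrent reader never observes a half-written file. A minimal `std`-only sketch of that pattern follows; the temp-file naming and the single NotFound retry are illustrative, not the `deno_path_util` implementation:

```rust
use std::io;
use std::path::{Path, PathBuf};

// Illustrative temp name; the real helper appends a random component
// so concurrent writers do not collide.
fn temp_path(path: &Path) -> PathBuf {
  path.with_extension("tmp")
}

/// Write `data` to a temp file next to `path`, then rename it into
/// place in a single syscall. Creates the parent directory and retries
/// once if the first attempt fails with NotFound.
fn atomic_write(path: &Path, data: &[u8]) -> io::Result<()> {
  let tmp = temp_path(path);
  let write_and_rename = |tmp: &Path| -> io::Result<()> {
    std::fs::write(tmp, data)?;
    std::fs::rename(tmp, path)
  };
  match write_and_rename(&tmp) {
    Ok(()) => Ok(()),
    Err(err) if err.kind() == io::ErrorKind::NotFound => {
      if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)?;
      }
      write_and_rename(&tmp)
    }
    Err(err) => {
      // Don't leave the temp file behind on failure.
      let _ = std::fs::remove_file(&tmp);
      Err(err)
    }
  }
}

fn main() -> io::Result<()> {
  let target = std::env::temp_dir().join("atomic-demo").join("out.txt");
  atomic_write(&target, b"hello")?;
  assert_eq!(std::fs::read(&target)?, b"hello");
  Ok(())
}
```

The removed `atomic_write_file_with_retries` additionally retried the whole sequence up to five times with a short sleep to ride out transient filesystem races; a production version would keep that outer retry loop and randomize the temp suffix.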
@@ -346,7 +126,7 @@ pub fn collect_specifiers( .ignore_git_folder() .ignore_node_modules() .set_vendor_folder(vendor_folder) - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)?; + .collect_file_patterns(&CliSys::default(), files)?; let mut collected_files_as_urls = collected_files .iter() .map(|f| specifier_from_file_path(f).unwrap()) @@ -418,7 +198,11 @@ mod clone_dir_imp { from: &std::path::Path, to: &std::path::Path, ) -> Result<(), deno_core::error::AnyError> { - if let Err(e) = super::hard_link_dir_recursive(from, to) { + use crate::sys::CliSys; + + if let Err(e) = + deno_npm_cache::hard_link_dir_recursive(&CliSys::default(), from, to) + { log::debug!("Failed to hard link dir {:?} to {:?}: {}", from, to, e); super::copy_dir_recursive(from, to)?; } @@ -465,84 +249,6 @@ pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> { Ok(()) } -/// Hardlinks the files in one directory to another directory. -/// -/// Note: Does not handle symlinks. -pub fn hard_link_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> { - std::fs::create_dir_all(to) - .with_context(|| format!("Creating {}", to.display()))?; - let read_dir = std::fs::read_dir(from) - .with_context(|| format!("Reading {}", from.display()))?; - - for entry in read_dir { - let entry = entry?; - let file_type = entry.file_type()?; - let new_from = from.join(entry.file_name()); - let new_to = to.join(entry.file_name()); - - if file_type.is_dir() { - hard_link_dir_recursive(&new_from, &new_to).with_context(|| { - format!("Dir {} to {}", new_from.display(), new_to.display()) - })?; - } else if file_type.is_file() { - // note: chance for race conditions here between attempting to create, - // then removing, then attempting to create. There doesn't seem to be - // a way to hard link with overwriting in Rust, but maybe there is some - // way with platform specific code. The workaround here is to handle - // scenarios where something else might create or remove files. - if let Err(err) = std::fs::hard_link(&new_from, &new_to) { - if err.kind() == ErrorKind::AlreadyExists { - if let Err(err) = std::fs::remove_file(&new_to) { - if err.kind() == ErrorKind::NotFound { - // Assume another process/thread created this hard link to the file we are wanting - // to remove then sleep a little bit to let the other process/thread move ahead - // faster to reduce contention. - std::thread::sleep(Duration::from_millis(10)); - } else { - return Err(err).with_context(|| { - format!( - "Removing file to hard link {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - - // Always attempt to recreate the hardlink. In contention scenarios, the other process - // might have been killed or exited after removing the file, but before creating the hardlink - if let Err(err) = std::fs::hard_link(&new_from, &new_to) { - // Assume another process/thread created this hard link to the file we are wanting - // to now create then sleep a little bit to let the other process/thread move ahead - // faster to reduce contention. 
- if err.kind() == ErrorKind::AlreadyExists { - std::thread::sleep(Duration::from_millis(10)); - } else { - return Err(err).with_context(|| { - format!( - "Hard linking {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - } else { - return Err(err).with_context(|| { - format!( - "Hard linking {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - } - } - - Ok(()) -} - pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> { let err_mapper = |err: Error, kind: Option| { Error::new( diff --git a/cli/util/path.rs b/cli/util/path.rs index de72843406..539e1235a8 100644 --- a/cli/util/path.rs +++ b/cli/util/path.rs @@ -1,7 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use std::borrow::Cow; -use std::fmt::Write; use std::path::Path; use std::path::PathBuf; @@ -52,19 +51,6 @@ pub fn get_extension(file_path: &Path) -> Option { .map(|e| e.to_lowercase()); } -pub fn get_atomic_file_path(file_path: &Path) -> PathBuf { - let rand = gen_rand_path_component(); - let extension = format!("{rand}.tmp"); - file_path.with_extension(extension) -} - -fn gen_rand_path_component() -> String { - (0..4).fold(String::with_capacity(8), |mut output, _| { - write!(&mut output, "{:02x}", rand::random::()).unwrap(); - output - }) -} - /// TypeScript figures out the type of file based on the extension, but we take /// other factors into account like the file headers. The hack here is to map the /// specifier passed to TypeScript to a new specifier with the file extension. diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs index 06b311e150..107b78a213 100644 --- a/cli/util/text_encoding.rs +++ b/cli/util/text_encoding.rs @@ -140,23 +140,23 @@ mod tests { #[test] fn test_source_map_from_code() { let to_string = - |bytes: Vec| -> String { String::from_utf8(bytes).unwrap() }; + |bytes: Vec| -> String { String::from_utf8(bytes.to_vec()).unwrap() }; assert_eq!( source_map_from_code( - b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=", + b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=" ).map(to_string), Some("testingtesting".to_string()) ); assert_eq!( source_map_from_code( - b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n \n", + b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n \n" ).map(to_string), Some("testingtesting".to_string()) ); assert_eq!( source_map_from_code( - b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n test\n", - ), + b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n test\n" + ).map(to_string), None ); assert_eq!( @@ -164,7 +164,7 @@ mod tests { b"\"use strict\"; throw new Error(\"Hello world!\"); -//# sourceMappingURL=data:application/json;base64,{", +//# sourceMappingURL=data:application/json;base64,{" ), None ); diff --git a/cli/worker.rs b/cli/worker.rs index 0bbc27b29f..ef519c7278 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -23,8 +23,6 @@ use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeExtInitServices; use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_node::NodeRequireLoaderRc; -use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; @@ -53,7 +51,10 @@ use crate::args::DenoSubcommand; use 
crate::args::NpmCachingStrategy; use crate::args::StorageKeyResolver; use crate::errors; +use crate::node::CliNodeResolver; +use crate::node::CliPackageJsonResolver; use crate::npm::CliNpmResolver; +use crate::sys::CliSys; use crate::util::checksum; use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherRestartMode; @@ -145,13 +146,14 @@ struct SharedWorkerState { maybe_inspector_server: Option<Arc<InspectorServer>>, maybe_lockfile: Option<Arc<CliLockfile>>, module_loader_factory: Box<dyn ModuleLoaderFactory>, - node_resolver: Arc<NodeResolver>, + node_resolver: Arc<CliNodeResolver>, npm_resolver: Arc<dyn CliNpmResolver>, - pkg_json_resolver: Arc<PackageJsonResolver>, + pkg_json_resolver: Arc<CliPackageJsonResolver>, root_cert_store_provider: Arc<dyn RootCertStoreProvider>, root_permissions: PermissionsContainer, shared_array_buffer_store: SharedArrayBufferStore, storage_key_resolver: StorageKeyResolver, + sys: CliSys, options: CliMainWorkerOptions, subcommand: DenoSubcommand, otel_config: OtelConfig, @@ -162,12 +164,13 @@ impl SharedWorkerState { pub fn create_node_init_services( &self, node_require_loader: NodeRequireLoaderRc, - ) -> NodeExtInitServices { + ) -> NodeExtInitServices<CliSys> { NodeExtInitServices { node_require_loader, node_resolver: self.node_resolver.clone(), npm_resolver: self.npm_resolver.clone().into_npm_pkg_folder_resolver(), pkg_json_resolver: self.pkg_json_resolver.clone(), + sys: self.sys.clone(), } } @@ -418,12 +421,13 @@ impl CliMainWorkerFactory { maybe_inspector_server: Option<Arc<InspectorServer>>, maybe_lockfile: Option<Arc<CliLockfile>>, module_loader_factory: Box<dyn ModuleLoaderFactory>, - node_resolver: Arc<NodeResolver>, + node_resolver: Arc<CliNodeResolver>, npm_resolver: Arc<dyn CliNpmResolver>, - pkg_json_resolver: Arc<PackageJsonResolver>, + pkg_json_resolver: Arc<CliPackageJsonResolver>, root_cert_store_provider: Arc<dyn RootCertStoreProvider>, root_permissions: PermissionsContainer, storage_key_resolver: StorageKeyResolver, + sys: CliSys, subcommand: DenoSubcommand, options: CliMainWorkerOptions, otel_config: OtelConfig, @@ -448,6 +452,7 @@ impl CliMainWorkerFactory { root_permissions, shared_array_buffer_store: Default::default(), storage_key_resolver, + sys, options, subcommand, otel_config, @@ -612,6 +617,7 @@ impl CliMainWorkerFactory { serve_port: shared.options.serve_port, serve_host: shared.options.serve_host.clone(), otel_config: shared.otel_config.clone(), + close_on_idle: true, }, extensions: custom_extensions, startup_snapshot: crate::js::deno_isolate_init(), @@ -655,7 +661,10 @@ impl CliMainWorkerFactory { "40_test_common.js", "40_test.js", "40_bench.js", - "40_jupyter.js" + "40_jupyter.js", + // TODO(bartlomieju): probably shouldn't include these files here?
+ "40_lint_selector.js", + "40_lint.js" ); } @@ -812,6 +821,7 @@ fn create_web_worker_callback( serve_port: shared.options.serve_port, serve_host: shared.options.serve_host.clone(), otel_config: shared.otel_config.clone(), + close_on_idle: args.close_on_idle, }, extensions: vec![], startup_snapshot: crate::js::deno_isolate_init(), @@ -864,14 +874,15 @@ mod tests { let main_module = resolve_path("./hello.js", &std::env::current_dir().unwrap()).unwrap(); let fs = Arc::new(RealFs); - let permission_desc_parser = - Arc::new(RuntimePermissionDescriptorParser::new(fs.clone())); + let permission_desc_parser = Arc::new( + RuntimePermissionDescriptorParser::new(crate::sys::CliSys::default()), + ); let options = WorkerOptions { startup_snapshot: crate::js::deno_isolate_init(), ..Default::default() }; - MainWorker::bootstrap_from_options( + MainWorker::bootstrap_from_options::( main_module, WorkerServiceOptions { module_loader: Rc::new(FsModuleLoader), diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 5b238aad25..4dea8f21e1 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.176.0" +version = "0.178.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index d03779d364..96aec27576 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.114.0" +version = "0.116.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/canvas/Cargo.toml b/ext/canvas/Cargo.toml index c851b7724a..7c7cc49b7c 100644 --- a/ext/canvas/Cargo.toml +++ b/ext/canvas/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_canvas" -version = "0.51.0" +version = "0.53.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index 4a26917933..f68dd7d198 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.182.0" +version = "0.184.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cron/Cargo.toml b/ext/cron/Cargo.toml index d8f2d949f7..022a8418cf 100644 --- a/ext/cron/Cargo.toml +++ b/ext/cron/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cron" -version = "0.62.0" +version = "0.64.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 63656bf642..c283cc9277 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.196.0" +version = "0.198.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js index a34758d19a..bb2bee77e2 100644 --- a/ext/fetch/22_body.js +++ b/ext/fetch/22_body.js @@ -13,6 +13,7 @@ import { core, primordials } from "ext:core/mod.js"; const { + BadResourcePrototype, isAnyArrayBuffer, isArrayBuffer, isStringObject, @@ -26,6 +27,7 @@ const { JSONParse, ObjectDefineProperties, ObjectPrototypeIsPrototypeOf, + PromisePrototypeCatch, TypedArrayPrototypeGetBuffer, TypedArrayPrototypeGetByteLength, TypedArrayPrototypeGetByteOffset, @@ -160,7 +162,18 @@ class InnerBody { ) ) { readableStreamThrowIfErrored(this.stream); - return readableStreamCollectIntoUint8Array(this.stream); + 
return PromisePrototypeCatch( + readableStreamCollectIntoUint8Array(this.stream), + (e) => { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, e)) { + // TODO(kt3k): We probably like to pass e as `cause` if BadResource supports it. + throw new e.constructor( + "Cannot read body as underlying resource unavailable", + ); + } + throw e; + }, + ); } else { this.streamOrStatic.consumed = true; return this.streamOrStatic.body; diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 716d268a04..e6e4ded4af 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.206.0" +version = "0.208.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -23,6 +23,7 @@ deno_permissions.workspace = true deno_tls.workspace = true dyn-clone = "1" error_reporter = "1" +h2.workspace = true hickory-resolver.workspace = true http.workspace = true http-body-util.workspace = true diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 919c6d3044..103698b3bf 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -10,6 +10,7 @@ use std::borrow::Cow; use std::cell::RefCell; use std::cmp::min; use std::convert::From; +use std::future; use std::path::Path; use std::path::PathBuf; use std::pin::Pin; @@ -66,6 +67,7 @@ use http::header::USER_AGENT; use http::Extensions; use http::Method; use http::Uri; +use http_body_util::combinators::BoxBody; use http_body_util::BodyExt; use hyper::body::Frame; use hyper_util::client::legacy::connect::HttpConnector; @@ -75,6 +77,7 @@ use hyper_util::rt::TokioExecutor; use hyper_util::rt::TokioTimer; use serde::Deserialize; use serde::Serialize; +use tower::retry; use tower::ServiceExt; use tower_http::decompression::Decompression; @@ -476,9 +479,7 @@ where // If a body is passed, we use it, and don't return a body for streaming. con_len = Some(data.len() as u64); - http_body_util::Full::new(data.to_vec().into()) - .map_err(|never| match never {}) - .boxed() + ReqBody::full(data.to_vec().into()) } (_, Some(resource)) => { let resource = state .borrow_mut() .resource_table .take_any(resource)?; match resource.size_hint() { (body_size, Some(n)) if body_size == n && body_size > 0 => { con_len = Some(body_size); } _ => {} } - ReqBody::new(ResourceToBodyAdapter::new(resource)) + ReqBody::streaming(ResourceToBodyAdapter::new(resource)) } (None, None) => unreachable!(), } @@ -501,9 +502,7 @@ where if matches!(method, Method::POST | Method::PUT) { con_len = Some(0); } - http_body_util::Empty::new() - .map_err(|never| match never {}) - .boxed() + ReqBody::empty() }; let mut request = http::Request::new(body); @@ -1066,7 +1065,8 @@ pub fn create_http_client( } let pooled_client = builder.build(connector); - let decompress = Decompression::new(pooled_client).gzip(true).br(true); + let retry_client = retry::Retry::new(FetchRetry, pooled_client); + let decompress = Decompression::new(retry_client).gzip(true).br(true); Ok(Client { inner: decompress, @@ -1083,7 +1083,12 @@ pub fn op_utf8_to_byte_string(#[string] input: String) -> ByteString { #[derive(Clone, Debug)] pub struct Client { - inner: Decompression<hyper_util::client::legacy::Client<Connector, ReqBody>>, + inner: Decompression< + retry::Retry< + FetchRetry, + hyper_util::client::legacy::Client<Connector, ReqBody>, + >, + >, // Used to check whether to include a proxy-authorization header proxies: Arc<Proxies>, user_agent: HeaderValue, @@ -1174,10 +1179,70 @@ impl Client { } } -pub type ReqBody = - http_body_util::combinators::BoxBody<Bytes, deno_core::error::AnyError>; -pub type ResBody = - http_body_util::combinators::BoxBody<Bytes, deno_core::error::AnyError>; +// This is a custom enum to allow the retry policy to clone the variants that could be retried.
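// To make that tradeoff concrete, an illustrative sketch (the bindings below
// are hypothetical): only the buffered variants can be replayed, because a
// streaming body may already have been partially consumed by the failed
// attempt. `clone_request` in the retry policy further down returns None for
// the streaming case, which disables the retry for that request.
//
//   let replayable = ReqBody::full(Bytes::from_static(b"hello")); // clonable
//   let replayable = ReqBody::empty();                            // clonable
//   let one_shot = ReqBody::streaming(ResourceToBodyAdapter::new(resource)); // not clonable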
+pub enum ReqBody { + Full(http_body_util::Full<Bytes>), + Empty(http_body_util::Empty<Bytes>), + Streaming(BoxBody<Bytes, deno_core::error::AnyError>), +} + +pub type ResBody = BoxBody<Bytes, deno_core::error::AnyError>; + +impl ReqBody { + pub fn full(bytes: Bytes) -> Self { + ReqBody::Full(http_body_util::Full::new(bytes)) + } + + pub fn empty() -> Self { + ReqBody::Empty(http_body_util::Empty::new()) + } + + pub fn streaming<B>(body: B) -> Self + where + B: hyper::body::Body<Data = Bytes, Error = deno_core::error::AnyError> + + Send + + Sync + + 'static, + { + ReqBody::Streaming(BoxBody::new(body)) + } +} + +impl hyper::body::Body for ReqBody { + type Data = Bytes; + type Error = deno_core::error::AnyError; + + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll<Option<Result<Frame<Self::Data>, Self::Error>>> { + match &mut *self { + ReqBody::Full(ref mut b) => { + Pin::new(b).poll_frame(cx).map_err(|never| match never {}) + } + ReqBody::Empty(ref mut b) => { + Pin::new(b).poll_frame(cx).map_err(|never| match never {}) + } + ReqBody::Streaming(ref mut b) => Pin::new(b).poll_frame(cx), + } + } + + fn is_end_stream(&self) -> bool { + match self { + ReqBody::Full(ref b) => b.is_end_stream(), + ReqBody::Empty(ref b) => b.is_end_stream(), + ReqBody::Streaming(ref b) => b.is_end_stream(), + } + } + + fn size_hint(&self) -> hyper::body::SizeHint { + match self { + ReqBody::Full(ref b) => b.size_hint(), + ReqBody::Empty(ref b) => b.size_hint(), + ReqBody::Streaming(ref b) => b.size_hint(), + } + } +} /// Copied from https://github.com/seanmonstar/reqwest/blob/b9d62a0323d96f11672a61a17bf8849baec00275/src/async_impl/request.rs#L572 /// Check the request URL for a "username:password" type authority, and if @@ -1214,3 +1279,102 @@ pub fn extract_authority(url: &mut Url) -> Option<(String, Option<String>)> { fn op_fetch_promise_is_settled(promise: v8::Local<v8::Promise>) -> bool { promise.state() != v8::PromiseState::Pending } + +/// Deno.fetch's retry policy. +#[derive(Clone, Debug)] +struct FetchRetry; + +/// Marker extension that a request has been retried once. +#[derive(Clone, Debug)] +struct Retried; + +impl<ResBody, E> + retry::Policy<ReqBody, http::Response<ResBody>, E> + for FetchRetry +where + E: std::error::Error + 'static, +{ + /// Don't delay retries. + type Future = future::Ready<()>; + + fn retry( + &mut self, + req: &mut http::Request<ReqBody>, + result: &mut Result<http::Response<ResBody>, E>, + ) -> Option<Self::Future> { + if req.extensions().get::<Retried>().is_some() { + // only retry once + return None; + } + + match result { + Ok(..) => { + // never retry a Response + None + } + Err(err) => { + if is_error_retryable(&*err) { + req.extensions_mut().insert(Retried); + Some(future::ready(())) + } else { + None + } + } + } + } + + fn clone_request( + &mut self, + req: &http::Request<ReqBody>, + ) -> Option<http::Request<ReqBody>> { + let body = match req.body() { + ReqBody::Full(b) => ReqBody::Full(b.clone()), + ReqBody::Empty(b) => ReqBody::Empty(*b), + ReqBody::Streaming(..) => return None, + }; + + let mut clone = http::Request::new(body); + *clone.method_mut() = req.method().clone(); + *clone.uri_mut() = req.uri().clone(); + *clone.headers_mut() = req.headers().clone(); + *clone.extensions_mut() = req.extensions().clone(); + Some(clone) + } +} + +fn is_error_retryable(err: &(dyn std::error::Error + 'static)) -> bool { + // Note: hyper doesn't promise it will always be this h2 version. Keep up to date. + if let Some(err) = find_source::<h2::Error>(err) { + // They sent us a graceful shutdown, try with a new connection! + if err.is_go_away() + && err.is_remote() + && err.reason() == Some(h2::Reason::NO_ERROR) + { + return true; + } + + // REFUSED_STREAM was sent from the server, which is safe to retry.
+ // https://www.rfc-editor.org/rfc/rfc9113.html#section-8.7-3.2 + if err.is_reset() + && err.is_remote() + && err.reason() == Some(h2::Reason::REFUSED_STREAM) + { + return true; + } + } + + false +} + +fn find_source<'a, E: std::error::Error + 'static>( + err: &'a (dyn std::error::Error + 'static), +) -> Option<&'a E> { + let mut err = Some(err); + while let Some(src) = err { + if let Some(found) = src.downcast_ref::() { + return Some(found); + } + err = src.source(); + } + None +} diff --git a/ext/fetch/tests.rs b/ext/fetch/tests.rs index 3da29f8aa7..243b80bd90 100644 --- a/ext/fetch/tests.rs +++ b/ext/fetch/tests.rs @@ -133,11 +133,7 @@ async fn rust_test_client_with_resolver( let req = http::Request::builder() .uri(format!("https://{}/foo", src_addr)) - .body( - http_body_util::Empty::new() - .map_err(|err| match err {}) - .boxed(), - ) + .body(crate::ReqBody::empty()) .unwrap(); let resp = client.send(req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::OK); diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index d54249329d..9cd5c77013 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.169.0" +version = "0.171.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/30_fs.js b/ext/fs/30_fs.js index fc2b18be13..4e71acb1b2 100644 --- a/ext/fs/30_fs.js +++ b/ext/fs/30_fs.js @@ -77,6 +77,7 @@ const { Error, Function, MathTrunc, + Number, ObjectEntries, ObjectDefineProperty, ObjectPrototypeIsPrototypeOf, @@ -373,12 +374,12 @@ function parseFileInfo(response) { isDirectory: response.isDirectory, isSymlink: response.isSymlink, size: response.size, - mtime: response.mtimeSet === true ? new Date(response.mtime) : null, - atime: response.atimeSet === true ? new Date(response.atime) : null, + mtime: response.mtimeSet === true ? new Date(Number(response.mtime)) : null, + atime: response.atimeSet === true ? new Date(Number(response.atime)) : null, birthtime: response.birthtimeSet === true ? new Date(response.birthtime) : null, - ctime: response.ctimeSet === true ? new Date(response.ctime) : null, + ctime: response.ctimeSet === true ? new Date(Number(response.ctime)) : null, dev: response.dev, mode: response.mode, ino: unix ? response.ino : null, diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index d11520ad8c..1d0b623718 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.92.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/in_memory_fs.rs b/ext/fs/in_memory_fs.rs deleted file mode 100644 index b79b0ae984..0000000000 --- a/ext/fs/in_memory_fs.rs +++ /dev/null @@ -1,481 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// Allow using Arc for this module. 
-#![allow(clippy::disallowed_types)] - -use std::borrow::Cow; -use std::collections::hash_map::Entry; -use std::collections::HashMap; -use std::io::Error; -use std::io::ErrorKind; -use std::path::Path; -use std::path::PathBuf; -use std::rc::Rc; -use std::sync::Arc; - -use deno_core::parking_lot::Mutex; -use deno_io::fs::File; -use deno_io::fs::FsError; -use deno_io::fs::FsResult; -use deno_io::fs::FsStat; -use deno_path_util::normalize_path; - -use crate::interface::AccessCheckCb; -use crate::interface::FsDirEntry; -use crate::interface::FsFileType; -use crate::FileSystem; -use crate::OpenOptions; - -#[derive(Debug)] -enum PathEntry { - Dir, - File(Vec), -} - -/// A very basic in-memory file system useful for swapping out in -/// the place of a RealFs for testing purposes. -/// -/// Please develop this out as you need functionality. -#[derive(Debug, Default)] -pub struct InMemoryFs { - entries: Mutex>>, -} - -impl InMemoryFs { - pub fn setup_text_files(&self, files: Vec<(String, String)>) { - for (path, text) in files { - let path = PathBuf::from(path); - self.mkdir_sync(path.parent().unwrap(), true, None).unwrap(); - self - .write_file_sync( - &path, - OpenOptions::write(true, false, false, None), - None, - &text.into_bytes(), - ) - .unwrap(); - } - } - - fn get_entry(&self, path: &Path) -> Option> { - let path = normalize_path(path); - self.entries.lock().get(&path).cloned() - } -} - -#[async_trait::async_trait(?Send)] -impl FileSystem for InMemoryFs { - fn cwd(&self) -> FsResult { - Err(FsError::NotSupported) - } - - fn tmp_dir(&self) -> FsResult { - Err(FsError::NotSupported) - } - - fn chdir(&self, _path: &Path) -> FsResult<()> { - Err(FsError::NotSupported) - } - - fn umask(&self, _mask: Option) -> FsResult { - Err(FsError::NotSupported) - } - - fn open_sync( - &self, - _path: &Path, - _options: OpenOptions, - _access_check: Option, - ) -> FsResult> { - Err(FsError::NotSupported) - } - async fn open_async<'a>( - &'a self, - path: PathBuf, - options: OpenOptions, - access_check: Option>, - ) -> FsResult> { - self.open_sync(&path, options, access_check) - } - - fn mkdir_sync( - &self, - path: &Path, - recursive: bool, - _mode: Option, - ) -> FsResult<()> { - let path = normalize_path(path); - - if let Some(parent) = path.parent() { - let entry = self.entries.lock().get(parent).cloned(); - match entry { - Some(entry) => match &*entry { - PathEntry::File(_) => { - return Err(FsError::Io(Error::new( - ErrorKind::InvalidInput, - "Parent is a file", - ))) - } - PathEntry::Dir => {} - }, - None => { - if recursive { - self.mkdir_sync(parent, true, None)?; - } else { - return Err(FsError::Io(Error::new( - ErrorKind::NotFound, - "Not found", - ))); - } - } - } - } - - let entry = self.entries.lock().get(&path).cloned(); - match entry { - Some(entry) => match &*entry { - PathEntry::File(_) => Err(FsError::Io(Error::new( - ErrorKind::InvalidInput, - "Is a file", - ))), - PathEntry::Dir => Ok(()), - }, - None => { - self.entries.lock().insert(path, Arc::new(PathEntry::Dir)); - Ok(()) - } - } - } - async fn mkdir_async( - &self, - path: PathBuf, - recursive: bool, - mode: Option, - ) -> FsResult<()> { - self.mkdir_sync(&path, recursive, mode) - } - - fn chmod_sync(&self, _path: &Path, _mode: u32) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> { - self.chmod_sync(&path, mode) - } - - fn chown_sync( - &self, - _path: &Path, - _uid: Option, - _gid: Option, - ) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn 
chown_async( - &self, - path: PathBuf, - uid: Option, - gid: Option, - ) -> FsResult<()> { - self.chown_sync(&path, uid, gid) - } - - fn lchown_sync( - &self, - _path: &Path, - _uid: Option, - _gid: Option, - ) -> FsResult<()> { - Err(FsError::NotSupported) - } - - async fn lchown_async( - &self, - path: PathBuf, - uid: Option, - gid: Option, - ) -> FsResult<()> { - self.lchown_sync(&path, uid, gid) - } - - fn remove_sync(&self, _path: &Path, _recursive: bool) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> { - self.remove_sync(&path, recursive) - } - - fn copy_file_sync(&self, _from: &Path, _to: &Path) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn copy_file_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> { - self.copy_file_sync(&from, &to) - } - - fn cp_sync(&self, _from: &Path, _to: &Path) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn cp_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> { - self.cp_sync(&from, &to) - } - - fn stat_sync(&self, path: &Path) -> FsResult { - let entry = self.get_entry(path); - match entry { - Some(entry) => match &*entry { - PathEntry::Dir => Ok(FsStat { - is_file: false, - is_directory: true, - is_symlink: false, - size: 0, - mtime: None, - atime: None, - birthtime: None, - ctime: None, - dev: 0, - ino: 0, - mode: 0, - nlink: 0, - uid: 0, - gid: 0, - rdev: 0, - blksize: 0, - blocks: 0, - is_block_device: false, - is_char_device: false, - is_fifo: false, - is_socket: false, - }), - PathEntry::File(data) => Ok(FsStat { - is_file: true, - is_directory: false, - is_symlink: false, - size: data.len() as u64, - mtime: None, - atime: None, - birthtime: None, - ctime: None, - dev: 0, - ino: 0, - mode: 0, - nlink: 0, - uid: 0, - gid: 0, - rdev: 0, - blksize: 0, - blocks: 0, - is_block_device: false, - is_char_device: false, - is_fifo: false, - is_socket: false, - }), - }, - None => Err(FsError::Io(Error::new(ErrorKind::NotFound, "Not found"))), - } - } - async fn stat_async(&self, path: PathBuf) -> FsResult { - self.stat_sync(&path) - } - - fn lstat_sync(&self, _path: &Path) -> FsResult { - Err(FsError::NotSupported) - } - async fn lstat_async(&self, path: PathBuf) -> FsResult { - self.lstat_sync(&path) - } - - fn realpath_sync(&self, _path: &Path) -> FsResult { - Err(FsError::NotSupported) - } - async fn realpath_async(&self, path: PathBuf) -> FsResult { - self.realpath_sync(&path) - } - - fn read_dir_sync(&self, _path: &Path) -> FsResult> { - Err(FsError::NotSupported) - } - async fn read_dir_async(&self, path: PathBuf) -> FsResult> { - self.read_dir_sync(&path) - } - - fn rename_sync(&self, _oldpath: &Path, _newpath: &Path) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn rename_async( - &self, - oldpath: PathBuf, - newpath: PathBuf, - ) -> FsResult<()> { - self.rename_sync(&oldpath, &newpath) - } - - fn link_sync(&self, _oldpath: &Path, _newpath: &Path) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn link_async( - &self, - oldpath: PathBuf, - newpath: PathBuf, - ) -> FsResult<()> { - self.link_sync(&oldpath, &newpath) - } - - fn symlink_sync( - &self, - _oldpath: &Path, - _newpath: &Path, - _file_type: Option, - ) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn symlink_async( - &self, - oldpath: PathBuf, - newpath: PathBuf, - file_type: Option, - ) -> FsResult<()> { - self.symlink_sync(&oldpath, &newpath, file_type) - } - - fn read_link_sync(&self, _path: &Path) -> FsResult { - 
Err(FsError::NotSupported) - } - async fn read_link_async(&self, path: PathBuf) -> FsResult { - self.read_link_sync(&path) - } - - fn truncate_sync(&self, _path: &Path, _len: u64) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> { - self.truncate_sync(&path, len) - } - - fn utime_sync( - &self, - _path: &Path, - _atime_secs: i64, - _atime_nanos: u32, - _mtime_secs: i64, - _mtime_nanos: u32, - ) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn utime_async( - &self, - path: PathBuf, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - self.utime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) - } - - fn lutime_sync( - &self, - _path: &Path, - _atime_secs: i64, - _atime_nanos: u32, - _mtime_secs: i64, - _mtime_nanos: u32, - ) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn lutime_async( - &self, - path: PathBuf, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - self.lutime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) - } - - fn write_file_sync( - &self, - path: &Path, - options: OpenOptions, - _access_check: Option, - data: &[u8], - ) -> FsResult<()> { - let path = normalize_path(path); - let has_parent_dir = path - .parent() - .and_then(|parent| self.get_entry(parent)) - .map(|e| matches!(*e, PathEntry::Dir)) - .unwrap_or(false); - if !has_parent_dir { - return Err(FsError::Io(Error::new( - ErrorKind::NotFound, - "Parent directory does not exist", - ))); - } - let mut entries = self.entries.lock(); - let entry = entries.entry(path.clone()); - match entry { - Entry::Occupied(mut entry) => { - if let PathEntry::File(existing_data) = &**entry.get() { - if options.create_new { - return Err(FsError::Io(Error::new( - ErrorKind::AlreadyExists, - "File already exists", - ))); - } - if options.append { - let mut new_data = existing_data.clone(); - new_data.extend_from_slice(data); - entry.insert(Arc::new(PathEntry::File(new_data))); - } else { - entry.insert(Arc::new(PathEntry::File(data.to_vec()))); - } - Ok(()) - } else { - Err(FsError::Io(Error::new( - ErrorKind::InvalidInput, - "Not a file", - ))) - } - } - Entry::Vacant(entry) => { - entry.insert(Arc::new(PathEntry::File(data.to_vec()))); - Ok(()) - } - } - } - - async fn write_file_async<'a>( - &'a self, - path: PathBuf, - options: OpenOptions, - access_check: Option>, - data: Vec, - ) -> FsResult<()> { - self.write_file_sync(&path, options, access_check, &data) - } - - fn read_file_sync( - &self, - path: &Path, - _access_check: Option, - ) -> FsResult> { - let entry = self.get_entry(path); - match entry { - Some(entry) => match &*entry { - PathEntry::File(data) => Ok(Cow::Owned(data.clone())), - PathEntry::Dir => Err(FsError::Io(Error::new( - ErrorKind::InvalidInput, - "Is a directory", - ))), - }, - None => Err(FsError::Io(Error::new(ErrorKind::NotFound, "Not found"))), - } - } - async fn read_file_async<'a>( - &'a self, - path: PathBuf, - access_check: Option>, - ) -> FsResult> { - self.read_file_sync(&path, access_check) - } -} diff --git a/ext/fs/interface.rs b/ext/fs/interface.rs index 28a49c5d9b..0e753d684c 100644 --- a/ext/fs/interface.rs +++ b/ext/fs/interface.rs @@ -71,7 +71,7 @@ pub enum FsFileType { } /// WARNING: This is part of the public JS Deno API. 
-#[derive(Serialize)] +#[derive(Debug, Clone, Serialize)] #[serde(rename_all = "camelCase")] pub struct FsDirEntry { pub name: String, diff --git a/ext/fs/lib.rs b/ext/fs/lib.rs index 26fac1e79f..360400df0d 100644 --- a/ext/fs/lib.rs +++ b/ext/fs/lib.rs @@ -1,12 +1,10 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -mod in_memory_fs; mod interface; mod ops; mod std_fs; pub mod sync; -pub use crate::in_memory_fs::InMemoryFs; pub use crate::interface::AccessCheckCb; pub use crate::interface::AccessCheckFn; pub use crate::interface::FileSystem; @@ -17,7 +15,6 @@ pub use crate::interface::OpenOptions; pub use crate::ops::FsOpsError; pub use crate::ops::FsOpsErrorKind; pub use crate::ops::OperationError; -pub use crate::ops::V8MaybeStaticStr; pub use crate::std_fs::RealFs; pub use crate::sync::MaybeSend; pub use crate::sync::MaybeSync; diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index 521ff65471..ac0a8901d7 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -19,7 +19,6 @@ use crate::FsPermissions; use crate::OpenOptions; use boxed_error::Boxed; use deno_core::op2; -use deno_core::v8; use deno_core::CancelFuture; use deno_core::CancelHandle; use deno_core::FastString; @@ -27,7 +26,6 @@ use deno_core::JsBuffer; use deno_core::OpState; use deno_core::ResourceId; use deno_core::ToJsBuffer; -use deno_core::ToV8; use deno_io::fs::FileResource; use deno_io::fs::FsError; use deno_io::fs::FsStat; @@ -1384,51 +1382,12 @@ where Ok(buf.into_owned().into_boxed_slice().into()) } -// todo(https://github.com/denoland/deno_core/pull/986): remove -// when upgrading deno_core -#[derive(Debug)] -pub struct FastStringV8AllocationError; - -impl std::error::Error for FastStringV8AllocationError {} - -impl std::fmt::Display for FastStringV8AllocationError { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!( - f, - "failed to allocate string; buffer exceeds maximum length" - ) - } -} - -/// Maintains a static reference to the string if possible. -pub struct V8MaybeStaticStr(pub Cow<'static, str>); - -impl<'s> ToV8<'s> for V8MaybeStaticStr { - type Error = FastStringV8AllocationError; - - #[inline] - fn to_v8( - self, - scope: &mut v8::HandleScope<'s>, - ) -> Result, Self::Error> { - Ok( - match self.0 { - Cow::Borrowed(text) => FastString::from_static(text), - Cow::Owned(value) => value.into(), - } - .v8_string(scope) - .map_err(|_| FastStringV8AllocationError)? - .into(), - ) - } -} - #[op2(stack_trace)] #[to_v8] pub fn op_fs_read_file_text_sync
<P>( state: &mut OpState, #[string] path: String, -) -> Result<V8MaybeStaticStr, FsOpsError> +) -> Result<FastString, FsOpsError> where P: FsPermissions + 'static, { @@ -1440,7 +1399,10 @@ where let str = fs .read_text_file_lossy_sync(&path, Some(&mut access_check)) .map_err(|error| map_permission_error("readfile", error, &path))?; - Ok(V8MaybeStaticStr(str)) + Ok(match str { + Cow::Borrowed(text) => FastString::from_static(text), + Cow::Owned(value) => value.into(), + }) } #[op2(async, stack_trace)] pub async fn op_fs_read_file_text_async
<P>
( state: Rc>, #[string] path: String, #[smi] cancel_rid: Option, -) -> Result +) -> Result where P: FsPermissions + 'static, { @@ -1483,7 +1445,10 @@ where .map_err(|error| map_permission_error("readfile", error, &path))? }; - Ok(V8MaybeStaticStr(str)) + Ok(match str { + Cow::Borrowed(text) => FastString::from_static(text), + Cow::Owned(value) => value.into(), + }) } fn to_seek_from(offset: i64, whence: i32) -> Result { diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index 86ad213160..c28fe9f915 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -26,7 +26,7 @@ use crate::interface::FsFileType; use crate::FileSystem; use crate::OpenOptions; -#[derive(Debug, Clone)] +#[derive(Debug, Default, Clone)] pub struct RealFs; #[async_trait::async_trait(?Send)] @@ -61,7 +61,7 @@ impl FileSystem for RealFs { umask(Mode::from_bits_truncate(mask as mode_t)) } else { // If no mask provided, we query the current. Requires two syscalls. - let prev = umask(Mode::from_bits_truncate(0o777)); + let prev = umask(Mode::from_bits_truncate(0)); let _ = umask(prev); prev }; @@ -723,30 +723,34 @@ fn cp(from: &Path, to: &Path) -> FsResult<()> { } } - match (fs::metadata(to), fs::symlink_metadata(to)) { - (Ok(m), _) if m.is_dir() => cp_( - source_meta, - from, - &to.join(from.file_name().ok_or_else(|| { - io::Error::new( - io::ErrorKind::InvalidInput, - "the source path is not a valid file", - ) - })?), - )?, - (_, Ok(m)) if is_identical(&source_meta, &m) => { + if let Ok(m) = fs::metadata(to) { + if m.is_dir() { + return cp_( + source_meta, + from, + &to.join(from.file_name().ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + "the source path is not a valid file", + ) + })?), + ); + } + } + + if let Ok(m) = fs::symlink_metadata(to) { + if is_identical(&source_meta, &m) { return Err( io::Error::new( io::ErrorKind::InvalidInput, "the source and destination are the same file", ) .into(), - ) + ); } - _ => cp_(source_meta, from, to)?, } - Ok(()) + cp_(source_meta, from, to) } #[cfg(not(windows))] @@ -757,11 +761,16 @@ fn stat(path: &Path) -> FsResult { #[cfg(windows)] fn stat(path: &Path) -> FsResult { - let metadata = fs::metadata(path)?; - let mut fsstat = FsStat::from_std(metadata); + use std::os::windows::fs::OpenOptionsExt; use winapi::um::winbase::FILE_FLAG_BACKUP_SEMANTICS; - let path = path.canonicalize()?; - stat_extra(&mut fsstat, &path, FILE_FLAG_BACKUP_SEMANTICS)?; + + let mut opts = fs::OpenOptions::new(); + opts.access_mode(0); // no read or write + opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS); + let file = opts.open(path)?; + let metadata = file.metadata()?; + let mut fsstat = FsStat::from_std(metadata); + stat_extra(&file, &mut fsstat)?; Ok(fsstat) } @@ -773,34 +782,24 @@ fn lstat(path: &Path) -> FsResult { #[cfg(windows)] fn lstat(path: &Path) -> FsResult { + use std::os::windows::fs::OpenOptionsExt; + use winapi::um::winbase::FILE_FLAG_BACKUP_SEMANTICS; use winapi::um::winbase::FILE_FLAG_OPEN_REPARSE_POINT; - let metadata = fs::symlink_metadata(path)?; + let mut opts = fs::OpenOptions::new(); + opts.access_mode(0); // no read or write + opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT); + let file = opts.open(path)?; + let metadata = file.metadata()?; let mut fsstat = FsStat::from_std(metadata); - stat_extra( - &mut fsstat, - path, - FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, - )?; + stat_extra(&file, &mut fsstat)?; Ok(fsstat) } #[cfg(windows)] -fn stat_extra( - fsstat: &mut FsStat, - path: &Path, - file_flags: 
winapi::shared::minwindef::DWORD, -) -> FsResult<()> { - use std::os::windows::prelude::OsStrExt; - - use winapi::um::fileapi::CreateFileW; - use winapi::um::fileapi::OPEN_EXISTING; - use winapi::um::handleapi::CloseHandle; - use winapi::um::handleapi::INVALID_HANDLE_VALUE; - use winapi::um::winnt::FILE_SHARE_DELETE; - use winapi::um::winnt::FILE_SHARE_READ; - use winapi::um::winnt::FILE_SHARE_WRITE; +fn stat_extra(file: &std::fs::File, fsstat: &mut FsStat) -> FsResult<()> { + use std::os::windows::io::AsRawHandle; unsafe fn get_dev( handle: winapi::shared::ntdef::HANDLE, @@ -869,23 +868,9 @@ fn stat_extra( // SAFETY: winapi calls unsafe { - let mut path: Vec<_> = path.as_os_str().encode_wide().collect(); - path.push(0); - let file_handle = CreateFileW( - path.as_ptr(), - 0, - FILE_SHARE_READ | FILE_SHARE_DELETE | FILE_SHARE_WRITE, - std::ptr::null_mut(), - OPEN_EXISTING, - file_flags, - std::ptr::null_mut(), - ); - if file_handle == INVALID_HANDLE_VALUE { - return Err(std::io::Error::last_os_error().into()); - } + let file_handle = file.as_raw_handle(); - let result = get_dev(file_handle); - fsstat.dev = result?; + fsstat.dev = get_dev(file_handle)?; if let Ok(file_info) = query_file_information(file_handle) { fsstat.ctime = Some(windows_time_to_unix_time_msec( @@ -924,7 +909,6 @@ fn stat_extra( } } - CloseHandle(file_handle); Ok(()) } } diff --git a/ext/fs/sync.rs b/ext/fs/sync.rs index 6a913f658a..06694f1dc4 100644 --- a/ext/fs/sync.rs +++ b/ext/fs/sync.rs @@ -21,3 +21,9 @@ mod inner { pub trait MaybeSend {} impl MaybeSend for T where T: ?Sized {} } + +#[allow(clippy::disallowed_types)] +#[inline] +pub fn new_rc(value: T) -> MaybeArc { + MaybeArc::new(value) +} diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 27a91ca61b..e7aaad2fc0 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.180.0" +version = "0.182.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index 1b73bad348..9298c654c1 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.92.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index c69a962fa3..c97aa75552 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.90.0" +version = "0.92.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/remote.rs b/ext/kv/remote.rs index 1830aa67ee..891786e319 100644 --- a/ext/kv/remote.rs +++ b/ext/kv/remote.rs @@ -122,9 +122,7 @@ impl RemoteTransport for FetchClient { headers: http::HeaderMap, body: Bytes, ) -> Result<(Url, http::StatusCode, Self::Response), anyhow::Error> { - let body = http_body_util::Full::new(body) - .map_err(|never| match never {}) - .boxed(); + let body = deno_fetch::ReqBody::full(body); let mut req = http::Request::new(body); *req.method_mut() = http::Method::POST; *req.uri_mut() = url.as_str().parse()?; diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index 783b4b7cff..5d726b3e31 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.113.0" +version = "0.115.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/sym/Cargo.toml b/ext/napi/sym/Cargo.toml index a3dd56e2bf..22228bd2f6 100644 --- 
a/ext/napi/sym/Cargo.toml +++ b/ext/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.112.0" +version = "0.114.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/03_quic.js b/ext/net/03_quic.js new file mode 100644 index 0000000000..e100e7bd64 --- /dev/null +++ b/ext/net/03_quic.js @@ -0,0 +1,367 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +import { core, primordials } from "ext:core/mod.js"; +import { + op_quic_accept, + op_quic_accept_bi, + op_quic_accept_incoming, + op_quic_accept_uni, + op_quic_close_connection, + op_quic_close_endpoint, + op_quic_connect, + op_quic_connection_closed, + op_quic_connection_get_protocol, + op_quic_connection_get_remote_addr, + op_quic_endpoint_get_addr, + op_quic_get_send_stream_priority, + op_quic_incoming_accept, + op_quic_incoming_ignore, + op_quic_incoming_local_ip, + op_quic_incoming_refuse, + op_quic_incoming_remote_addr, + op_quic_incoming_remote_addr_validated, + op_quic_listen, + op_quic_max_datagram_size, + op_quic_open_bi, + op_quic_open_uni, + op_quic_read_datagram, + op_quic_send_datagram, + op_quic_set_send_stream_priority, +} from "ext:core/ops"; +import { + getWritableStreamResourceBacking, + ReadableStream, + readableStreamForRid, + WritableStream, + writableStreamForRid, +} from "ext:deno_web/06_streams.js"; +import { loadTlsKeyPair } from "ext:deno_net/02_tls.js"; +const { + BadResourcePrototype, +} = core; +const { + Uint8Array, + TypedArrayPrototypeSubarray, + SymbolAsyncIterator, + SafePromisePrototypeFinally, + ObjectPrototypeIsPrototypeOf, +} = primordials; + +class QuicSendStream extends WritableStream { + get sendOrder() { + return op_quic_get_send_stream_priority( + getWritableStreamResourceBacking(this).rid, + ); + } + + set sendOrder(p) { + op_quic_set_send_stream_priority( + getWritableStreamResourceBacking(this).rid, + p, + ); + } +} + +class QuicReceiveStream extends ReadableStream {} + +function readableStream(rid, closed) { + // stream can be indirectly closed by closing connection. + SafePromisePrototypeFinally(closed, () => { + core.tryClose(rid); + }); + return readableStreamForRid(rid, true, QuicReceiveStream); +} + +function writableStream(rid, closed) { + // stream can be indirectly closed by closing connection. 
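// (As in readableStream above, core.tryClose is used rather than a plain
// close: by the time the connection's closed-promise settles, normal stream
// teardown may already have released the rid, and tryClose tolerates that.)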
+ SafePromisePrototypeFinally(closed, () => { + core.tryClose(rid); + }); + return writableStreamForRid(rid, true, QuicSendStream); +} + +class QuicBidirectionalStream { + #readable; + #writable; + + constructor(txRid, rxRid, closed) { + this.#readable = readableStream(rxRid, closed); + this.#writable = writableStream(txRid, closed); + } + + get readable() { + return this.#readable; + } + + get writable() { + return this.#writable; + } +} + +async function* bidiStream(conn, closed) { + try { + while (true) { + const r = await op_quic_accept_bi(conn); + yield new QuicBidirectionalStream(r[0], r[1], closed); + } + } catch (error) { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { + return; + } + throw error; + } +} + +async function* uniStream(conn, closed) { + try { + while (true) { + const uniRid = await op_quic_accept_uni(conn); + yield readableStream(uniRid, closed); + } + } catch (error) { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { + return; + } + throw error; + } +} + +class QuicConn { + #resource; + #bidiStream = null; + #uniStream = null; + #closed; + + constructor(resource) { + this.#resource = resource; + + this.#closed = op_quic_connection_closed(this.#resource); + core.unrefOpPromise(this.#closed); + } + + get protocol() { + return op_quic_connection_get_protocol(this.#resource); + } + + get remoteAddr() { + return op_quic_connection_get_remote_addr(this.#resource); + } + + async createBidirectionalStream( + { sendOrder, waitUntilAvailable } = { __proto__: null }, + ) { + const { 0: txRid, 1: rxRid } = await op_quic_open_bi( + this.#resource, + waitUntilAvailable ?? false, + ); + if (sendOrder !== null && sendOrder !== undefined) { + op_quic_set_send_stream_priority(txRid, sendOrder); + } + return new QuicBidirectionalStream(txRid, rxRid, this.#closed); + } + + async createUnidirectionalStream( + { sendOrder, waitUntilAvailable } = { __proto__: null }, + ) { + const rid = await op_quic_open_uni( + this.#resource, + waitUntilAvailable ?? 
false, + ); + if (sendOrder !== null && sendOrder !== undefined) { + op_quic_set_send_stream_priority(rid, sendOrder); + } + return writableStream(rid, this.#closed); + } + + get incomingBidirectionalStreams() { + if (this.#bidiStream === null) { + this.#bidiStream = ReadableStream.from( + bidiStream(this.#resource, this.#closed), + ); + } + return this.#bidiStream; + } + + get incomingUnidirectionalStreams() { + if (this.#uniStream === null) { + this.#uniStream = ReadableStream.from( + uniStream(this.#resource, this.#closed), + ); + } + return this.#uniStream; + } + + get maxDatagramSize() { + return op_quic_max_datagram_size(this.#resource); + } + + async readDatagram(p) { + const view = p || new Uint8Array(this.maxDatagramSize); + const nread = await op_quic_read_datagram(this.#resource, view); + return TypedArrayPrototypeSubarray(view, 0, nread); + } + + async sendDatagram(data) { + await op_quic_send_datagram(this.#resource, data); + } + + get closed() { + core.refOpPromise(this.#closed); + return this.#closed; + } + + close({ closeCode, reason }) { + op_quic_close_connection(this.#resource, closeCode, reason); + } +} + +class QuicIncoming { + #incoming; + + constructor(incoming) { + this.#incoming = incoming; + } + + get localIp() { + return op_quic_incoming_local_ip(this.#incoming); + } + + get remoteAddr() { + return op_quic_incoming_remote_addr(this.#incoming); + } + + get remoteAddressValidated() { + return op_quic_incoming_remote_addr_validated(this.#incoming); + } + + async accept() { + const conn = await op_quic_incoming_accept(this.#incoming); + return new QuicConn(conn); + } + + refuse() { + op_quic_incoming_refuse(this.#incoming); + } + + ignore() { + op_quic_incoming_ignore(this.#incoming); + } +} + +class QuicListener { + #endpoint; + + constructor(endpoint) { + this.#endpoint = endpoint; + } + + get addr() { + return op_quic_endpoint_get_addr(this.#endpoint); + } + + async accept() { + const conn = await op_quic_accept(this.#endpoint); + return new QuicConn(conn); + } + + async incoming() { + const incoming = await op_quic_accept_incoming(this.#endpoint); + return new QuicIncoming(incoming); + } + + async next() { + let conn; + try { + conn = await this.accept(); + } catch (error) { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { + return { value: undefined, done: true }; + } + throw error; + } + return { value: conn, done: false }; + } + + [SymbolAsyncIterator]() { + return this; + } + + close({ closeCode, reason }) { + op_quic_close_endpoint(this.#endpoint, closeCode, reason); + } +} + +async function listenQuic( + { + hostname, + port, + cert, + key, + alpnProtocols, + keepAliveInterval, + maxIdleTimeout, + maxConcurrentBidirectionalStreams, + maxConcurrentUnidirectionalStreams, + }, +) { + hostname = hostname || "0.0.0.0"; + const keyPair = loadTlsKeyPair("Deno.listenQuic", { cert, key }); + const endpoint = await op_quic_listen( + { hostname, port }, + { alpnProtocols }, + { + keepAliveInterval, + maxIdleTimeout, + maxConcurrentBidirectionalStreams, + maxConcurrentUnidirectionalStreams, + }, + keyPair, + ); + return new QuicListener(endpoint); +} + +async function connectQuic( + { + hostname, + port, + serverName, + caCerts, + cert, + key, + alpnProtocols, + keepAliveInterval, + maxIdleTimeout, + maxConcurrentBidirectionalStreams, + maxConcurrentUnidirectionalStreams, + congestionControl, + }, +) { + const keyPair = loadTlsKeyPair("Deno.connectQuic", { cert, key }); + const conn = await op_quic_connect( + { hostname, port }, + { + caCerts, + 
alpnProtocols, + serverName, + }, + { + keepAliveInterval, + maxIdleTimeout, + maxConcurrentBidirectionalStreams, + maxConcurrentUnidirectionalStreams, + congestionControl, + }, + keyPair, + ); + return new QuicConn(conn); +} + +export { + connectQuic, + listenQuic, + QuicBidirectionalStream, + QuicConn, + QuicIncoming, + QuicListener, + QuicReceiveStream, + QuicSendStream, +}; diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index f5aa32c8ce..eaee7bfb4b 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.174.0" +version = "0.176.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -20,6 +20,7 @@ deno_tls.workspace = true hickory-proto = "0.25.0-alpha.4" hickory-resolver.workspace = true pin-project.workspace = true +quinn = { version = "0.11.6", default-features = false, features = ["runtime-tokio", "rustls", "ring"] } rustls-tokio-stream.workspace = true serde.workspace = true socket2.workspace = true diff --git a/ext/net/lib.deno_net.d.ts b/ext/net/lib.deno_net.d.ts index 827081f2a4..958474cbbd 100644 --- a/ext/net/lib.deno_net.d.ts +++ b/ext/net/lib.deno_net.d.ts @@ -450,5 +450,293 @@ declare namespace Deno { options?: StartTlsOptions, ): Promise; + /** + * **UNSTABLE**: New API, yet to be vetted. + * @experimental + * @category Network + */ + export interface QuicTransportOptions { + /** Period of inactivity before sending a keep-alive packet. Keep-alive + * packets prevent an inactive but otherwise healthy connection from timing + * out. Only one side of any given connection needs keep-alive enabled for + * the connection to be preserved. + * @default {undefined} + */ + keepAliveInterval?: number; + /** Maximum duration of inactivity to accept before timing out the + * connection. The true idle timeout is the minimum of this and the peer’s + * own max idle timeout. + * @default {undefined} + */ + maxIdleTimeout?: number; + /** Maximum number of incoming bidirectional streams that may be open + * concurrently. + * @default {100} + */ + maxConcurrentBidirectionalStreams?: number; + /** Maximum number of incoming unidirectional streams that may be open + * concurrently. + * @default {100} + */ + maxConcurrentUnidirectionalStreams?: number; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * @experimental + * @category Network + */ + export interface ListenQuicOptions extends QuicTransportOptions { + /** The port to connect to. */ + port: number; + /** + * A literal IP address or host name that can be resolved to an IP address. + * @default {"0.0.0.0"} + */ + hostname?: string; + /** Server private key in PEM format */ + key: string; + /** Cert chain in PEM format */ + cert: string; + /** Application-Layer Protocol Negotiation (ALPN) protocols to announce to + * the client. QUIC requires the use of ALPN. + */ + alpnProtocols: string[]; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * Listen announces on the local transport address over QUIC. + * + * ```ts + * const lstnr = await Deno.listenQuic({ port: 443, cert: "...", key: "...", alpnProtocols: ["h3"] }); + * ``` + * + * Requires `allow-net` permission. + * + * @experimental + * @tags allow-net + * @category Network + */ + export function listenQuic(options: ListenQuicOptions): Promise; + + /** + * **UNSTABLE**: New API, yet to be vetted. + * @experimental + * @category Network + */ + export interface ConnectQuicOptions extends QuicTransportOptions { + /** The port to connect to. 
*/ + port: number; + /** A literal IP address or host name that can be resolved to an IP address. */ + hostname: string; + /** The name used for validating the certificate provided by the server. If + * not provided, defaults to `hostname`. */ + serverName?: string | undefined; + /** Application-Layer Protocol Negotiation (ALPN) protocols supported by + * the client. QUIC requires the use of ALPN. + */ + alpnProtocols: string[]; + /** A list of root certificates that will be used in addition to the + * default root certificates to verify the peer's certificate. + * + * Must be in PEM format. */ + caCerts?: string[]; + /** + * The congestion control algorithm used when sending data over this connection. + */ + congestionControl?: "throughput" | "low-latency"; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * Establishes a secure connection over QUIC using a hostname and port. The + * cert file is optional and if not included Mozilla's root certificates will + * be used. See also https://github.com/ctz/webpki-roots for specifics. + * + * ```ts + * const caCert = await Deno.readTextFile("./certs/my_custom_root_CA.pem"); + * const conn1 = await Deno.connectQuic({ hostname: "example.com", port: 443, alpnProtocols: ["h3"] }); + * const conn2 = await Deno.connectQuic({ caCerts: [caCert], hostname: "example.com", port: 443, alpnProtocols: ["h3"] }); + * ``` + * + * Requires `allow-net` permission. + * + * @experimental + * @tags allow-net + * @category Network + */ + export function connectQuic(options: ConnectQuicOptions): Promise; + + /** + * **UNSTABLE**: New API, yet to be vetted. + * @experimental + * @category Network + */ + export interface QuicCloseInfo { + /** A number representing the error code for the error. */ + closeCode: number; + /** A string representing the reason for closing the connection. */ + reason: string; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * An incoming connection for which the server has not yet begun its part of the handshake. + * + * @experimental + * @category Network + */ + export interface QuicIncoming { + /** + * The local IP address which was used when the peer established the connection. + */ + readonly localIp: string; + + /** + * The peer’s UDP address. + */ + readonly remoteAddr: NetAddr; + + /** + * Whether the socket address that is initiating this connection has proven that they can receive traffic. + */ + readonly remoteAddressValidated: boolean; + + /** + * Accept this incoming connection. + */ + accept(): Promise; + + /** + * Refuse this incoming connection. + */ + refuse(): void; + + /** + * Ignore this incoming connection attempt, not sending any packet in response. + */ + ignore(): void; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * Specialized listener that accepts QUIC connections. + * + * @experimental + * @category Network + */ + export interface QuicListener extends AsyncIterable { + /** Return the address of the `QuicListener`. */ + readonly addr: NetAddr; + + /** Waits for and resolves to the next connection to the `QuicListener`. */ + accept(): Promise; + + /** Waits for and resolves to the next incoming request to the `QuicListener`. */ + incoming(): Promise; + + /** Close closes the listener. Any pending accept promises will be rejected + * with errors. */ + close(info: QuicCloseInfo): void; + + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. 
+ * + * @experimental + * @category Network + */ + export interface QuicSendStreamOptions { + /** Indicates the send priority of this stream relative to other streams for + * which the value has been set. + * @default {undefined} + */ + sendOrder?: number; + /** Wait until there is sufficient flow credit to create the stream. + * @default {false} + */ + waitUntilAvailable?: boolean; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * @experimental + * @category Network + */ + export interface QuicConn { + /** Close closes the listener. Any pending accept promises will be rejected + * with errors. */ + close(info: QuicCloseInfo): void; + /** Opens and returns a bidirectional stream. */ + createBidirectionalStream( + options?: QuicSendStreamOptions, + ): Promise; + /** Opens and returns a unidirectional stream. */ + createUnidirectionalStream( + options?: QuicSendStreamOptions, + ): Promise; + /** Send a datagram. The provided data cannot be larger than + * `maxDatagramSize`. */ + sendDatagram(data: Uint8Array): Promise; + /** Receive a datagram. If no buffer is provider, one will be allocated. + * The size of the provided buffer should be at least `maxDatagramSize`. */ + readDatagram(buffer?: Uint8Array): Promise; + + /** Return the remote address for the connection. Clients may change + * addresses at will, for example when switching to a cellular internet + * connection. + */ + readonly remoteAddr: NetAddr; + /** The negotiated ALPN protocol, if provided. */ + readonly protocol: string | undefined; + /** Returns a promise that resolves when the connection is closed. */ + readonly closed: Promise; + /** A stream of bidirectional streams opened by the peer. */ + readonly incomingBidirectionalStreams: ReadableStream< + QuicBidirectionalStream + >; + /** A stream of unidirectional streams opened by the peer. */ + readonly incomingUnidirectionalStreams: ReadableStream; + /** Returns the datagram stream for sending and receiving datagrams. */ + readonly maxDatagramSize: number; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * @experimental + * @category Network + */ + export interface QuicBidirectionalStream { + /** Returns a QuicReceiveStream instance that can be used to read incoming data. */ + readonly readable: QuicReceiveStream; + /** Returns a QuicSendStream instance that can be used to write outgoing data. */ + readonly writable: QuicSendStream; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * @experimental + * @category Network + */ + export interface QuicSendStream extends WritableStream { + /** Indicates the send priority of this stream relative to other streams for + * which the value has been set. */ + sendOrder: number; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * @experimental + * @category Network + */ + export interface QuicReceiveStream extends ReadableStream {} + export {}; // only export exports } diff --git a/ext/net/lib.rs b/ext/net/lib.rs index f482750b38..04b3f80010 100644 --- a/ext/net/lib.rs +++ b/ext/net/lib.rs @@ -5,6 +5,7 @@ pub mod ops; pub mod ops_tls; #[cfg(unix)] pub mod ops_unix; +mod quic; pub mod raw; pub mod resolve_addr; pub mod tcp; @@ -158,8 +159,34 @@ deno_core::extension!(deno_net, ops_unix::op_node_unstable_net_listen_unixpacket
<P>, ops_unix::op_net_recv_unixpacket, ops_unix::op_net_send_unixpacket<P>, + + quic::op_quic_accept, + quic::op_quic_accept_bi, + quic::op_quic_accept_incoming, + quic::op_quic_accept_uni, + quic::op_quic_close_connection, + quic::op_quic_close_endpoint, + quic::op_quic_connection_closed, + quic::op_quic_connection_get_protocol, + quic::op_quic_connection_get_remote_addr, + quic::op_quic_connect<P>, + quic::op_quic_endpoint_get_addr, + quic::op_quic_get_send_stream_priority, + quic::op_quic_incoming_accept, + quic::op_quic_incoming_refuse, + quic::op_quic_incoming_ignore, + quic::op_quic_incoming_local_ip, + quic::op_quic_incoming_remote_addr, + quic::op_quic_incoming_remote_addr_validated, + quic::op_quic_listen<P>
, + quic::op_quic_max_datagram_size, + quic::op_quic_open_bi, + quic::op_quic_open_uni, + quic::op_quic_read_datagram, + quic::op_quic_send_datagram, + quic::op_quic_set_send_stream_priority, ], - esm = [ "01_net.js", "02_tls.js" ], + esm = [ "01_net.js", "02_tls.js", "03_quic.js" ], options = { root_cert_store_provider: Option>, unsafely_ignore_certificate_errors: Option>, diff --git a/ext/net/quic.rs b/ext/net/quic.rs new file mode 100644 index 0000000000..16f68364be --- /dev/null +++ b/ext/net/quic.rs @@ -0,0 +1,660 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use crate::resolve_addr::resolve_addr; +use crate::DefaultTlsOptions; +use crate::NetPermissions; +use crate::UnsafelyIgnoreCertificateErrors; +use deno_core::error::bad_resource; +use deno_core::error::generic_error; +use deno_core::error::AnyError; +use deno_core::futures::task::noop_waker_ref; +use deno_core::op2; +use deno_core::AsyncRefCell; +use deno_core::AsyncResult; +use deno_core::BufView; +use deno_core::GarbageCollected; +use deno_core::JsBuffer; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use deno_core::WriteOutcome; +use deno_tls::create_client_config; +use deno_tls::SocketUse; +use deno_tls::TlsKeys; +use deno_tls::TlsKeysHolder; +use quinn::crypto::rustls::QuicClientConfig; +use quinn::crypto::rustls::QuicServerConfig; +use serde::Deserialize; +use serde::Serialize; +use std::borrow::Cow; +use std::cell::RefCell; +use std::future::Future; +use std::net::IpAddr; +use std::net::Ipv4Addr; +use std::net::Ipv6Addr; +use std::net::SocketAddrV4; +use std::net::SocketAddrV6; +use std::pin::pin; +use std::rc::Rc; +use std::sync::Arc; +use std::task::Context; +use std::task::Poll; +use std::time::Duration; + +#[derive(Debug, Deserialize, Serialize)] +struct Addr { + hostname: String, + port: u16, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct ListenArgs { + alpn_protocols: Option>, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct TransportConfig { + keep_alive_interval: Option, + max_idle_timeout: Option, + max_concurrent_bidirectional_streams: Option, + max_concurrent_unidirectional_streams: Option, + preferred_address_v4: Option, + preferred_address_v6: Option, + congestion_control: Option, +} + +impl TryInto for TransportConfig { + type Error = AnyError; + + fn try_into(self) -> Result { + let mut cfg = quinn::TransportConfig::default(); + + if let Some(interval) = self.keep_alive_interval { + cfg.keep_alive_interval(Some(Duration::from_millis(interval))); + } + + if let Some(timeout) = self.max_idle_timeout { + cfg.max_idle_timeout(Some(Duration::from_millis(timeout).try_into()?)); + } + + if let Some(max) = self.max_concurrent_bidirectional_streams { + cfg.max_concurrent_bidi_streams(max.into()); + } + + if let Some(max) = self.max_concurrent_unidirectional_streams { + cfg.max_concurrent_uni_streams(max.into()); + } + + if let Some(v) = self.congestion_control { + let controller: Option< + Arc, + > = match v.as_str() { + "low-latency" => { + Some(Arc::new(quinn::congestion::BbrConfig::default())) + } + "throughput" => { + Some(Arc::new(quinn::congestion::CubicConfig::default())) + } + _ => None, + }; + if let Some(controller) = controller { + cfg.congestion_controller_factory(controller); + } + } + + Ok(cfg) + } +} + +struct EndpointResource(quinn::Endpoint, Arc); + +impl GarbageCollected for EndpointResource {} + +#[op2(async)] +#[cppgc] +pub(crate) async fn op_quic_listen( + 
state: Rc>, + #[serde] addr: Addr, + #[serde] args: ListenArgs, + #[serde] transport_config: TransportConfig, + #[cppgc] keys: &TlsKeysHolder, +) -> Result +where + NP: NetPermissions + 'static, +{ + state + .borrow_mut() + .borrow_mut::() + .check_net(&(&addr.hostname, Some(addr.port)), "Deno.listenQuic()")?; + + let addr = resolve_addr(&addr.hostname, addr.port) + .await? + .next() + .ok_or_else(|| generic_error("No resolved address found"))?; + + let TlsKeys::Static(deno_tls::TlsKey(cert, key)) = keys.take() else { + unreachable!() + }; + + let mut crypto = + quinn::rustls::ServerConfig::builder_with_protocol_versions(&[ + &quinn::rustls::version::TLS13, + ]) + .with_no_client_auth() + .with_single_cert(cert.clone(), key.clone_key())?; + + if let Some(alpn_protocols) = args.alpn_protocols { + crypto.alpn_protocols = alpn_protocols + .into_iter() + .map(|alpn| alpn.into_bytes()) + .collect(); + } + + let server_config = Arc::new(QuicServerConfig::try_from(crypto)?); + let mut config = quinn::ServerConfig::with_crypto(server_config.clone()); + config.preferred_address_v4(transport_config.preferred_address_v4); + config.preferred_address_v6(transport_config.preferred_address_v6); + config.transport_config(Arc::new(transport_config.try_into()?)); + let endpoint = quinn::Endpoint::server(config, addr)?; + + Ok(EndpointResource(endpoint, server_config)) +} + +#[op2] +#[serde] +pub(crate) fn op_quic_endpoint_get_addr( + #[cppgc] endpoint: &EndpointResource, +) -> Result { + let addr = endpoint.0.local_addr()?; + let addr = Addr { + hostname: format!("{}", addr.ip()), + port: addr.port(), + }; + Ok(addr) +} + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct CloseInfo { + close_code: u64, + reason: String, +} + +#[op2(fast)] +pub(crate) fn op_quic_close_endpoint( + #[cppgc] endpoint: &EndpointResource, + #[bigint] close_code: u64, + #[string] reason: String, +) -> Result<(), AnyError> { + endpoint + .0 + .close(quinn::VarInt::from_u64(close_code)?, reason.as_bytes()); + Ok(()) +} + +struct ConnectionResource(quinn::Connection); + +impl GarbageCollected for ConnectionResource {} + +#[op2(async)] +#[cppgc] +pub(crate) async fn op_quic_accept( + #[cppgc] endpoint: &EndpointResource, +) -> Result { + match endpoint.0.accept().await { + Some(incoming) => { + let conn = incoming.accept()?.await?; + Ok(ConnectionResource(conn)) + } + None => Err(bad_resource("QuicListener is closed")), + } +} + +struct IncomingResource( + RefCell>, + Arc, +); + +impl GarbageCollected for IncomingResource {} + +#[op2(async)] +#[cppgc] +pub(crate) async fn op_quic_accept_incoming( + #[cppgc] endpoint: &EndpointResource, +) -> Result { + match endpoint.0.accept().await { + Some(incoming) => Ok(IncomingResource( + RefCell::new(Some(incoming)), + endpoint.1.clone(), + )), + None => Err(bad_resource("QuicListener is closed")), + } +} + +#[op2] +#[string] +pub(crate) fn op_quic_incoming_local_ip( + #[cppgc] incoming_resource: &IncomingResource, +) -> Result, AnyError> { + let Some(incoming) = incoming_resource.0.borrow_mut().take() else { + return Err(bad_resource("QuicIncoming already used")); + }; + Ok(incoming.local_ip().map(|ip| ip.to_string())) +} + +#[op2] +#[serde] +pub(crate) fn op_quic_incoming_remote_addr( + #[cppgc] incoming_resource: &IncomingResource, +) -> Result { + let Some(incoming) = incoming_resource.0.borrow_mut().take() else { + return Err(bad_resource("QuicIncoming already used")); + }; + let addr = incoming.remote_address(); + Ok(Addr { + hostname: format!("{}", addr.ip()), 
+ port: addr.port(), + }) +} + +#[op2(fast)] +pub(crate) fn op_quic_incoming_remote_addr_validated( + #[cppgc] incoming_resource: &IncomingResource, +) -> Result { + let Some(incoming) = incoming_resource.0.borrow_mut().take() else { + return Err(bad_resource("QuicIncoming already used")); + }; + Ok(incoming.remote_address_validated()) +} + +#[op2(async)] +#[cppgc] +pub(crate) async fn op_quic_incoming_accept( + #[cppgc] incoming_resource: &IncomingResource, + #[serde] transport_config: Option, +) -> Result { + let Some(incoming) = incoming_resource.0.borrow_mut().take() else { + return Err(bad_resource("QuicIncoming already used")); + }; + let conn = match transport_config { + Some(transport_config) => { + let mut config = + quinn::ServerConfig::with_crypto(incoming_resource.1.clone()); + config.preferred_address_v4(transport_config.preferred_address_v4); + config.preferred_address_v6(transport_config.preferred_address_v6); + config.transport_config(Arc::new(transport_config.try_into()?)); + incoming.accept_with(Arc::new(config))?.await? + } + None => incoming.accept()?.await?, + }; + Ok(ConnectionResource(conn)) +} + +#[op2] +#[serde] +pub(crate) fn op_quic_incoming_refuse( + #[cppgc] incoming: &IncomingResource, +) -> Result<(), AnyError> { + let Some(incoming) = incoming.0.borrow_mut().take() else { + return Err(bad_resource("QuicIncoming already used")); + }; + incoming.refuse(); + Ok(()) +} + +#[op2] +#[serde] +pub(crate) fn op_quic_incoming_ignore( + #[cppgc] incoming: &IncomingResource, +) -> Result<(), AnyError> { + let Some(incoming) = incoming.0.borrow_mut().take() else { + return Err(bad_resource("QuicIncoming already used")); + }; + incoming.ignore(); + Ok(()) +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct ConnectArgs { + ca_certs: Option>, + alpn_protocols: Option>, + server_name: Option, +} + +#[op2(async)] +#[cppgc] +pub(crate) async fn op_quic_connect( + state: Rc>, + #[serde] addr: Addr, + #[serde] args: ConnectArgs, + #[serde] transport_config: TransportConfig, + #[cppgc] key_pair: &TlsKeysHolder, +) -> Result +where + NP: NetPermissions + 'static, +{ + state + .borrow_mut() + .borrow_mut::() + .check_net(&(&addr.hostname, Some(addr.port)), "Deno.connectQuic()")?; + + let sock_addr = resolve_addr(&addr.hostname, addr.port) + .await? + .next() + .ok_or_else(|| generic_error("No resolved address found"))?; + + let root_cert_store = state + .borrow() + .borrow::() + .root_cert_store()?; + + let unsafely_ignore_certificate_errors = state + .borrow() + .try_borrow::() + .and_then(|it| it.0.clone()); + + let ca_certs = args + .ca_certs + .unwrap_or_default() + .into_iter() + .map(|s| s.into_bytes()) + .collect::>(); + + let mut tls_config = create_client_config( + root_cert_store, + ca_certs, + unsafely_ignore_certificate_errors, + key_pair.take(), + SocketUse::GeneralSsl, + )?; + + if let Some(alpn_protocols) = args.alpn_protocols { + tls_config.alpn_protocols = + alpn_protocols.into_iter().map(|s| s.into_bytes()).collect(); + } + + let client_config = QuicClientConfig::try_from(tls_config)?; + let mut client_config = quinn::ClientConfig::new(Arc::new(client_config)); + client_config.transport_config(Arc::new(transport_config.try_into()?)); + + let local_addr = match sock_addr.ip() { + IpAddr::V4(_) => IpAddr::from(Ipv4Addr::new(0, 0, 0, 0)), + IpAddr::V6(_) => IpAddr::from(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)), + }; + + let conn = quinn::Endpoint::client((local_addr, 0).into())? 
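// (editorial note; not part of this patch) The client endpoint binds to the
// unspecified address of the same family as the resolved peer, with port 0,
// i.e. 0.0.0.0:0 for IPv4 or [::]:0 for IPv6, so the OS assigns an ephemeral
// port and the socket family always matches the target address.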
+ .connect_with( + client_config, + sock_addr, + &args.server_name.unwrap_or(addr.hostname), + )? + .await?; + + Ok(ConnectionResource(conn)) +} + +#[op2] +#[string] +pub(crate) fn op_quic_connection_get_protocol( + #[cppgc] connection: &ConnectionResource, +) -> Option { + connection + .0 + .handshake_data() + .and_then(|h| h.downcast::().ok()) + .and_then(|h| h.protocol) + .map(|p| String::from_utf8_lossy(&p).into_owned()) +} + +#[op2] +#[serde] +pub(crate) fn op_quic_connection_get_remote_addr( + #[cppgc] connection: &ConnectionResource, +) -> Result { + let addr = connection.0.remote_address(); + Ok(Addr { + hostname: format!("{}", addr.ip()), + port: addr.port(), + }) +} + +#[op2(fast)] +pub(crate) fn op_quic_close_connection( + #[cppgc] connection: &ConnectionResource, + #[bigint] close_code: u64, + #[string] reason: String, +) -> Result<(), AnyError> { + connection + .0 + .close(quinn::VarInt::from_u64(close_code)?, reason.as_bytes()); + Ok(()) +} + +#[op2(async)] +#[serde] +pub(crate) async fn op_quic_connection_closed( + #[cppgc] connection: &ConnectionResource, +) -> Result { + let e = connection.0.closed().await; + match e { + quinn::ConnectionError::LocallyClosed => Ok(CloseInfo { + close_code: 0, + reason: "".into(), + }), + quinn::ConnectionError::ApplicationClosed(i) => Ok(CloseInfo { + close_code: i.error_code.into(), + reason: String::from_utf8_lossy(&i.reason).into_owned(), + }), + e => Err(e.into()), + } +} + +struct SendStreamResource(AsyncRefCell); + +impl SendStreamResource { + fn new(stream: quinn::SendStream) -> Self { + Self(AsyncRefCell::new(stream)) + } +} + +impl Resource for SendStreamResource { + fn name(&self) -> Cow { + "quicSendStream".into() + } + + fn write(self: Rc, view: BufView) -> AsyncResult { + Box::pin(async move { + let mut r = RcRef::map(self, |r| &r.0).borrow_mut().await; + let nwritten = r.write(&view).await?; + Ok(WriteOutcome::Partial { nwritten, view }) + }) + } +} + +struct RecvStreamResource(AsyncRefCell); + +impl RecvStreamResource { + fn new(stream: quinn::RecvStream) -> Self { + Self(AsyncRefCell::new(stream)) + } +} + +impl Resource for RecvStreamResource { + fn name(&self) -> Cow { + "quicReceiveStream".into() + } + + fn read(self: Rc, limit: usize) -> AsyncResult { + Box::pin(async move { + let mut r = RcRef::map(self, |r| &r.0).borrow_mut().await; + let mut data = vec![0; limit]; + let nread = r.read(&mut data).await?.unwrap_or(0); + data.truncate(nread); + Ok(BufView::from(data)) + }) + } +} + +#[op2(async)] +#[serde] +pub(crate) async fn op_quic_accept_bi( + #[cppgc] connection: &ConnectionResource, + state: Rc>, +) -> Result<(ResourceId, ResourceId), AnyError> { + match connection.0.accept_bi().await { + Ok((tx, rx)) => { + let mut state = state.borrow_mut(); + let tx_rid = state.resource_table.add(SendStreamResource::new(tx)); + let rx_rid = state.resource_table.add(RecvStreamResource::new(rx)); + Ok((tx_rid, rx_rid)) + } + Err(e) => match e { + quinn::ConnectionError::LocallyClosed + | quinn::ConnectionError::ApplicationClosed(..) => { + Err(bad_resource("QuicConn is closed")) + } + _ => Err(e.into()), + }, + } +} + +#[op2(async)] +#[serde] +pub(crate) async fn op_quic_open_bi( + #[cppgc] connection: &ConnectionResource, + state: Rc>, + wait_for_available: bool, +) -> Result<(ResourceId, ResourceId), AnyError> { + let (tx, rx) = if wait_for_available { + connection.0.open_bi().await? 
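// (editorial note; not part of this patch) When `wait_for_available` is
// false, the branch below polls `open_bi()` a single time with a no-op waker
// instead of awaiting it: `Poll::Ready` means a stream slot was free, while
// `Poll::Pending` means the peer's concurrent-stream limit has been reached,
// in which case the op fails fast rather than queueing the open.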
+ } else { + let waker = noop_waker_ref(); + let mut cx = Context::from_waker(waker); + match pin!(connection.0.open_bi()).poll(&mut cx) { + Poll::Ready(r) => r?, + Poll::Pending => { + return Err(generic_error("Connection has reached the maximum number of outgoing concurrent bidirectional streams")); + } + } + }; + let mut state = state.borrow_mut(); + let tx_rid = state.resource_table.add(SendStreamResource::new(tx)); + let rx_rid = state.resource_table.add(RecvStreamResource::new(rx)); + Ok((tx_rid, rx_rid)) +} + +#[op2(async)] +#[serde] +pub(crate) async fn op_quic_accept_uni( + #[cppgc] connection: &ConnectionResource, + state: Rc>, +) -> Result { + match connection.0.accept_uni().await { + Ok(rx) => { + let rid = state + .borrow_mut() + .resource_table + .add(RecvStreamResource::new(rx)); + Ok(rid) + } + Err(e) => match e { + quinn::ConnectionError::LocallyClosed + | quinn::ConnectionError::ApplicationClosed(..) => { + Err(bad_resource("QuicConn is closed")) + } + _ => Err(e.into()), + }, + } +} + +#[op2(async)] +#[serde] +pub(crate) async fn op_quic_open_uni( + #[cppgc] connection: &ConnectionResource, + state: Rc>, + wait_for_available: bool, +) -> Result { + let tx = if wait_for_available { + connection.0.open_uni().await? + } else { + let waker = noop_waker_ref(); + let mut cx = Context::from_waker(waker); + match pin!(connection.0.open_uni()).poll(&mut cx) { + Poll::Ready(r) => r?, + Poll::Pending => { + return Err(generic_error("Connection has reached the maximum number of outgoing concurrent unidirectional streams")); + } + } + }; + let rid = state + .borrow_mut() + .resource_table + .add(SendStreamResource::new(tx)); + Ok(rid) +} + +#[op2(async)] +pub(crate) async fn op_quic_send_datagram( + #[cppgc] connection: &ConnectionResource, + #[buffer] buf: JsBuffer, +) -> Result<(), AnyError> { + connection.0.send_datagram_wait(buf.to_vec().into()).await?; + Ok(()) +} + +#[op2(async)] +pub(crate) async fn op_quic_read_datagram( + #[cppgc] connection: &ConnectionResource, + #[buffer] mut buf: JsBuffer, +) -> Result { + let data = connection.0.read_datagram().await?; + buf[0..data.len()].copy_from_slice(&data); + Ok(data.len() as _) +} + +#[op2(fast)] +pub(crate) fn op_quic_max_datagram_size( + #[cppgc] connection: &ConnectionResource, +) -> Result { + Ok(connection.0.max_datagram_size().unwrap_or(0) as _) +} + +#[op2(fast)] +pub(crate) fn op_quic_get_send_stream_priority( + state: Rc>, + #[smi] rid: ResourceId, +) -> Result { + let resource = state + .borrow() + .resource_table + .get::(rid)?; + let r = RcRef::map(resource, |r| &r.0).try_borrow(); + match r { + Some(s) => Ok(s.priority()?), + None => Err(generic_error("Unable to get priority")), + } +} + +#[op2(fast)] +pub(crate) fn op_quic_set_send_stream_priority( + state: Rc>, + #[smi] rid: ResourceId, + priority: i32, +) -> Result<(), AnyError> { + let resource = state + .borrow() + .resource_table + .get::(rid)?; + let r = RcRef::map(resource, |r| &r.0).try_borrow(); + match r { + Some(s) => { + s.set_priority(priority)?; + Ok(()) + } + None => Err(generic_error("Unable to set priority")), + } +} diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 22180bf952..1f8104d978 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.119.0" +version = "0.122.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -14,10 +14,10 @@ description = "Node compatibility for Deno" path = "lib.rs" [features] -sync_fs = ["deno_package_json/sync", 
"node_resolver/sync"] +sync_fs = ["deno_fs/sync_fs", "deno_package_json/sync", "node_resolver/sync"] [dependencies] -aead-gcm-stream = "0.3" +aead-gcm-stream = "0.4" aes.workspace = true async-trait.workspace = true base64.workspace = true @@ -95,6 +95,7 @@ simd-json = "0.14.0" sm3 = "0.4.2" spki.workspace = true stable_deref_trait = "1.2.0" +sys_traits = { workspace = true, features = ["real"] } thiserror.workspace = true tokio.workspace = true tokio-eld = "0.2" diff --git a/ext/node/lib.rs b/ext/node/lib.rs index d1090f6576..5c7cea75ed 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -15,7 +15,9 @@ use deno_core::url::Url; use deno_core::v8; use deno_core::v8::ExternalReference; use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::IsBuiltInNodeModuleChecker; use node_resolver::NpmPackageFolderResolverRc; +use node_resolver::PackageJsonResolverRc; use once_cell::sync::Lazy; extern crate libz_sys as zlib; @@ -183,16 +185,17 @@ fn op_node_build_os() -> String { } #[derive(Clone)] -pub struct NodeExtInitServices { +pub struct NodeExtInitServices { pub node_require_loader: NodeRequireLoaderRc, - pub node_resolver: NodeResolverRc, + pub node_resolver: NodeResolverRc, pub npm_resolver: NpmPackageFolderResolverRc, - pub pkg_json_resolver: PackageJsonResolverRc, + pub pkg_json_resolver: PackageJsonResolverRc, + pub sys: TSys, } deno_core::extension!(deno_node, deps = [ deno_io, deno_fs ], - parameters = [P: NodePermissions], + parameters = [P: NodePermissions, TSys: ExtNodeSys], ops = [ ops::blocklist::op_socket_address_parse, ops::blocklist::op_socket_address_get_serialization, @@ -260,6 +263,7 @@ deno_core::extension!(deno_node, ops::crypto::keys::op_node_derive_public_key_from_private_key, ops::crypto::keys::op_node_dh_keys_generate_and_export, ops::crypto::keys::op_node_export_private_key_der, + ops::crypto::keys::op_node_export_private_key_jwk, ops::crypto::keys::op_node_export_private_key_pem, ops::crypto::keys::op_node_export_public_key_der, ops::crypto::keys::op_node_export_public_key_pem, @@ -389,29 +393,29 @@ deno_core::extension!(deno_node, op_node_build_os, ops::require::op_require_can_parse_as_esm, ops::require::op_require_init_paths, - ops::require::op_require_node_module_paths
<P>,
+    ops::require::op_require_node_module_paths<P, TSys>,
     ops::require::op_require_proxy_path,
-    ops::require::op_require_is_deno_dir_package,
+    ops::require::op_require_is_deno_dir_package<TSys>,
     ops::require::op_require_resolve_deno_dir,
     ops::require::op_require_is_maybe_cjs,
     ops::require::op_require_is_request_relative,
     ops::require::op_require_resolve_lookup_paths,
-    ops::require::op_require_try_self_parent_path<P>,
-    ops::require::op_require_try_self<P>,
-    ops::require::op_require_real_path<P>,
+    ops::require::op_require_try_self_parent_path<P, TSys>,
+    ops::require::op_require_try_self<P, TSys>,
+    ops::require::op_require_real_path<P, TSys>,
     ops::require::op_require_path_is_absolute,
     ops::require::op_require_path_dirname,
-    ops::require::op_require_stat<P>,
+    ops::require::op_require_stat<P, TSys>,
     ops::require::op_require_path_resolve,
     ops::require::op_require_path_basename,
     ops::require::op_require_read_file<P>,
     ops::require::op_require_as_file_path,
-    ops::require::op_require_resolve_exports<P>,
-    ops::require::op_require_read_package_scope<P>,
-    ops::require::op_require_package_imports_resolve<P>,
+    ops::require::op_require_resolve_exports<P, TSys>,
+    ops::require::op_require_read_package_scope<P, TSys>,
+    ops::require::op_require_package_imports_resolve<P, TSys>,
     ops::require::op_require_break_on_next_statement,
     ops::util::op_node_guess_handle_type,
-    ops::worker_threads::op_worker_threads_filename<P>
, + ops::worker_threads::op_worker_threads_filename, ops::ipc::op_node_child_ipc_pipe, ops::ipc::op_node_ipc_write, ops::ipc::op_node_ipc_read, @@ -680,13 +684,14 @@ deno_core::extension!(deno_node, "node:zlib" = "zlib.ts", ], options = { - maybe_init: Option, + maybe_init: Option>, fs: deno_fs::FileSystemRc, }, state = |state, options| { state.put(options.fs.clone()); if let Some(init) = &options.maybe_init { + state.put(init.sys.clone()); state.put(init.node_require_loader.clone()); state.put(init.node_resolver.clone()); state.put(init.npm_resolver.clone()); @@ -810,92 +815,32 @@ deno_core::extension!(deno_node, }, ); -pub type NodeResolver = node_resolver::NodeResolver; -#[allow(clippy::disallowed_types)] -pub type NodeResolverRc = - deno_fs::sync::MaybeArc>; -pub type PackageJsonResolver = - node_resolver::PackageJsonResolver; -#[allow(clippy::disallowed_types)] -pub type PackageJsonResolverRc = deno_fs::sync::MaybeArc< - node_resolver::PackageJsonResolver, ->; - #[derive(Debug)] -pub struct DenoFsNodeResolverEnv { - fs: deno_fs::FileSystemRc, -} +pub struct RealIsBuiltInNodeModuleChecker; -impl DenoFsNodeResolverEnv { - pub fn new(fs: deno_fs::FileSystemRc) -> Self { - Self { fs } - } -} - -impl node_resolver::env::NodeResolverEnv for DenoFsNodeResolverEnv { +impl IsBuiltInNodeModuleChecker for RealIsBuiltInNodeModuleChecker { + #[inline] fn is_builtin_node_module(&self, specifier: &str) -> bool { is_builtin_node_module(specifier) } - - fn realpath_sync( - &self, - path: &std::path::Path, - ) -> std::io::Result { - self - .fs - .realpath_sync(path) - .map_err(|err| err.into_io_error()) - } - - fn stat_sync( - &self, - path: &std::path::Path, - ) -> std::io::Result { - self - .fs - .stat_sync(path) - .map(|stat| node_resolver::env::NodeResolverFsStat { - is_file: stat.is_file, - is_dir: stat.is_directory, - is_symlink: stat.is_symlink, - }) - .map_err(|err| err.into_io_error()) - } - - fn exists_sync(&self, path: &std::path::Path) -> bool { - self.fs.exists_sync(path) - } - - fn pkg_json_fs(&self) -> &dyn deno_package_json::fs::DenoPkgJsonFs { - self - } } -impl deno_package_json::fs::DenoPkgJsonFs for DenoFsNodeResolverEnv { - fn read_to_string_lossy( - &self, - path: &std::path::Path, - ) -> Result, std::io::Error> { - self - .fs - .read_text_file_lossy_sync(path, None) - .map_err(|err| err.into_io_error()) - } +pub trait ExtNodeSys: + sys_traits::BaseFsCanonicalize + + sys_traits::BaseFsMetadata + + sys_traits::BaseFsRead + + sys_traits::EnvCurrentDir + + Clone +{ } -pub struct DenoPkgJsonFsAdapter<'a>(pub &'a dyn deno_fs::FileSystem); +impl ExtNodeSys for sys_traits::impls::RealSys {} -impl<'a> deno_package_json::fs::DenoPkgJsonFs for DenoPkgJsonFsAdapter<'a> { - fn read_to_string_lossy( - &self, - path: &Path, - ) -> Result, std::io::Error> { - self - .0 - .read_text_file_lossy_sync(path, None) - .map_err(|err| err.into_io_error()) - } -} +pub type NodeResolver = + node_resolver::NodeResolver; +#[allow(clippy::disallowed_types)] +pub type NodeResolverRc = deno_fs::sync::MaybeArc>; +#[allow(clippy::disallowed_types)] pub fn create_host_defined_options<'s>( scope: &mut v8::HandleScope<'s>, diff --git a/ext/node/ops/crypto/cipher.rs b/ext/node/ops/crypto/cipher.rs index ec45146b49..7f5b108a04 100644 --- a/ext/node/ops/crypto/cipher.rs +++ b/ext/node/ops/crypto/cipher.rs @@ -172,27 +172,19 @@ impl Cipher { ) -> Result { use Cipher::*; Ok(match algorithm_name { - "aes-128-cbc" => { + "aes128" | "aes-128-cbc" => { Aes128Cbc(Box::new(cbc::Encryptor::new(key.into(), iv.into()))) } 
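// (editorial note; not part of this patch) "aes128" is the OpenSSL-style
// alias for AES-128 in CBC mode, so after this change the Node polyfill
// treats both spellings identically, e.g.:
//
//   crypto.createCipheriv("aes128", key, iv)      // alias form
//   crypto.createCipheriv("aes-128-cbc", key, iv) // canonical name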
"aes-128-ecb" => Aes128Ecb(Box::new(ecb::Encryptor::new(key.into()))), "aes-192-ecb" => Aes192Ecb(Box::new(ecb::Encryptor::new(key.into()))), "aes-256-ecb" => Aes256Ecb(Box::new(ecb::Encryptor::new(key.into()))), "aes-128-gcm" => { - if iv.len() != 12 { - return Err(CipherError::InvalidIvLength); - } - let cipher = aead_gcm_stream::AesGcm::::new(key.into(), iv); Aes128Gcm(Box::new(cipher)) } "aes-256-gcm" => { - if iv.len() != 12 { - return Err(CipherError::InvalidIvLength); - } - let cipher = aead_gcm_stream::AesGcm::::new(key.into(), iv); @@ -395,20 +387,12 @@ impl Decipher { "aes-192-ecb" => Aes192Ecb(Box::new(ecb::Decryptor::new(key.into()))), "aes-256-ecb" => Aes256Ecb(Box::new(ecb::Decryptor::new(key.into()))), "aes-128-gcm" => { - if iv.len() != 12 { - return Err(DecipherError::InvalidIvLength); - } - let decipher = aead_gcm_stream::AesGcm::::new(key.into(), iv); Aes128Gcm(Box::new(decipher)) } "aes-256-gcm" => { - if iv.len() != 12 { - return Err(DecipherError::InvalidIvLength); - } - let decipher = aead_gcm_stream::AesGcm::::new(key.into(), iv); diff --git a/ext/node/ops/crypto/keys.rs b/ext/node/ops/crypto/keys.rs index f164972d48..dfcd3d11bf 100644 --- a/ext/node/ops/crypto/keys.rs +++ b/ext/node/ops/crypto/keys.rs @@ -26,6 +26,7 @@ use rsa::pkcs1::DecodeRsaPrivateKey as _; use rsa::pkcs1::DecodeRsaPublicKey; use rsa::pkcs1::EncodeRsaPrivateKey as _; use rsa::pkcs1::EncodeRsaPublicKey; +use rsa::traits::PrivateKeyParts; use rsa::traits::PublicKeyParts; use rsa::RsaPrivateKey; use rsa::RsaPublicKey; @@ -255,6 +256,16 @@ impl EcPrivateKey { EcPrivateKey::P384(key) => EcPublicKey::P384(key.public_key()), } } + + pub fn to_jwk(&self) -> Result { + match self { + EcPrivateKey::P224(_) => { + Err(AsymmetricPrivateKeyJwkError::UnsupportedJwkEcCurveP224) + } + EcPrivateKey::P256(key) => Ok(key.to_jwk()), + EcPrivateKey::P384(key) => Ok(key.to_jwk()), + } + } } // https://oidref.com/ @@ -1107,6 +1118,16 @@ fn bytes_to_b64(bytes: &[u8]) -> String { BASE64_URL_SAFE_NO_PAD.encode(bytes) } +#[derive(Debug, thiserror::Error)] +pub enum AsymmetricPrivateKeyJwkError { + #[error("key is not an asymmetric private key")] + KeyIsNotAsymmetricPrivateKey, + #[error("Unsupported JWK EC curve: P224")] + UnsupportedJwkEcCurveP224, + #[error("jwk export not implemented for this key type")] + JwkExportNotImplementedForKeyType, +} + #[derive(Debug, thiserror::Error)] pub enum AsymmetricPublicKeyJwkError { #[error("key is not an asymmetric public key")] @@ -1328,7 +1349,73 @@ pub enum AsymmetricPrivateKeyDerError { UnsupportedKeyType(String), } +// https://datatracker.ietf.org/doc/html/rfc7518#section-6.3.2 +fn rsa_private_to_jwk(key: &RsaPrivateKey) -> deno_core::serde_json::Value { + let n = key.n(); + let e = key.e(); + let d = key.d(); + let p = &key.primes()[0]; + let q = &key.primes()[1]; + let dp = key.dp(); + let dq = key.dq(); + let qi = key.crt_coefficient(); + let oth = &key.primes()[2..]; + + let mut obj = deno_core::serde_json::json!({ + "kty": "RSA", + "n": bytes_to_b64(&n.to_bytes_be()), + "e": bytes_to_b64(&e.to_bytes_be()), + "d": bytes_to_b64(&d.to_bytes_be()), + "p": bytes_to_b64(&p.to_bytes_be()), + "q": bytes_to_b64(&q.to_bytes_be()), + "dp": dp.map(|dp| bytes_to_b64(&dp.to_bytes_be())), + "dq": dq.map(|dq| bytes_to_b64(&dq.to_bytes_be())), + "qi": qi.map(|qi| bytes_to_b64(&qi.to_bytes_be())), + }); + + if !oth.is_empty() { + obj["oth"] = deno_core::serde_json::json!(oth + .iter() + .map(|o| o.to_bytes_be()) + .collect::>()); + } + + obj +} + impl AsymmetricPrivateKey { + fn export_jwk( 
+ &self, + ) -> Result { + match self { + AsymmetricPrivateKey::Rsa(key) => Ok(rsa_private_to_jwk(key)), + AsymmetricPrivateKey::RsaPss(key) => Ok(rsa_private_to_jwk(&key.key)), + AsymmetricPrivateKey::Ec(key) => { + let jwk = key.to_jwk()?; + Ok(deno_core::serde_json::json!(jwk)) + } + AsymmetricPrivateKey::X25519(static_secret) => { + let bytes = static_secret.to_bytes(); + + Ok(deno_core::serde_json::json!({ + "kty": "OKP", + "crv": "X25519", + "d": bytes_to_b64(&bytes), + })) + } + AsymmetricPrivateKey::Ed25519(key) => { + let bytes = key.to_bytes(); + + Ok(deno_core::serde_json::json!({ + "kty": "OKP", + "crv": "Ed25519", + "d": bytes_to_b64(&bytes), + })) + } + _ => Err(AsymmetricPrivateKeyJwkError::JwkExportNotImplementedForKeyType), + } + } + fn export_der( &self, typ: &str, @@ -2329,6 +2416,28 @@ pub fn op_node_export_private_key_pem( Ok(String::from_utf8(out).expect("invalid pem is not possible")) } +#[derive(Debug, thiserror::Error)] +pub enum ExportPrivateKeyJwkError { + #[error(transparent)] + AsymmetricPublicKeyJwk(#[from] AsymmetricPrivateKeyJwkError), + #[error("very large data")] + VeryLargeData, + #[error(transparent)] + Der(#[from] der::Error), +} + +#[op2] +#[serde] +pub fn op_node_export_private_key_jwk( + #[cppgc] handle: &KeyObjectHandle, +) -> Result { + let private_key = handle + .as_private_key() + .ok_or(AsymmetricPrivateKeyJwkError::KeyIsNotAsymmetricPrivateKey)?; + + Ok(private_key.export_jwk()?) +} + #[op2] #[buffer] pub fn op_node_export_private_key_der( diff --git a/ext/node/ops/os/cpus.rs b/ext/node/ops/os/cpus.rs index 3f5f430f65..2b931884c3 100644 --- a/ext/node/ops/os/cpus.rs +++ b/ext/node/ops/os/cpus.rs @@ -73,12 +73,17 @@ pub fn cpu_info() -> Option> { cpu_speed = 2_400_000_000; } + extern "C" { + fn mach_host_self() -> std::ffi::c_uint; + static mut mach_task_self_: std::ffi::c_uint; + } + let mut num_cpus: libc::natural_t = 0; let mut info: *mut libc::processor_cpu_load_info_data_t = std::ptr::null_mut(); let mut msg_type: libc::mach_msg_type_number_t = 0; if libc::host_processor_info( - libc::mach_host_self(), + mach_host_self(), libc::PROCESSOR_CPU_LOAD_INFO, &mut num_cpus, &mut info as *mut _ as *mut libc::processor_info_array_t, @@ -111,7 +116,7 @@ pub fn cpu_info() -> Option> { } libc::vm_deallocate( - libc::mach_task_self(), + mach_task_self_, info.as_ptr() as libc::vm_address_t, msg_type as _, ); diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index ddcdec0bbd..c5e3afa87d 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -1,14 +1,19 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
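// (editorial sketch; not part of this patch) The require.rs changes below
// swap the dynamically dispatched `deno_fs::FileSystemRc` for a generic
// `TSys: ExtNodeSys` borrowed from `OpState`, so a typical op body changes
// roughly like this:
//
//   let fs = state.borrow::<FileSystemRc>();          // before
//   if let Ok(stat) = fs.stat_sync(&path) { /* stat.is_file */ }
//
//   let sys = state.borrow::<TSys>();                 // after
//   if let Ok(meta) = sys.fs_metadata(&path) { /* meta.file_type().is_file() */ }
//
// The error type changes with it: `deno_io::fs::FsError` becomes
// `std::io::Error` in `RequireErrorKind`.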
+use std::borrow::Cow; +use std::cell::RefCell; +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; + use boxed_error::Boxed; use deno_core::error::AnyError; use deno_core::op2; use deno_core::url::Url; use deno_core::v8; +use deno_core::FastString; use deno_core::JsRuntimeInspector; use deno_core::OpState; -use deno_fs::FileSystemRc; -use deno_fs::V8MaybeStaticStr; use deno_package_json::PackageJsonRc; use deno_path_util::normalize_path; use deno_path_util::url_from_file_path; @@ -17,12 +22,11 @@ use node_resolver::errors::ClosestPkgJsonError; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; use node_resolver::REQUIRE_CONDITIONS; -use std::borrow::Cow; -use std::cell::RefCell; -use std::path::Path; -use std::path::PathBuf; -use std::rc::Rc; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsMetadataValue; +use crate::ExtNodeSys; use crate::NodePermissions; use crate::NodeRequireLoaderRc; use crate::NodeResolverRc; @@ -68,11 +72,11 @@ pub enum RequireErrorKind { #[error(transparent)] UrlConversion(#[from] deno_path_util::PathToUrlError), #[error(transparent)] - Fs(#[from] deno_io::fs::FsError), + Fs(#[from] std::io::Error), #[error(transparent)] ReadModule(deno_core::error::AnyError), #[error("Unable to get CWD: {0}")] - UnableToGetCwd(deno_io::fs::FsError), + UnableToGetCwd(std::io::Error), } #[op2] @@ -128,19 +132,21 @@ pub fn op_require_init_paths() -> Vec { #[op2(stack_trace)] #[serde] -pub fn op_require_node_module_paths
<P>
( +pub fn op_require_node_module_paths< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, #[string] from: String, -) -> Result, RequireError> -where - P: NodePermissions + 'static, -{ - let fs = state.borrow::(); +) -> Result, RequireError> { + let sys = state.borrow::(); // Guarantee that "from" is absolute. let from = if from.starts_with("file:///") { url_to_file_path(&Url::parse(&from)?)? } else { - let current_dir = &fs.cwd().map_err(RequireErrorKind::UnableToGetCwd)?; + let current_dir = &sys + .env_current_dir() + .map_err(RequireErrorKind::UnableToGetCwd)?; normalize_path(current_dir.join(from)) }; @@ -238,11 +244,11 @@ pub fn op_require_resolve_deno_dir( } #[op2(fast)] -pub fn op_require_is_deno_dir_package( +pub fn op_require_is_deno_dir_package( state: &mut OpState, #[string] path: String, ) -> bool { - let resolver = state.borrow::(); + let resolver = state.borrow::>(); match deno_path_util::url_from_file_path(&PathBuf::from(path)) { Ok(specifier) => resolver.in_npm_package(&specifier), Err(_) => false, @@ -297,18 +303,18 @@ pub fn op_require_path_is_absolute(#[string] p: String) -> bool { } #[op2(fast, stack_trace)] -pub fn op_require_stat
<P>
( +pub fn op_require_stat< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, #[string] path: String, -) -> Result -where - P: NodePermissions + 'static, -{ +) -> Result { let path = PathBuf::from(path); let path = ensure_read_permission::
<P>
(state, &path)?; - let fs = state.borrow::(); - if let Ok(metadata) = fs.stat_sync(&path) { - if metadata.is_file { + let sys = state.borrow::(); + if let Ok(metadata) = sys.fs_metadata(&path) { + if metadata.file_type().is_file() { return Ok(0); } else { return Ok(1); @@ -320,19 +326,19 @@ where #[op2(stack_trace)] #[string] -pub fn op_require_real_path
<P>
( +pub fn op_require_real_path< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, #[string] request: String, -) -> Result -where - P: NodePermissions + 'static, -{ +) -> Result { let path = PathBuf::from(request); let path = ensure_read_permission::
<P>
(state, &path) .map_err(RequireErrorKind::Permission)?; - let fs = state.borrow::(); + let sys = state.borrow::(); let canonicalized_path = - deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?); + deno_path_util::strip_unc_prefix(sys.fs_canonicalize(&path)?); Ok(canonicalized_path.to_string_lossy().into_owned()) } @@ -384,15 +390,15 @@ pub fn op_require_path_basename( #[op2(stack_trace)] #[string] -pub fn op_require_try_self_parent_path
<P>
( +pub fn op_require_try_self_parent_path< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, has_parent: bool, #[string] maybe_parent_filename: Option, #[string] maybe_parent_id: Option, -) -> Result, deno_core::error::AnyError> -where - P: NodePermissions + 'static, -{ +) -> Result, deno_core::error::AnyError> { if !has_parent { return Ok(None); } @@ -403,8 +409,8 @@ where if let Some(parent_id) = maybe_parent_id { if parent_id == "" || parent_id == "internal/preload" { - let fs = state.borrow::(); - if let Ok(cwd) = fs.cwd() { + let sys = state.borrow::(); + if let Ok(cwd) = sys.env_current_dir() { let cwd = ensure_read_permission::
<P>
(state, &cwd)?; return Ok(Some(cwd.to_string_lossy().into_owned())); } @@ -415,19 +421,19 @@ where #[op2(stack_trace)] #[string] -pub fn op_require_try_self
<P>
( +pub fn op_require_try_self< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, #[string] parent_path: Option, #[string] request: String, -) -> Result, RequireError> -where - P: NodePermissions + 'static, -{ +) -> Result, RequireError> { if parent_path.is_none() { return Ok(None); } - let pkg_json_resolver = state.borrow::(); + let pkg_json_resolver = state.borrow::>(); let pkg = pkg_json_resolver .get_closest_package_json_from_file_path(&PathBuf::from( parent_path.unwrap(), @@ -459,7 +465,7 @@ where let referrer = deno_core::url::Url::from_file_path(&pkg.path).unwrap(); if let Some(exports) = &pkg.exports { - let node_resolver = state.borrow::(); + let node_resolver = state.borrow::>(); let r = node_resolver.package_exports_resolve( &pkg.path, &expansion, @@ -484,7 +490,7 @@ where pub fn op_require_read_file
<P>
( state: &mut OpState, #[string] file_path: String, -) -> Result +) -> Result where P: NodePermissions + 'static, { @@ -495,7 +501,10 @@ where let loader = state.borrow::(); loader .load_text_file_lossy(&file_path) - .map(V8MaybeStaticStr) + .map(|s| match s { + Cow::Borrowed(s) => FastString::from_static(s), + Cow::Owned(s) => s.into(), + }) .map_err(|e| RequireErrorKind::ReadModule(e).into_box()) } @@ -513,7 +522,10 @@ pub fn op_require_as_file_path(#[string] file_or_url: String) -> String { #[op2(stack_trace)] #[string] -pub fn op_require_resolve_exports
<P>
( +pub fn op_require_resolve_exports< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, uses_local_node_modules_dir: bool, #[string] modules_path_str: String, @@ -521,13 +533,10 @@ pub fn op_require_resolve_exports
<P>
( #[string] name: String, #[string] expansion: String, #[string] parent_path: String, -) -> Result, RequireError> -where - P: NodePermissions + 'static, -{ - let fs = state.borrow::(); - let node_resolver = state.borrow::(); - let pkg_json_resolver = state.borrow::(); +) -> Result, RequireError> { + let sys = state.borrow::(); + let node_resolver = state.borrow::>(); + let pkg_json_resolver = state.borrow::>(); let modules_path = PathBuf::from(&modules_path_str); let modules_specifier = deno_path_util::url_from_file_path(&modules_path)?; @@ -538,7 +547,7 @@ where } else { let mod_dir = path_resolve([modules_path_str.as_str(), name.as_str()].into_iter()); - if fs.is_dir_sync(&mod_dir) { + if sys.fs_is_dir_no_err(&mod_dir) { mod_dir } else { modules_path @@ -589,14 +598,14 @@ pub fn op_require_is_maybe_cjs( #[op2(stack_trace)] #[serde] -pub fn op_require_read_package_scope
<P>
( +pub fn op_require_read_package_scope< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, #[string] package_json_path: String, -) -> Option -where - P: NodePermissions + 'static, -{ - let pkg_json_resolver = state.borrow::(); +) -> Option { + let pkg_json_resolver = state.borrow::>(); let package_json_path = PathBuf::from(package_json_path); if package_json_path.file_name() != Some("package.json".as_ref()) { // permissions: do not allow reading a non-package.json file @@ -610,18 +619,18 @@ where #[op2(stack_trace)] #[string] -pub fn op_require_package_imports_resolve
<P>
( +pub fn op_require_package_imports_resolve< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, #[string] referrer_filename: String, #[string] request: String, -) -> Result, RequireError> -where - P: NodePermissions + 'static, -{ +) -> Result, RequireError> { let referrer_path = PathBuf::from(&referrer_filename); let referrer_path = ensure_read_permission::
<P>
(state, &referrer_path) .map_err(RequireErrorKind::Permission)?; - let pkg_json_resolver = state.borrow::(); + let pkg_json_resolver = state.borrow::>(); let Some(pkg) = pkg_json_resolver .get_closest_package_json_from_file_path(&referrer_path)? else { @@ -629,7 +638,7 @@ where }; if pkg.imports.is_some() { - let node_resolver = state.borrow::(); + let node_resolver = state.borrow::>(); let referrer_url = Url::from_file_path(&referrer_filename).unwrap(); let url = node_resolver.package_imports_resolve( &request, diff --git a/ext/node/ops/worker_threads.rs b/ext/node/ops/worker_threads.rs index 37a7b477d0..48683be1e7 100644 --- a/ext/node/ops/worker_threads.rs +++ b/ext/node/ops/worker_threads.rs @@ -1,13 +1,16 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::op2; -use deno_core::url::Url; -use deno_core::OpState; -use deno_fs::FileSystemRc; use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; +use deno_core::op2; +use deno_core::url::Url; +use deno_core::OpState; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; + +use crate::ExtNodeSys; use crate::NodePermissions; use crate::NodeRequireLoaderRc; @@ -41,19 +44,19 @@ pub enum WorkerThreadsFilenameError { #[error("File not found [{0:?}]")] FileNotFound(PathBuf), #[error(transparent)] - Fs(#[from] deno_io::fs::FsError), + Fs(#[from] std::io::Error), } // todo(dsherret): we should remove this and do all this work inside op_create_worker #[op2(stack_trace)] #[string] -pub fn op_worker_threads_filename
<P>
( +pub fn op_worker_threads_filename< + P: NodePermissions + 'static, + TSys: ExtNodeSys + 'static, +>( state: &mut OpState, #[string] specifier: String, -) -> Result -where - P: NodePermissions + 'static, -{ +) -> Result { if specifier.starts_with("data:") { return Ok(specifier); } @@ -66,9 +69,9 @@ where } let path = ensure_read_permission::
<P>
(state, &path) .map_err(WorkerThreadsFilenameError::Permission)?; - let fs = state.borrow::(); + let sys = state.borrow::(); let canonicalized_path = - deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?); + deno_path_util::strip_unc_prefix(sys.fs_canonicalize(&path)?); Url::from_file_path(canonicalized_path) .map_err(|_| WorkerThreadsFilenameError::UrlFromPathString)? }; @@ -77,8 +80,8 @@ where .map_err(|_| WorkerThreadsFilenameError::UrlToPathString)?; let url_path = ensure_read_permission::
<P>
(state, &url_path) .map_err(WorkerThreadsFilenameError::Permission)?; - let fs = state.borrow::(); - if !fs.exists_sync(&url_path) { + let sys = state.borrow::(); + if !sys.fs_exists_no_err(&url_path) { return Err(WorkerThreadsFilenameError::FileNotFound( url_path.to_path_buf(), )); diff --git a/ext/node/polyfill.rs b/ext/node/polyfill.rs index fc95d708eb..f16f16dea3 100644 --- a/ext/node/polyfill.rs +++ b/ext/node/polyfill.rs @@ -57,6 +57,7 @@ generate_builtin_node_module_lists! { "http2", "https", "inspector", + "inspector/promises", "module", "net", "os", @@ -67,9 +68,9 @@ generate_builtin_node_module_lists! { "process", "punycode", "querystring", - "repl", "readline", "readline/promises", + "repl", "sqlite", "stream", "stream/consumers", @@ -91,3 +92,10 @@ generate_builtin_node_module_lists! { "worker_threads", "zlib", } + +#[test] +fn test_builtins_are_sorted() { + let mut builtins_list = SUPPORTED_BUILTIN_NODE_MODULES.to_vec(); + builtins_list.sort(); + assert_eq!(SUPPORTED_BUILTIN_NODE_MODULES, builtins_list); +} diff --git a/ext/node/polyfills/_brotli.js b/ext/node/polyfills/_brotli.js index ebd0351561..108e5319a9 100644 --- a/ext/node/polyfills/_brotli.js +++ b/ext/node/polyfills/_brotli.js @@ -10,9 +10,12 @@ const { ArrayPrototypeMap, TypedArrayPrototypeSlice, TypedArrayPrototypeSubarray, - TypedArrayPrototypeGetByteLength, - DataViewPrototypeGetBuffer, TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, + DataViewPrototypeGetBuffer, + DataViewPrototypeGetByteLength, + DataViewPrototypeGetByteOffset, } = primordials; const { isTypedArray, isDataView, close } = core; import { @@ -40,9 +43,17 @@ const toU8 = (input) => { } if (isTypedArray(input)) { - return new Uint8Array(TypedArrayPrototypeGetBuffer(input)); + return new Uint8Array( + TypedArrayPrototypeGetBuffer(input), + TypedArrayPrototypeGetByteOffset(input), + TypedArrayPrototypeGetByteLength(input), + ); } else if (isDataView(input)) { - return new Uint8Array(DataViewPrototypeGetBuffer(input)); + return new Uint8Array( + DataViewPrototypeGetBuffer(input), + DataViewPrototypeGetByteOffset(input), + DataViewPrototypeGetByteLength(input), + ); } return input; diff --git a/ext/node/polyfills/_fs/_fs_access.ts b/ext/node/polyfills/_fs/_fs_access.ts index b501bcbcae..824386e64b 100644 --- a/ext/node/polyfills/_fs/_fs_access.ts +++ b/ext/node/polyfills/_fs/_fs_access.ts @@ -30,50 +30,58 @@ export function access( mode = getValidMode(mode, "access"); const cb = makeCallback(callback); - Deno.lstat(path).then((info) => { - if (info.mode === null) { - // If the file mode is unavailable, we pretend it has - // the permission - cb(null); - return; - } - const m = +mode || 0; - let fileMode = +info.mode || 0; - if (Deno.build.os !== "windows" && info.uid === Deno.uid()) { - // If the user is the owner of the file, then use the owner bits of - // the file permission - fileMode >>= 6; - } - // TODO(kt3k): Also check the case when the user belong to the group - // of the file - if ((m & fileMode) === m) { - // all required flags exist - cb(null); - } else { - // some required flags don't - // deno-lint-ignore no-explicit-any - const e: any = new Error(`EACCES: permission denied, access '${path}'`); - e.path = path; - e.syscall = "access"; - e.errno = codeMap.get("EACCES"); - e.code = "EACCES"; - cb(e); - } - }, (err) => { - if (err instanceof Deno.errors.NotFound) { - // deno-lint-ignore no-explicit-any - const e: any = new Error( - `ENOENT: no such file or directory, access '${path}'`, - 
); - e.path = path; - e.syscall = "access"; - e.errno = codeMap.get("ENOENT"); - e.code = "ENOENT"; - cb(e); - } else { - cb(err); - } - }); + Deno.lstat(path).then( + (info) => { + if (info.mode === null) { + // If the file mode is unavailable, we pretend it has + // the permission + cb(null); + return; + } + let m = +mode || 0; + let fileMode = +info.mode || 0; + + if (Deno.build.os === "windows") { + m &= ~fs.X_OK; // Ignore the X_OK bit on Windows + } else if (info.uid === Deno.uid()) { + // If the user is the owner of the file, then use the owner bits of + // the file permission + fileMode >>= 6; + } + + // TODO(kt3k): Also check the case when the user belong to the group + // of the file + + if ((m & fileMode) === m) { + // all required flags exist + cb(null); + } else { + // some required flags don't + // deno-lint-ignore no-explicit-any + const e: any = new Error(`EACCES: permission denied, access '${path}'`); + e.path = path; + e.syscall = "access"; + e.errno = codeMap.get("EACCES"); + e.code = "EACCES"; + cb(e); + } + }, + (err) => { + if (err instanceof Deno.errors.NotFound) { + // deno-lint-ignore no-explicit-any + const e: any = new Error( + `ENOENT: no such file or directory, access '${path}'`, + ); + e.path = path; + e.syscall = "access"; + e.errno = codeMap.get("ENOENT"); + e.code = "ENOENT"; + cb(e); + } else { + cb(err); + } + }, + ); } export const accessPromise = promisify(access) as ( @@ -91,9 +99,11 @@ export function accessSync(path: string | Buffer | URL, mode?: number) { // the permission return; } - const m = +mode! || 0; + let m = +mode! || 0; let fileMode = +info.mode! || 0; - if (Deno.build.os !== "windows" && info.uid === Deno.uid()) { + if (Deno.build.os === "windows") { + m &= ~fs.X_OK; // Ignore the X_OK bit on Windows + } else if (info.uid === Deno.uid()) { // If the user is the owner of the file, then use the owner bits of // the file permission fileMode >>= 6; diff --git a/ext/node/polyfills/_fs/_fs_ftruncate.ts b/ext/node/polyfills/_fs/_fs_ftruncate.ts index 92af46f521..79320137f9 100644 --- a/ext/node/polyfills/_fs/_fs_ftruncate.ts +++ b/ext/node/polyfills/_fs/_fs_ftruncate.ts @@ -16,16 +16,24 @@ export function ftruncate( : undefined; const callback: CallbackWithError = typeof lenOrCallback === "function" ? 
lenOrCallback - : maybeCallback as CallbackWithError; + : (maybeCallback as CallbackWithError); if (!callback) throw new Error("No callback function supplied"); - new FsFile(fd, Symbol.for("Deno.internal.FsFile")).truncate(len).then( - () => callback(null), - callback, - ); + new FsFile(fd, Symbol.for("Deno.internal.FsFile")) + .truncate(len) + .then(() => callback(null), callback); } export function ftruncateSync(fd: number, len?: number) { new FsFile(fd, Symbol.for("Deno.internal.FsFile")).truncateSync(len); } + +export function ftruncatePromise(fd: number, len?: number): Promise { + return new Promise((resolve, reject) => { + ftruncate(fd, len, (err) => { + if (err) reject(err); + else resolve(); + }); + }); +} diff --git a/ext/node/polyfills/internal/crypto/keys.ts b/ext/node/polyfills/internal/crypto/keys.ts index c91c23cc3d..932856df0e 100644 --- a/ext/node/polyfills/internal/crypto/keys.ts +++ b/ext/node/polyfills/internal/crypto/keys.ts @@ -20,6 +20,7 @@ import { op_node_create_secret_key, op_node_derive_public_key_from_private_key, op_node_export_private_key_der, + op_node_export_private_key_jwk, op_node_export_private_key_pem, op_node_export_public_key_der, op_node_export_public_key_jwk, @@ -791,7 +792,7 @@ export class PrivateKeyObject extends AsymmetricKeyObject { export(options: JwkKeyExportOptions | KeyExportOptions) { if (options && options.format === "jwk") { - notImplemented("jwk private key export not implemented"); + return op_node_export_private_key_jwk(this[kHandle]); } const { format, diff --git a/ext/node/polyfills/internal/crypto/util.ts b/ext/node/polyfills/internal/crypto/util.ts index a39b031ee3..6c925f6577 100644 --- a/ext/node/polyfills/internal/crypto/util.ts +++ b/ext/node/polyfills/internal/crypto/util.ts @@ -67,22 +67,16 @@ export const ellipticCurves: Array = [ }, // NIST P-224 EC ]; -// deno-fmt-ignore const supportedCiphers = [ - "aes-128-ecb", "aes-192-ecb", - "aes-256-ecb", "aes-128-cbc", - "aes-192-cbc", "aes-256-cbc", - "aes128", "aes192", - "aes256", "aes-128-cfb", - "aes-192-cfb", "aes-256-cfb", - "aes-128-cfb8", "aes-192-cfb8", - "aes-256-cfb8", "aes-128-cfb1", - "aes-192-cfb1", "aes-256-cfb1", - "aes-128-ofb", "aes-192-ofb", - "aes-256-ofb", "aes-128-ctr", - "aes-192-ctr", "aes-256-ctr", - "aes-128-gcm", "aes-192-gcm", - "aes-256-gcm" + "aes-128-ecb", + "aes-192-ecb", + "aes-256-ecb", + "aes-128-cbc", + "aes-256-cbc", + "aes128", + "aes256", + "aes-128-gcm", + "aes-256-gcm", ]; export function getCiphers(): string[] { diff --git a/ext/node/polyfills/internal/errors.ts b/ext/node/polyfills/internal/errors.ts index 61b53fa968..d79232aed7 100644 --- a/ext/node/polyfills/internal/errors.ts +++ b/ext/node/polyfills/internal/errors.ts @@ -624,6 +624,15 @@ function createInvalidArgType( return msg; } +export class ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH extends NodeRangeError { + constructor() { + super( + "ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH", + "Input buffers must have the same length", + ); + } +} + export class ERR_INVALID_ARG_TYPE_RANGE extends NodeRangeError { constructor(name: string, expected: string | string[], actual: unknown) { const msg = createInvalidArgType(name, expected); @@ -2842,6 +2851,7 @@ export default { ERR_INVALID_ADDRESS_FAMILY, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_TYPE_RANGE, + ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH, ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_VALUE_RANGE, ERR_INVALID_ASYNC_ID, diff --git a/ext/node/polyfills/internal/fs/handle.ts b/ext/node/polyfills/internal/fs/handle.ts index 9ec0fc97e2..ee035f2f5c 100644 --- 
a/ext/node/polyfills/internal/fs/handle.ts +++ b/ext/node/polyfills/internal/fs/handle.ts @@ -13,6 +13,7 @@ import { ReadOptions, TextOptionsArgument, } from "ext:deno_node/_fs/_fs_common.ts"; +import { ftruncatePromise } from "ext:deno_node/_fs/_fs_ftruncate.ts"; import { core } from "ext:core/mod.js"; interface WriteResult { @@ -73,6 +74,10 @@ export class FileHandle extends EventEmitter { } } + truncate(len?: number): Promise { + return fsCall(ftruncatePromise, this, len); + } + readFile( opt?: TextOptionsArgument | BinaryOptionsArgument | FileOptionsArgument, ): Promise { @@ -85,11 +90,7 @@ export class FileHandle extends EventEmitter { length: number, position: number, ): Promise; - write( - str: string, - position: number, - encoding: string, - ): Promise; + write(str: string, position: number, encoding: string): Promise; write( bufferOrStr: Uint8Array | string, offsetOrPosition: number, @@ -120,16 +121,10 @@ export class FileHandle extends EventEmitter { const encoding = lengthOrEncoding; return new Promise((resolve, reject) => { - write( - this.fd, - str, - position, - encoding, - (err, bytesWritten, buffer) => { - if (err) reject(err); - else resolve({ buffer, bytesWritten }); - }, - ); + write(this.fd, str, position, encoding, (err, bytesWritten, buffer) => { + if (err) reject(err); + else resolve({ buffer, bytesWritten }); + }); }); } } diff --git a/ext/node/polyfills/internal_binding/_timingSafeEqual.ts b/ext/node/polyfills/internal_binding/_timingSafeEqual.ts index 559b7685b8..d9811c5505 100644 --- a/ext/node/polyfills/internal_binding/_timingSafeEqual.ts +++ b/ext/node/polyfills/internal_binding/_timingSafeEqual.ts @@ -4,6 +4,7 @@ // deno-lint-ignore-file prefer-primordials import { Buffer } from "node:buffer"; +import { ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH } from "ext:deno_node/internal/errors.ts"; function toDataView(ab: ArrayBufferLike | ArrayBufferView): DataView { if (ArrayBuffer.isView(ab)) { @@ -19,7 +20,7 @@ function stdTimingSafeEqual( b: ArrayBufferView | ArrayBufferLike | DataView, ): boolean { if (a.byteLength !== b.byteLength) { - return false; + throw new ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH(); } if (!(a instanceof DataView)) { a = toDataView(a); diff --git a/ext/node/polyfills/worker_threads.ts b/ext/node/polyfills/worker_threads.ts index 1b175fb1dd..dc844169c5 100644 --- a/ext/node/polyfills/worker_threads.ts +++ b/ext/node/polyfills/worker_threads.ts @@ -21,7 +21,7 @@ import { nodeWorkerThreadCloseCb, refMessagePort, serializeJsMessageData, - unrefPollForMessages, + unrefParentPort, } from "ext:deno_web/13_message_port.js"; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { notImplemented } from "ext:deno_node/_utils.ts"; @@ -451,10 +451,10 @@ internals.__initWorkerThreads = ( parentPort.emit("close"); }); parentPort.unref = () => { - parentPort[unrefPollForMessages] = true; + parentPort[unrefParentPort] = true; }; parentPort.ref = () => { - parentPort[unrefPollForMessages] = false; + parentPort[unrefParentPort] = false; }; if (isWorkerThread) { diff --git a/ext/telemetry/Cargo.toml b/ext/telemetry/Cargo.toml index f3d4bbd336..fedaed6656 100644 --- a/ext/telemetry/Cargo.toml +++ b/ext/telemetry/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_telemetry" -version = "0.4.0" +version = "0.6.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/telemetry/lib.rs b/ext/telemetry/lib.rs index 816e838743..8018843dc4 100644 --- a/ext/telemetry/lib.rs +++ b/ext/telemetry/lib.rs @@ -6,16 +6,22 @@ use 
deno_core::futures::channel::mpsc; use deno_core::futures::channel::mpsc::UnboundedSender; use deno_core::futures::future::BoxFuture; use deno_core::futures::stream; +use deno_core::futures::FutureExt; use deno_core::futures::Stream; use deno_core::futures::StreamExt; use deno_core::op2; use deno_core::v8; +use deno_core::GarbageCollected; use deno_core::OpState; use once_cell::sync::Lazy; use once_cell::sync::OnceCell; use opentelemetry::logs::AnyValue; use opentelemetry::logs::LogRecord as LogRecordTrait; use opentelemetry::logs::Severity; +use opentelemetry::metrics::AsyncInstrumentBuilder; +use opentelemetry::metrics::InstrumentBuilder; +use opentelemetry::metrics::MeterProvider; +use opentelemetry::otel_debug; use opentelemetry::otel_error; use opentelemetry::trace::SpanContext; use opentelemetry::trace::SpanId; @@ -28,7 +34,6 @@ use opentelemetry::KeyValue; use opentelemetry::StringValue; use opentelemetry::Value; use opentelemetry_otlp::HttpExporterBuilder; -use opentelemetry_otlp::MetricExporter; use opentelemetry_otlp::Protocol; use opentelemetry_otlp::WithExportConfig; use opentelemetry_otlp::WithHttpConfig; @@ -36,10 +41,11 @@ use opentelemetry_sdk::export::trace::SpanData; use opentelemetry_sdk::logs::BatchLogProcessor; use opentelemetry_sdk::logs::LogProcessor; use opentelemetry_sdk::logs::LogRecord; -use opentelemetry_sdk::metrics::data::Metric; -use opentelemetry_sdk::metrics::data::ResourceMetrics; -use opentelemetry_sdk::metrics::data::ScopeMetrics; use opentelemetry_sdk::metrics::exporter::PushMetricExporter; +use opentelemetry_sdk::metrics::reader::MetricReader; +use opentelemetry_sdk::metrics::ManualReader; +use opentelemetry_sdk::metrics::MetricResult; +use opentelemetry_sdk::metrics::SdkMeterProvider; use opentelemetry_sdk::metrics::Temporality; use opentelemetry_sdk::trace::BatchSpanProcessor; use opentelemetry_sdk::trace::SpanProcessor; @@ -52,14 +58,21 @@ use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_VERSION; use serde::Deserialize; use serde::Serialize; use std::borrow::Cow; +use std::cell::RefCell; +use std::collections::HashMap; use std::env; use std::fmt::Debug; use std::pin::Pin; +use std::rc::Rc; +use std::sync::Arc; +use std::sync::Mutex; use std::task::Context; use std::task::Poll; use std::thread; use std::time::Duration; use std::time::SystemTime; +use tokio::sync::oneshot; +use tokio::task::JoinSet; deno_core::extension!( deno_telemetry, @@ -75,23 +88,24 @@ deno_core::extension!( op_otel_span_attribute3, op_otel_span_set_dropped, op_otel_span_flush, - op_otel_metrics_resource_attribute, - op_otel_metrics_resource_attribute2, - op_otel_metrics_resource_attribute3, - op_otel_metrics_scope, - op_otel_metrics_sum, - op_otel_metrics_gauge, - op_otel_metrics_sum_or_gauge_data_point, - op_otel_metrics_histogram, - op_otel_metrics_histogram_data_point, - op_otel_metrics_histogram_data_point_entry_final, - op_otel_metrics_histogram_data_point_entry1, - op_otel_metrics_histogram_data_point_entry2, - op_otel_metrics_histogram_data_point_entry3, - op_otel_metrics_data_point_attribute, - op_otel_metrics_data_point_attribute2, - op_otel_metrics_data_point_attribute3, - op_otel_metrics_submit, + op_otel_metric_create_counter, + op_otel_metric_create_up_down_counter, + op_otel_metric_create_gauge, + op_otel_metric_create_histogram, + op_otel_metric_create_observable_counter, + op_otel_metric_create_observable_gauge, + op_otel_metric_create_observable_up_down_counter, + op_otel_metric_attribute3, + op_otel_metric_record0, + op_otel_metric_record1, + 
op_otel_metric_record2, + op_otel_metric_record3, + op_otel_metric_observable_record0, + op_otel_metric_observable_record1, + op_otel_metric_observable_record2, + op_otel_metric_observable_record3, + op_otel_metric_wait_to_observe, + op_otel_metric_observation_done, ], esm = ["telemetry.ts", "util.ts"], ); @@ -105,6 +119,7 @@ pub struct OtelRuntimeConfig { #[derive(Default, Debug, Clone, Serialize, Deserialize)] pub struct OtelConfig { pub tracing_enabled: bool, + pub metrics_enabled: bool, pub console: OtelConsoleConfig, pub deterministic: bool, } @@ -113,6 +128,7 @@ impl OtelConfig { pub fn as_v8(&self) -> Box<[u8]> { Box::new([ self.tracing_enabled as u8, + self.metrics_enabled as u8, self.console as u8, self.deterministic as u8, ]) @@ -137,6 +153,10 @@ static OTEL_SHARED_RUNTIME_SPAWN_TASK_TX: Lazy< UnboundedSender>, > = Lazy::new(otel_create_shared_runtime); +static OTEL_PRE_COLLECT_CALLBACKS: Lazy< + Mutex>>>, +> = Lazy::new(Default::default); + fn otel_create_shared_runtime() -> UnboundedSender> { let (spawn_task_tx, mut spawn_task_rx) = mpsc::unbounded::>(); @@ -273,6 +293,181 @@ impl Stream for BatchMessageChannelReceiver { } } +enum DenoPeriodicReaderMessage { + Register(std::sync::Weak), + Export, + ForceFlush(oneshot::Sender>), + Shutdown(oneshot::Sender>), +} + +#[derive(Debug)] +struct DenoPeriodicReader { + tx: tokio::sync::mpsc::Sender, + temporality: Temporality, +} + +impl MetricReader for DenoPeriodicReader { + fn register_pipeline( + &self, + pipeline: std::sync::Weak, + ) { + let _ = self + .tx + .try_send(DenoPeriodicReaderMessage::Register(pipeline)); + } + + fn collect( + &self, + _rm: &mut opentelemetry_sdk::metrics::data::ResourceMetrics, + ) -> opentelemetry_sdk::metrics::MetricResult<()> { + unreachable!("collect should not be called on DenoPeriodicReader"); + } + + fn force_flush(&self) -> opentelemetry_sdk::metrics::MetricResult<()> { + let (tx, rx) = oneshot::channel(); + let _ = self.tx.try_send(DenoPeriodicReaderMessage::ForceFlush(tx)); + deno_core::futures::executor::block_on(rx).unwrap()?; + Ok(()) + } + + fn shutdown(&self) -> opentelemetry_sdk::metrics::MetricResult<()> { + let (tx, rx) = oneshot::channel(); + let _ = self.tx.try_send(DenoPeriodicReaderMessage::Shutdown(tx)); + deno_core::futures::executor::block_on(rx).unwrap()?; + Ok(()) + } + + fn temporality( + &self, + _kind: opentelemetry_sdk::metrics::InstrumentKind, + ) -> Temporality { + self.temporality + } +} + +const METRIC_EXPORT_INTERVAL_NAME: &str = "OTEL_METRIC_EXPORT_INTERVAL"; +const DEFAULT_INTERVAL: Duration = Duration::from_secs(60); + +impl DenoPeriodicReader { + fn new(exporter: opentelemetry_otlp::MetricExporter) -> Self { + let interval = env::var(METRIC_EXPORT_INTERVAL_NAME) + .ok() + .and_then(|v| v.parse().map(Duration::from_millis).ok()) + .unwrap_or(DEFAULT_INTERVAL); + + let (tx, mut rx) = tokio::sync::mpsc::channel(256); + + let temporality = PushMetricExporter::temporality(&exporter); + + let worker = async move { + let inner = ManualReader::builder() + .with_temporality(PushMetricExporter::temporality(&exporter)) + .build(); + + let collect_and_export = |collect_observed: bool| { + let inner = &inner; + let exporter = &exporter; + async move { + let mut resource_metrics = + opentelemetry_sdk::metrics::data::ResourceMetrics { + resource: Default::default(), + scope_metrics: Default::default(), + }; + if collect_observed { + let callbacks = { + let mut callbacks = OTEL_PRE_COLLECT_CALLBACKS.lock().unwrap(); + std::mem::take(&mut *callbacks) + }; + let mut futures = 
JoinSet::new(); + for callback in callbacks { + let (tx, rx) = oneshot::channel(); + if let Ok(()) = callback.send(tx) { + futures.spawn(rx); + } + } + while futures.join_next().await.is_some() {} + } + inner.collect(&mut resource_metrics)?; + if resource_metrics.scope_metrics.is_empty() { + return Ok(()); + } + exporter.export(&mut resource_metrics).await?; + Ok(()) + } + }; + + let mut ticker = tokio::time::interval(interval); + ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Delay); + ticker.tick().await; + + loop { + let message = tokio::select! { + _ = ticker.tick() => DenoPeriodicReaderMessage::Export, + message = rx.recv() => if let Some(message) = message { + message + } else { + break; + }, + }; + + match message { + DenoPeriodicReaderMessage::Register(new_pipeline) => { + inner.register_pipeline(new_pipeline); + } + DenoPeriodicReaderMessage::Export => { + otel_debug!( + name: "DenoPeriodicReader.ExportTriggered", + message = "Export message received.", + ); + if let Err(err) = collect_and_export(true).await { + otel_error!( + name: "DenoPeriodicReader.ExportFailed", + message = "Failed to export metrics", + reason = format!("{}", err)); + } + } + DenoPeriodicReaderMessage::ForceFlush(sender) => { + otel_debug!( + name: "DenoPeriodicReader.ForceFlushCalled", + message = "Flush message received.", + ); + let res = collect_and_export(false).await; + if let Err(send_error) = sender.send(res) { + otel_debug!( + name: "DenoPeriodicReader.Flush.SendResultError", + message = "Failed to send flush result.", + reason = format!("{:?}", send_error), + ); + } + } + DenoPeriodicReaderMessage::Shutdown(sender) => { + otel_debug!( + name: "DenoPeriodicReader.ShutdownCalled", + message = "Shutdown message received", + ); + let res = collect_and_export(false).await; + let _ = exporter.shutdown(); + if let Err(send_error) = sender.send(res) { + otel_debug!( + name: "DenoPeriodicReader.Shutdown.SendResultError", + message = "Failed to send shutdown result", + reason = format!("{:?}", send_error), + ); + } + break; + } + } + } + }; + + (*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX) + .unbounded_send(worker.boxed()) + .expect("failed to send task to shared OpenTelemetry runtime"); + + DenoPeriodicReader { tx, temporality } + } +} + mod hyper_client { use http_body_util::BodyExt; use http_body_util::Full; @@ -353,66 +548,10 @@ mod hyper_client { } } -enum MetricProcessorMessage { - ResourceMetrics(ResourceMetrics), - Flush(tokio::sync::oneshot::Sender<()>), -} - -struct MetricProcessor { - tx: tokio::sync::mpsc::Sender, -} - -impl MetricProcessor { - fn new(exporter: MetricExporter) -> Self { - let (tx, mut rx) = tokio::sync::mpsc::channel(2048); - let future = async move { - while let Some(message) = rx.recv().await { - match message { - MetricProcessorMessage::ResourceMetrics(mut rm) => { - if let Err(err) = exporter.export(&mut rm).await { - otel_error!( - name: "MetricProcessor.Export.Error", - error = format!("{}", err) - ); - } - } - MetricProcessorMessage::Flush(tx) => { - if let Err(()) = tx.send(()) { - otel_error!( - name: "MetricProcessor.Flush.SendResultError", - error = "()", - ); - } - } - } - } - }; - - (*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX) - .unbounded_send(Box::pin(future)) - .expect("failed to send task to shared OpenTelemetry runtime"); - - Self { tx } - } - - fn submit(&self, rm: ResourceMetrics) { - let _ = self - .tx - .try_send(MetricProcessorMessage::ResourceMetrics(rm)); - } - - fn force_flush(&self) -> Result<(), anyhow::Error> { - let (tx, rx) = 
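`DenoPeriodicReader` replaces the SDK's own periodic reader so that the interval tick, `force_flush`, and `shutdown` all funnel through one task and one export path. A minimal tokio model of that control loop (export stubbed out, interval shortened):

```rust
// Minimal model of the DenoPeriodicReader control loop: one task multiplexes
// a periodic tick with explicit control messages. Message names mirror the
// diff; the actual collect_and_export is stubbed out.
use std::time::Duration;
use tokio::sync::{mpsc, oneshot};

enum Msg {
    Export, // driven by the ticker
    ForceFlush(oneshot::Sender<()>),
    Shutdown(oneshot::Sender<()>),
}

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::channel::<Msg>(256);

    let worker = tokio::spawn(async move {
        let mut ticker = tokio::time::interval(Duration::from_millis(50));
        // Delay (don't burst) on missed ticks, and swallow the immediate
        // first tick — the same configuration the diff applies.
        ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Delay);
        ticker.tick().await;

        loop {
            let msg = tokio::select! {
                _ = ticker.tick() => Msg::Export,
                m = rx.recv() => match m {
                    Some(m) => m,
                    None => break, // all senders dropped
                },
            };
            match msg {
                Msg::Export => { /* collect_and_export(true) in the real code */ }
                Msg::ForceFlush(done) => { let _ = done.send(()); }
                Msg::Shutdown(done) => {
                    let _ = done.send(());
                    break;
                }
            }
        }
    });

    let (done_tx, done_rx) = oneshot::channel();
    tx.send(Msg::Shutdown(done_tx)).await.unwrap();
    done_rx.await.unwrap();
    worker.await.unwrap();
}
```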
tokio::sync::oneshot::channel(); - self.tx.try_send(MetricProcessorMessage::Flush(tx))?; - deno_core::futures::executor::block_on(rx)?; - Ok(()) - } -} - struct Processors { spans: BatchSpanProcessor, logs: BatchLogProcessor, - metrics: MetricProcessor, + meter_provider: SdkMeterProvider, } static OTEL_PROCESSORS: OnceCell = OnceCell::new(); @@ -421,7 +560,7 @@ static BUILT_IN_INSTRUMENTATION_SCOPE: OnceCell< opentelemetry::InstrumentationScope, > = OnceCell::new(); -pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> { +pub fn init(rt_config: OtelRuntimeConfig) -> anyhow::Result<()> { // Parse the `OTEL_EXPORTER_OTLP_PROTOCOL` variable. The opentelemetry_* // crates don't do this automatically. // TODO(piscisaureus): enable GRPC support. @@ -454,8 +593,8 @@ pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> { // Add the runtime name and version to the resource attributes. Also override // the `telemetry.sdk` attributes to include the Deno runtime. resource = resource.merge(&Resource::new(vec![ - KeyValue::new(PROCESS_RUNTIME_NAME, config.runtime_name), - KeyValue::new(PROCESS_RUNTIME_VERSION, config.runtime_version.clone()), + KeyValue::new(PROCESS_RUNTIME_NAME, rt_config.runtime_name), + KeyValue::new(PROCESS_RUNTIME_VERSION, rt_config.runtime_version.clone()), KeyValue::new( TELEMETRY_SDK_LANGUAGE, format!( @@ -474,7 +613,7 @@ pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> { TELEMETRY_SDK_VERSION, format!( "{}-{}", - config.runtime_version, + rt_config.runtime_version, resource.get(Key::new(TELEMETRY_SDK_VERSION)).unwrap() ), ), @@ -494,11 +633,30 @@ pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> { BatchSpanProcessor::builder(span_exporter, OtelSharedRuntime).build(); span_processor.set_resource(&resource); + let temporality_preference = + env::var("OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE") + .ok() + .map(|s| s.to_lowercase()); + let temporality = match temporality_preference.as_deref() { + None | Some("cumulative") => Temporality::Cumulative, + Some("delta") => Temporality::Delta, + Some("lowmemory") => Temporality::LowMemory, + Some(other) => { + return Err(anyhow!( + "Invalid value for OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: {}", + other + )); + } + }; let metric_exporter = HttpExporterBuilder::default() .with_http_client(client.clone()) .with_protocol(protocol) - .build_metrics_exporter(Temporality::Cumulative)?; - let metric_processor = MetricProcessor::new(metric_exporter); + .build_metrics_exporter(temporality)?; + let metric_reader = DenoPeriodicReader::new(metric_exporter); + let meter_provider = SdkMeterProvider::builder() + .with_reader(metric_reader) + .with_resource(resource.clone()) + .build(); let log_exporter = HttpExporterBuilder::default() .with_http_client(client) @@ -512,13 +670,13 @@ pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> { .set(Processors { spans: span_processor, logs: log_processor, - metrics: metric_processor, + meter_provider, }) .map_err(|_| anyhow!("failed to init otel"))?; let builtin_instrumentation_scope = opentelemetry::InstrumentationScope::builder("deno") - .with_version(config.runtime_version.clone()) + .with_version(rt_config.runtime_version.clone()) .build(); BUILT_IN_INSTRUMENTATION_SCOPE .set(builtin_instrumentation_scope) @@ -534,12 +692,12 @@ pub fn flush() { if let Some(Processors { spans, logs, - metrics, + meter_provider, }) = OTEL_PROCESSORS.get() { let _ = spans.force_flush(); let _ = logs.force_flush(); - let _ = metrics.force_flush(); + let _ = 
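`init` now honors `OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE`, failing fast on unknown values instead of silently defaulting. A standalone sketch of the mapping, with a local enum standing in for `opentelemetry_sdk::metrics::Temporality`:

```rust
// Mirrors the env-var parsing added to init(): case-insensitive, with
// "cumulative" as the default and an error on anything unrecognized.
#[derive(Debug, PartialEq)]
enum Temporality {
    Cumulative,
    Delta,
    LowMemory,
}

fn parse_temporality(raw: Option<&str>) -> Result<Temporality, String> {
    match raw.map(|s| s.to_lowercase()).as_deref() {
        None | Some("cumulative") => Ok(Temporality::Cumulative),
        Some("delta") => Ok(Temporality::Delta),
        Some("lowmemory") => Ok(Temporality::LowMemory),
        Some(other) => Err(format!(
            "Invalid value for OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: {other}"
        )),
    }
}

fn main() {
    assert_eq!(parse_temporality(None), Ok(Temporality::Cumulative));
    assert_eq!(parse_temporality(Some("Delta")), Ok(Temporality::Delta));
    assert!(parse_temporality(Some("bogus")).is_err());
}
```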
meter_provider.force_flush(); } } @@ -659,8 +817,8 @@ fn parse_span_id( } } -macro_rules! attr { - ($scope:ident, $attributes:expr $(=> $dropped_attributes_count:expr)?, $name:expr, $value:expr) => { +macro_rules! attr_raw { + ($scope:ident, $name:expr, $value:expr) => {{ let name = if let Ok(name) = $name.try_cast() { let view = v8::ValueView::new($scope, name); match view.data() { @@ -695,7 +853,18 @@ macro_rules! attr { None }; if let (Some(name), Some(value)) = (name, value) { - $attributes.push(KeyValue::new(name, value)); + Some(KeyValue::new(name, value)) + } else { + None + } + }}; +} + +macro_rules! attr { + ($scope:ident, $attributes:expr $(=> $dropped_attributes_count:expr)?, $name:expr, $value:expr) => { + let attr = attr_raw!($scope, $name, $value); + if let Some(kv) = attr { + $attributes.push(kv); } $( else { @@ -909,7 +1078,8 @@ fn op_otel_span_attribute<'s>( ) { if let Some(temporary_span) = state.try_borrow_mut::() { temporary_span.0.attributes.reserve_exact( - (capacity as usize) - temporary_span.0.attributes.capacity(), + (capacity as usize) + .saturating_sub(temporary_span.0.attributes.capacity()), ); attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key, value); } @@ -927,7 +1097,8 @@ fn op_otel_span_attribute2<'s>( ) { if let Some(temporary_span) = state.try_borrow_mut::() { temporary_span.0.attributes.reserve_exact( - (capacity as usize) - temporary_span.0.attributes.capacity(), + (capacity as usize) + .saturating_sub(temporary_span.0.attributes.capacity()), ); attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key1, value1); attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key2, value2); @@ -949,7 +1120,8 @@ fn op_otel_span_attribute3<'s>( ) { if let Some(temporary_span) = state.try_borrow_mut::() { temporary_span.0.attributes.reserve_exact( - (capacity as usize) - temporary_span.0.attributes.capacity(), + (capacity as usize) + .saturating_sub(temporary_span.0.attributes.capacity()), ); attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key1, value1); attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key2, value2); @@ -984,538 +1156,572 @@ fn op_otel_span_flush(state: &mut OpState) { spans.on_end(temporary_span.0); } -// Holds data being built from JS before -// it is submitted to the rust processor. 
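Alongside the `attr!`/`attr_raw!` split, the span-attribute ops switch from `capacity - attributes.capacity()` to `saturating_sub`. The old expression underflows once the vector's real capacity exceeds the JS-provided hint (allocators may round capacity up), which panics directly in debug builds and via a capacity-overflow panic in release. Small demo of the fixed pattern:

```rust
// Why the ops switched to saturating_sub: the capacity argument is only a
// hint from JS, so `hint - vec.capacity()` can underflow a usize on a later
// call. Saturating at zero turns the extra reservation into a no-op.
fn reserve_for_hint(attrs: &mut Vec<(String, String)>, hint: usize) {
    attrs.reserve_exact(hint.saturating_sub(attrs.capacity()));
}

fn main() {
    let mut attrs: Vec<(String, String)> = Vec::new();
    attrs.reserve_exact(10); // capacity is now at least 10
    // With plain subtraction, `4 - attrs.capacity()` would underflow here.
    reserve_for_hint(&mut attrs, 4);
    assert!(attrs.capacity() >= 10);
}
```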
-struct TemporaryMetricsExport { - resource_attributes: Vec, - scope_metrics: Vec, - metric: Option, +enum Instrument { + Counter(opentelemetry::metrics::Counter), + UpDownCounter(opentelemetry::metrics::UpDownCounter), + Gauge(opentelemetry::metrics::Gauge), + Histogram(opentelemetry::metrics::Histogram), + Observable(Arc, f64>>>), } -struct TemporaryMetric { - name: String, - description: String, - unit: String, - data: TemporaryMetricData, -} +impl GarbageCollected for Instrument {} -enum TemporaryMetricData { - Sum(opentelemetry_sdk::metrics::data::Sum), - Gauge(opentelemetry_sdk::metrics::data::Gauge), - Histogram(opentelemetry_sdk::metrics::data::Histogram), -} - -impl From for Metric { - fn from(value: TemporaryMetric) -> Self { - Metric { - name: Cow::Owned(value.name), - description: Cow::Owned(value.description), - unit: Cow::Owned(value.unit), - data: match value.data { - TemporaryMetricData::Sum(sum) => Box::new(sum), - TemporaryMetricData::Gauge(gauge) => Box::new(gauge), - TemporaryMetricData::Histogram(histogram) => Box::new(histogram), - }, - } - } -} - -#[op2(fast)] -fn op_otel_metrics_resource_attribute<'s>( - scope: &mut v8::HandleScope<'s>, +fn create_instrument<'a, T>( + cb: impl FnOnce( + &'_ opentelemetry::metrics::Meter, + String, + ) -> InstrumentBuilder<'_, T>, + cb2: impl FnOnce(InstrumentBuilder<'_, T>) -> Instrument, state: &mut OpState, - #[smi] capacity: u32, - key: v8::Local<'s, v8::Value>, - value: v8::Local<'s, v8::Value>, -) { - let metrics_export = if let Some(metrics_export) = - state.try_borrow_mut::() - { - metrics_export.resource_attributes.reserve_exact( - (capacity as usize) - metrics_export.resource_attributes.capacity(), - ); - metrics_export - } else { - state.put(TemporaryMetricsExport { - resource_attributes: Vec::with_capacity(capacity as usize), - scope_metrics: vec![], - metric: None, - }); - state.borrow_mut() + scope: &mut v8::HandleScope<'a>, + name: v8::Local<'a, v8::Value>, + description: v8::Local<'a, v8::Value>, + unit: v8::Local<'a, v8::Value>, +) -> Result { + let Some(InstrumentationScope(instrumentation_scope)) = + state.try_borrow::() + else { + return Err(anyhow!("instrumentation scope not available")); }; - attr!(scope, metrics_export.resource_attributes, key, value); -} -#[op2(fast)] -fn op_otel_metrics_resource_attribute2<'s>( - scope: &mut v8::HandleScope<'s>, - state: &mut OpState, - #[smi] capacity: u32, - key1: v8::Local<'s, v8::Value>, - value1: v8::Local<'s, v8::Value>, - key2: v8::Local<'s, v8::Value>, - value2: v8::Local<'s, v8::Value>, -) { - let metrics_export = if let Some(metrics_export) = - state.try_borrow_mut::() - { - metrics_export.resource_attributes.reserve_exact( - (capacity as usize) - metrics_export.resource_attributes.capacity(), - ); - metrics_export - } else { - state.put(TemporaryMetricsExport { - resource_attributes: Vec::with_capacity(capacity as usize), - scope_metrics: vec![], - metric: None, - }); - state.borrow_mut() + let meter = OTEL_PROCESSORS + .get() + .unwrap() + .meter_provider + .meter_with_scope(instrumentation_scope.clone()); + + let name = owned_string(scope, name.try_cast()?); + let mut builder = cb(&meter, name); + if !description.is_null_or_undefined() { + let description = owned_string(scope, description.try_cast()?); + builder = builder.with_description(description); }; - attr!(scope, metrics_export.resource_attributes, key1, value1); - attr!(scope, metrics_export.resource_attributes, key2, value2); -} - -#[allow(clippy::too_many_arguments)] -#[op2(fast)] -fn 
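`create_instrument` is generic over the instrument kind, which is why it takes two closures: one picks the meter method (each returns a differently-typed builder) and one wraps the built instrument into the shared `Instrument` enum. A compilable model with stand-in meter and builder types:

```rust
// Model of the two-closure builder pattern: the builder type T differs per
// instrument, so the caller supplies both the "pick" and the "wrap" step.
struct Meter;
struct CounterBuilder(String);
struct HistogramBuilder(String);
struct Counter(String);
struct Histogram(String);

impl Meter {
    fn f64_counter(&self, name: String) -> CounterBuilder { CounterBuilder(name) }
    fn f64_histogram(&self, name: String) -> HistogramBuilder { HistogramBuilder(name) }
}
impl CounterBuilder {
    fn build(self) -> Counter { Counter(self.0) }
}
impl HistogramBuilder {
    fn build(self) -> Histogram { Histogram(self.0) }
}

enum Instrument {
    Counter(Counter),
    Histogram(Histogram),
}

fn create_instrument<T>(
    pick: impl FnOnce(&Meter, String) -> T,
    wrap: impl FnOnce(T) -> Instrument,
    meter: &Meter,
    name: &str,
) -> Instrument {
    wrap(pick(meter, name.to_string()))
}

fn main() {
    let meter = Meter;
    // Mirrors op_otel_metric_create_counter / _create_histogram in the diff.
    let _c = create_instrument(
        |m, n| m.f64_counter(n),
        |b| Instrument::Counter(b.build()),
        &meter,
        "reqs",
    );
    let _h = create_instrument(
        |m, n| m.f64_histogram(n),
        |b| Instrument::Histogram(b.build()),
        &meter,
        "latency",
    );
}
```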
op_otel_metrics_resource_attribute3<'s>( - scope: &mut v8::HandleScope<'s>, - state: &mut OpState, - #[smi] capacity: u32, - key1: v8::Local<'s, v8::Value>, - value1: v8::Local<'s, v8::Value>, - key2: v8::Local<'s, v8::Value>, - value2: v8::Local<'s, v8::Value>, - key3: v8::Local<'s, v8::Value>, - value3: v8::Local<'s, v8::Value>, -) { - let metrics_export = if let Some(metrics_export) = - state.try_borrow_mut::() - { - metrics_export.resource_attributes.reserve_exact( - (capacity as usize) - metrics_export.resource_attributes.capacity(), - ); - metrics_export - } else { - state.put(TemporaryMetricsExport { - resource_attributes: Vec::with_capacity(capacity as usize), - scope_metrics: vec![], - metric: None, - }); - state.borrow_mut() + if !unit.is_null_or_undefined() { + let unit = owned_string(scope, unit.try_cast()?); + builder = builder.with_unit(unit); }; - attr!(scope, metrics_export.resource_attributes, key1, value1); - attr!(scope, metrics_export.resource_attributes, key2, value2); - attr!(scope, metrics_export.resource_attributes, key3, value3); + + Ok(cb2(builder)) } -#[op2(fast)] -fn op_otel_metrics_scope<'s>( - scope: &mut v8::HandleScope<'s>, +#[op2] +#[cppgc] +fn op_otel_metric_create_counter<'s>( state: &mut OpState, + scope: &mut v8::HandleScope<'s>, name: v8::Local<'s, v8::Value>, - schema_url: v8::Local<'s, v8::Value>, - version: v8::Local<'s, v8::Value>, -) { - let name = owned_string(scope, name.cast()); - - let scope_builder = opentelemetry::InstrumentationScope::builder(name); - let scope_builder = if schema_url.is_null_or_undefined() { - scope_builder - } else { - scope_builder.with_schema_url(owned_string(scope, schema_url.cast())) - }; - let scope_builder = if version.is_null_or_undefined() { - scope_builder - } else { - scope_builder.with_version(owned_string(scope, version.cast())) - }; - let scope = scope_builder.build(); - let scope_metric = ScopeMetrics { + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_instrument( + |meter, name| meter.f64_counter(name), + |i| Instrument::Counter(i.build()), + state, scope, - metrics: vec![], - }; - - match state.try_borrow_mut::() { - Some(temp) => { - if let Some(current_metric) = temp.metric.take() { - let metric = Metric::from(current_metric); - temp.scope_metrics.last_mut().unwrap().metrics.push(metric); - } - temp.scope_metrics.push(scope_metric); - } - None => { - state.put(TemporaryMetricsExport { - resource_attributes: vec![], - scope_metrics: vec![scope_metric], - metric: None, - }); - } - } -} - -#[op2(fast)] -fn op_otel_metrics_sum<'s>( - scope: &mut v8::HandleScope<'s>, - state: &mut OpState, - name: v8::Local<'s, v8::Value>, - description: v8::Local<'s, v8::Value>, - unit: v8::Local<'s, v8::Value>, - #[smi] temporality: u8, - is_monotonic: bool, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(current_metric) = temp.metric.take() { - let metric = Metric::from(current_metric); - temp.scope_metrics.last_mut().unwrap().metrics.push(metric); - } - - let name = owned_string(scope, name.cast()); - let description = owned_string(scope, description.cast()); - let unit = owned_string(scope, unit.cast()); - let temporality = match temporality { - 0 => Temporality::Delta, - 1 => Temporality::Cumulative, - _ => return, - }; - let sum = opentelemetry_sdk::metrics::data::Sum { - data_points: vec![], - temporality, - is_monotonic, - }; - - temp.metric = Some(TemporaryMetric { name, description, unit, - data: TemporaryMetricData::Sum(sum), - 
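The net effect of all these removals: aggregation moves out of JS. The old ops shipped finished data points (sums, gauges, histogram buckets) on every export; the new ops record raw measurements into SDK-owned instruments that the reader aggregates at collect time. A toy model of the two flows:

```rust
use std::sync::Mutex;

// Old path: JS owned the running totals and submitted finished data points.
fn old_submit(finished_points: &[f64]) -> f64 {
    finished_points.iter().sum()
}

// New path: the instrument lives in Rust; JS only calls `add`, and the
// reader aggregates whenever it collects.
#[derive(Default)]
struct CounterModel(Mutex<f64>);

impl CounterModel {
    fn add(&self, v: f64) {
        *self.0.lock().unwrap() += v;
    }
    fn collect(&self) -> f64 {
        *self.0.lock().unwrap()
    }
}

fn main() {
    assert_eq!(old_submit(&[1.5, 1.0]), 2.5);
    let counter = CounterModel::default();
    counter.add(1.5);
    counter.add(1.0);
    assert_eq!(counter.collect(), 2.5);
}
```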
}); + ) } -#[op2(fast)] -fn op_otel_metrics_gauge<'s>( - scope: &mut v8::HandleScope<'s>, +#[op2] +#[cppgc] +fn op_otel_metric_create_up_down_counter<'s>( state: &mut OpState, + scope: &mut v8::HandleScope<'s>, name: v8::Local<'s, v8::Value>, description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(current_metric) = temp.metric.take() { - let metric = Metric::from(current_metric); - temp.scope_metrics.last_mut().unwrap().metrics.push(metric); - } - - let name = owned_string(scope, name.cast()); - let description = owned_string(scope, description.cast()); - let unit = owned_string(scope, unit.cast()); - - let gauge = opentelemetry_sdk::metrics::data::Gauge { - data_points: vec![], - }; - - temp.metric = Some(TemporaryMetric { +) -> Result { + create_instrument( + |meter, name| meter.f64_up_down_counter(name), + |i| Instrument::UpDownCounter(i.build()), + state, + scope, name, description, unit, - data: TemporaryMetricData::Gauge(gauge), + ) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_gauge<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_instrument( + |meter, name| meter.f64_gauge(name), + |i| Instrument::Gauge(i.build()), + state, + scope, + name, + description, + unit, + ) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_histogram<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, + #[serde] boundaries: Option>, +) -> Result { + let Some(InstrumentationScope(instrumentation_scope)) = + state.try_borrow::() + else { + return Err(anyhow!("instrumentation scope not available")); + }; + + let meter = OTEL_PROCESSORS + .get() + .unwrap() + .meter_provider + .meter_with_scope(instrumentation_scope.clone()); + + let name = owned_string(scope, name.try_cast()?); + let mut builder = meter.f64_histogram(name); + if !description.is_null_or_undefined() { + let description = owned_string(scope, description.try_cast()?); + builder = builder.with_description(description); + }; + if !unit.is_null_or_undefined() { + let unit = owned_string(scope, unit.try_cast()?); + builder = builder.with_unit(unit); + }; + if let Some(boundaries) = boundaries { + builder = builder.with_boundaries(boundaries); + } + + Ok(Instrument::Histogram(builder.build())) +} + +fn create_async_instrument<'a, T>( + cb: impl FnOnce( + &'_ opentelemetry::metrics::Meter, + String, + ) -> AsyncInstrumentBuilder<'_, T, f64>, + cb2: impl FnOnce(AsyncInstrumentBuilder<'_, T, f64>), + state: &mut OpState, + scope: &mut v8::HandleScope<'a>, + name: v8::Local<'a, v8::Value>, + description: v8::Local<'a, v8::Value>, + unit: v8::Local<'a, v8::Value>, +) -> Result { + let Some(InstrumentationScope(instrumentation_scope)) = + state.try_borrow::() + else { + return Err(anyhow!("instrumentation scope not available")); + }; + + let meter = OTEL_PROCESSORS + .get() + .unwrap() + .meter_provider + .meter_with_scope(instrumentation_scope.clone()); + + let name = owned_string(scope, name.try_cast()?); + let mut builder = cb(&meter, name); + if !description.is_null_or_undefined() { + let description = owned_string(scope, description.try_cast()?); + builder = builder.with_description(description); + }; + if !unit.is_null_or_undefined() { + let unit = owned_string(scope, unit.try_cast()?); 
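`create_histogram` threads optional explicit bucket boundaries (`advice.explicitBucketBoundaries` on the JS side) into the SDK builder. Assuming OTel's upper-bound-inclusive bucket rule, N boundaries yield N+1 counts, the last being the overflow bucket — the same layout the removed histogram ops used to ship by hand:

```rust
// Standalone model of explicit bucket boundaries: a value lands in the first
// bucket whose upper bound it does not exceed, or in the overflow bucket.
fn bucket_index(boundaries: &[f64], value: f64) -> usize {
    boundaries
        .iter()
        .position(|b| value <= *b)
        .unwrap_or(boundaries.len())
}

fn main() {
    let boundaries = [0.005, 0.01, 0.025]; // e.g. request latency in seconds
    let mut counts = vec![0u64; boundaries.len() + 1];
    for v in [0.003, 0.009, 0.02, 1.0] {
        counts[bucket_index(&boundaries, v)] += 1;
    }
    assert_eq!(counts, vec![1, 1, 1, 1]); // last slot is the overflow bucket
}
```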
+ builder = builder.with_unit(unit); + }; + + let data_share = Arc::new(Mutex::new(HashMap::new())); + let data_share_: Arc, f64>>> = data_share.clone(); + builder = builder.with_callback(move |i| { + let data = { + let mut data = data_share_.lock().unwrap(); + std::mem::take(&mut *data) + }; + for (attributes, value) in data { + i.observe(value, &attributes); + } }); + cb2(builder); + + Ok(Instrument::Observable(data_share)) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_observable_counter<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_async_instrument( + |meter, name| meter.f64_observable_counter(name), + |i| { + i.build(); + }, + state, + scope, + name, + description, + unit, + ) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_observable_up_down_counter<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_async_instrument( + |meter, name| meter.f64_observable_up_down_counter(name), + |i| { + i.build(); + }, + state, + scope, + name, + description, + unit, + ) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_observable_gauge<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_async_instrument( + |meter, name| meter.f64_observable_gauge(name), + |i| { + i.build(); + }, + state, + scope, + name, + description, + unit, + ) +} + +struct MetricAttributes { + attributes: Vec, } #[op2(fast)] -fn op_otel_metrics_sum_or_gauge_data_point( +fn op_otel_metric_record0( state: &mut OpState, + #[cppgc] instrument: &Instrument, value: f64, - start_time: f64, - time: f64, ) { - let Some(temp) = state.try_borrow_mut::() else { - return; + let values = state.try_take::(); + let attributes = match &values { + Some(values) => &*values.attributes, + None => &[], }; - - let start_time = SystemTime::UNIX_EPOCH - .checked_add(std::time::Duration::from_secs_f64(start_time)) - .unwrap(); - let time = SystemTime::UNIX_EPOCH - .checked_add(std::time::Duration::from_secs_f64(time)) - .unwrap(); - - let data_point = opentelemetry_sdk::metrics::data::DataPoint { - value, - start_time: Some(start_time), - time: Some(time), - attributes: vec![], - exemplars: vec![], - }; - - match &mut temp.metric { - Some(TemporaryMetric { - data: TemporaryMetricData::Sum(sum), - .. - }) => sum.data_points.push(data_point), - Some(TemporaryMetric { - data: TemporaryMetricData::Gauge(gauge), - .. 
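Observable instruments never call the SDK directly: records go into a shared map keyed by attribute set, and the `with_callback` closure drains it at collection time, so the last value staged per attribute set in a cycle is the one observed. Model with string keys standing in for `Vec<KeyValue>`:

```rust
// Model of the observable staging added here: writers overwrite per key
// between collections; the SDK callback takes the whole map and observes it.
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

fn main() {
    let staged: Arc<Mutex<HashMap<String, f64>>> =
        Arc::new(Mutex::new(HashMap::new()));

    // op_otel_metric_observable_record*: stage values between collections.
    let writer = staged.clone();
    writer.lock().unwrap().insert("region=eu".into(), 1.0);
    writer.lock().unwrap().insert("region=eu".into(), 3.0); // overwrites

    // The with_callback closure: take the staged values and observe them.
    let data = std::mem::take(&mut *staged.lock().unwrap());
    for (attrs, value) in data {
        println!("observe {value} {{{attrs}}}"); // i.observe(value, &attributes)
    }
    assert!(staged.lock().unwrap().is_empty());
}
```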
- }) => gauge.data_points.push(data_point), + match instrument { + Instrument::Counter(counter) => counter.add(value, attributes), + Instrument::UpDownCounter(counter) => counter.add(value, attributes), + Instrument::Gauge(gauge) => gauge.record(value, attributes), + Instrument::Histogram(histogram) => histogram.record(value, attributes), _ => {} } } #[op2(fast)] -fn op_otel_metrics_histogram<'s>( - scope: &mut v8::HandleScope<'s>, +fn op_otel_metric_record1( state: &mut OpState, - name: v8::Local<'s, v8::Value>, - description: v8::Local<'s, v8::Value>, - unit: v8::Local<'s, v8::Value>, - #[smi] temporality: u8, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, ) { - let Some(temp) = state.try_borrow_mut::() else { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { return; }; - - if let Some(current_metric) = temp.metric.take() { - let metric = Metric::from(current_metric); - temp.scope_metrics.last_mut().unwrap().metrics.push(metric); - } - - let name = owned_string(scope, name.cast()); - let description = owned_string(scope, description.cast()); - let unit = owned_string(scope, unit.cast()); - - let temporality = match temporality { - 0 => Temporality::Delta, - 1 => Temporality::Cumulative, - _ => return, - }; - let histogram = opentelemetry_sdk::metrics::data::Histogram { - data_points: vec![], - temporality, - }; - - temp.metric = Some(TemporaryMetric { - name, - description, - unit, - data: TemporaryMetricData::Histogram(histogram), - }); -} - -#[allow(clippy::too_many_arguments)] -#[op2(fast)] -fn op_otel_metrics_histogram_data_point( - state: &mut OpState, - #[number] count: u64, - min: f64, - max: f64, - sum: f64, - start_time: f64, - time: f64, - #[smi] buckets: u32, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - let min = if min.is_nan() { None } else { Some(min) }; - let max = if max.is_nan() { None } else { Some(max) }; - - let start_time = SystemTime::UNIX_EPOCH - .checked_add(std::time::Duration::from_secs_f64(start_time)) - .unwrap(); - let time = SystemTime::UNIX_EPOCH - .checked_add(std::time::Duration::from_secs_f64(time)) - .unwrap(); - - let data_point = opentelemetry_sdk::metrics::data::HistogramDataPoint { - bounds: Vec::with_capacity(buckets as usize), - bucket_counts: Vec::with_capacity((buckets as usize) + 1), - count, - sum, - min, - max, - start_time, - time, - attributes: vec![], - exemplars: vec![], - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. - }) = &mut temp.metric - { - histogram.data_points.push(data_point); - } -} - -#[op2(fast)] -fn op_otel_metrics_histogram_data_point_entry_final( - state: &mut OpState, - #[number] count1: u64, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. - }) = &mut temp.metric - { - histogram - .data_points - .last_mut() - .unwrap() - .bucket_counts - .push(count1) - } -} - -#[op2(fast)] -fn op_otel_metrics_histogram_data_point_entry1( - state: &mut OpState, - #[number] count1: u64, - bound1: f64, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. 
- }) = &mut temp.metric - { - let data_point = histogram.data_points.last_mut().unwrap(); - data_point.bucket_counts.push(count1); - data_point.bounds.push(bound1); - } -} - -#[op2(fast)] -fn op_otel_metrics_histogram_data_point_entry2( - state: &mut OpState, - #[number] count1: u64, - bound1: f64, - #[number] count2: u64, - bound2: f64, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. - }) = &mut temp.metric - { - let data_point = histogram.data_points.last_mut().unwrap(); - data_point.bucket_counts.push(count1); - data_point.bounds.push(bound1); - data_point.bucket_counts.push(count2); - data_point.bounds.push(bound2); - } -} - -#[op2(fast)] -fn op_otel_metrics_histogram_data_point_entry3( - state: &mut OpState, - #[number] count1: u64, - bound1: f64, - #[number] count2: u64, - bound2: f64, - #[number] count3: u64, - bound3: f64, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. - }) = &mut temp.metric - { - let data_point = histogram.data_points.last_mut().unwrap(); - data_point.bucket_counts.push(count1); - data_point.bounds.push(bound1); - data_point.bucket_counts.push(count2); - data_point.bounds.push(bound2); - data_point.bucket_counts.push(count3); - data_point.bounds.push(bound3); - } -} - -#[op2(fast)] -fn op_otel_metrics_data_point_attribute<'s>( - scope: &mut v8::HandleScope<'s>, - state: &mut OpState, - #[smi] capacity: u32, - key: v8::Local<'s, v8::Value>, - value: v8::Local<'s, v8::Value>, -) { - if let Some(TemporaryMetricsExport { - metric: Some(metric), - .. - }) = state.try_borrow_mut::() - { - let attributes = match &mut metric.data { - TemporaryMetricData::Sum(sum) => { - &mut sum.data_points.last_mut().unwrap().attributes + let mut values = state.try_take::(); + let attr1 = attr_raw!(scope, key1, value1); + let attributes = match &mut values { + Some(values) => { + if let Some(kv) = attr1 { + values.attributes.reserve_exact(1); + values.attributes.push(kv); } - TemporaryMetricData::Gauge(gauge) => { - &mut gauge.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Histogram(histogram) => { - &mut histogram.data_points.last_mut().unwrap().attributes - } - }; - attributes.reserve_exact((capacity as usize) - attributes.capacity()); - attr!(scope, attributes, key, value); - } -} - -#[op2(fast)] -fn op_otel_metrics_data_point_attribute2<'s>( - scope: &mut v8::HandleScope<'s>, - state: &mut OpState, - #[smi] capacity: u32, - key1: v8::Local<'s, v8::Value>, - value1: v8::Local<'s, v8::Value>, - key2: v8::Local<'s, v8::Value>, - value2: v8::Local<'s, v8::Value>, -) { - if let Some(TemporaryMetricsExport { - metric: Some(metric), - .. 
- }) = state.try_borrow_mut::() - { - let attributes = match &mut metric.data { - TemporaryMetricData::Sum(sum) => { - &mut sum.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Gauge(gauge) => { - &mut gauge.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Histogram(histogram) => { - &mut histogram.data_points.last_mut().unwrap().attributes - } - }; - attributes.reserve_exact((capacity as usize) - attributes.capacity()); - attr!(scope, attributes, key1, value1); - attr!(scope, attributes, key2, value2); + &*values.attributes + } + None => match attr1 { + Some(kv1) => &[kv1] as &[KeyValue], + None => &[], + }, + }; + match &*instrument { + Instrument::Counter(counter) => counter.add(value, attributes), + Instrument::UpDownCounter(counter) => counter.add(value, attributes), + Instrument::Gauge(gauge) => gauge.record(value, attributes), + Instrument::Histogram(histogram) => histogram.record(value, attributes), + _ => {} } } #[allow(clippy::too_many_arguments)] #[op2(fast)] -fn op_otel_metrics_data_point_attribute3<'s>( +fn op_otel_metric_record2( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, + key2: v8::Local<'_, v8::Value>, + value2: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let mut values = state.try_take::(); + let attr1 = attr_raw!(scope, key1, value1); + let attr2 = attr_raw!(scope, key2, value2); + let attributes = match &mut values { + Some(values) => { + values.attributes.reserve_exact(2); + if let Some(kv1) = attr1 { + values.attributes.push(kv1); + } + if let Some(kv2) = attr2 { + values.attributes.push(kv2); + } + &*values.attributes + } + None => match (attr1, attr2) { + (Some(kv1), Some(kv2)) => &[kv1, kv2] as &[KeyValue], + (Some(kv1), None) => &[kv1], + (None, Some(kv2)) => &[kv2], + (None, None) => &[], + }, + }; + match &*instrument { + Instrument::Counter(counter) => counter.add(value, attributes), + Instrument::UpDownCounter(counter) => counter.add(value, attributes), + Instrument::Gauge(gauge) => gauge.record(value, attributes), + Instrument::Histogram(histogram) => histogram.record(value, attributes), + _ => {} + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_metric_record3( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, + key2: v8::Local<'_, v8::Value>, + value2: v8::Local<'_, v8::Value>, + key3: v8::Local<'_, v8::Value>, + value3: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let mut values = state.try_take::(); + let attr1 = attr_raw!(scope, key1, value1); + let attr2 = attr_raw!(scope, key2, value2); + let attr3 = attr_raw!(scope, key3, value3); + let attributes = match &mut values { + Some(values) => { + values.attributes.reserve_exact(3); + if let Some(kv1) = attr1 { + values.attributes.push(kv1); + } + if let Some(kv2) = attr2 { + values.attributes.push(kv2); + } + if let Some(kv3) = attr3 { + values.attributes.push(kv3); + } + &*values.attributes + } + None => match (attr1, attr2, attr3) { + (Some(kv1), Some(kv2), Some(kv3)) => &[kv1, kv2, kv3] as &[KeyValue], + (Some(kv1), Some(kv2), None) => &[kv1, kv2], + 
(Some(kv1), None, Some(kv3)) => &[kv1, kv3], + (None, Some(kv2), Some(kv3)) => &[kv2, kv3], + (Some(kv1), None, None) => &[kv1], + (None, Some(kv2), None) => &[kv2], + (None, None, Some(kv3)) => &[kv3], + (None, None, None) => &[], + }, + }; + match &*instrument { + Instrument::Counter(counter) => counter.add(value, attributes), + Instrument::UpDownCounter(counter) => counter.add(value, attributes), + Instrument::Gauge(gauge) => gauge.record(value, attributes), + Instrument::Histogram(histogram) => histogram.record(value, attributes), + _ => {} + } +} + +#[op2(fast)] +fn op_otel_metric_observable_record0( + state: &mut OpState, + #[cppgc] instrument: &Instrument, + value: f64, +) { + let values = state.try_take::(); + let attributes = values.map(|attr| attr.attributes).unwrap_or_default(); + if let Instrument::Observable(data_share) = instrument { + let mut data = data_share.lock().unwrap(); + data.insert(attributes, value); + } +} + +#[op2(fast)] +fn op_otel_metric_observable_record1( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let values = state.try_take::(); + let attr1 = attr_raw!(scope, key1, value1); + let mut attributes = values + .map(|mut attr| { + attr.attributes.reserve_exact(1); + attr.attributes + }) + .unwrap_or_else(|| Vec::with_capacity(1)); + if let Some(kv1) = attr1 { + attributes.push(kv1); + } + if let Instrument::Observable(data_share) = &*instrument { + let mut data = data_share.lock().unwrap(); + data.insert(attributes, value); + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_metric_observable_record2( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, + key2: v8::Local<'_, v8::Value>, + value2: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let values = state.try_take::(); + let mut attributes = values + .map(|mut attr| { + attr.attributes.reserve_exact(2); + attr.attributes + }) + .unwrap_or_else(|| Vec::with_capacity(2)); + let attr1 = attr_raw!(scope, key1, value1); + let attr2 = attr_raw!(scope, key2, value2); + if let Some(kv1) = attr1 { + attributes.push(kv1); + } + if let Some(kv2) = attr2 { + attributes.push(kv2); + } + if let Instrument::Observable(data_share) = &*instrument { + let mut data = data_share.lock().unwrap(); + data.insert(attributes, value); + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_metric_observable_record3( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, + key2: v8::Local<'_, v8::Value>, + value2: v8::Local<'_, v8::Value>, + key3: v8::Local<'_, v8::Value>, + value3: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let values = state.try_take::(); + let mut attributes = values + .map(|mut attr| { + attr.attributes.reserve_exact(3); + attr.attributes + }) + .unwrap_or_else(|| Vec::with_capacity(3)); + let attr1 = attr_raw!(scope, key1, value1); + let 
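The recordN ops share a trick worth noting: when nothing was staged in `OpState`, the up-to-three optional attributes are matched into fixed-size array literals that all coerce to one slice type, so the common path never allocates. Simplified model, string pairs in place of `KeyValue`:

```rust
// Every arm evaluates to a `&[Kv]`, so no Vec is built when no attributes
// were pre-staged — the same shape as the match in op_otel_metric_record2.
type Kv = (&'static str, &'static str);

fn record(value: f64, attributes: &[Kv]) {
    println!("record {value} with {attributes:?}");
}

fn record2(value: f64, attr1: Option<Kv>, attr2: Option<Kv>) {
    match (attr1, attr2) {
        (Some(kv1), Some(kv2)) => record(value, &[kv1, kv2]),
        (Some(kv), None) | (None, Some(kv)) => record(value, &[kv]),
        (None, None) => record(value, &[]),
    }
}

fn main() {
    record2(1.0, Some(("path", "/")), None);
}
```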
attr2 = attr_raw!(scope, key2, value2); + let attr3 = attr_raw!(scope, key3, value3); + if let Some(kv1) = attr1 { + attributes.push(kv1); + } + if let Some(kv2) = attr2 { + attributes.push(kv2); + } + if let Some(kv3) = attr3 { + attributes.push(kv3); + } + if let Instrument::Observable(data_share) = &*instrument { + let mut data = data_share.lock().unwrap(); + data.insert(attributes, value); + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_metric_attribute3<'s>( scope: &mut v8::HandleScope<'s>, state: &mut OpState, #[smi] capacity: u32, @@ -1526,49 +1732,60 @@ fn op_otel_metrics_data_point_attribute3<'s>( key3: v8::Local<'s, v8::Value>, value3: v8::Local<'s, v8::Value>, ) { - if let Some(TemporaryMetricsExport { - metric: Some(metric), - .. - }) = state.try_borrow_mut::() + let mut values = state.try_borrow_mut::(); + let attr1 = attr_raw!(scope, key1, value1); + let attr2 = attr_raw!(scope, key2, value2); + let attr3 = attr_raw!(scope, key3, value3); + if let Some(values) = &mut values { + values.attributes.reserve_exact( + (capacity as usize).saturating_sub(values.attributes.capacity()), + ); + if let Some(kv1) = attr1 { + values.attributes.push(kv1); + } + if let Some(kv2) = attr2 { + values.attributes.push(kv2); + } + if let Some(kv3) = attr3 { + values.attributes.push(kv3); + } + } else { + let mut attributes = Vec::with_capacity(capacity as usize); + if let Some(kv1) = attr1 { + attributes.push(kv1); + } + if let Some(kv2) = attr2 { + attributes.push(kv2); + } + if let Some(kv3) = attr3 { + attributes.push(kv3); + } + state.put(MetricAttributes { attributes }); + } +} + +struct ObservationDone(oneshot::Sender<()>); + +#[op2(async)] +async fn op_otel_metric_wait_to_observe(state: Rc>) -> bool { + let (tx, rx) = oneshot::channel(); { - let attributes = match &mut metric.data { - TemporaryMetricData::Sum(sum) => { - &mut sum.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Gauge(gauge) => { - &mut gauge.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Histogram(histogram) => { - &mut histogram.data_points.last_mut().unwrap().attributes - } - }; - attributes.reserve_exact((capacity as usize) - attributes.capacity()); - attr!(scope, attributes, key1, value1); - attr!(scope, attributes, key2, value2); - attr!(scope, attributes, key3, value3); + OTEL_PRE_COLLECT_CALLBACKS + .lock() + .expect("mutex poisoned") + .push(tx); + } + if let Ok(done) = rx.await { + state.borrow_mut().put(ObservationDone(done)); + true + } else { + false } } #[op2(fast)] -fn op_otel_metrics_submit(state: &mut OpState) { - let Some(mut temp) = state.try_take::() else { - return; - }; - - let Some(Processors { metrics, .. 
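`op_otel_metric_wait_to_observe` and `op_otel_metric_observation_done` form a two-level oneshot handshake with the reader: each parked observer receives a completion sender right before collection, and the reader's `JoinSet` waits until every observer has fired it. Standalone model of that dance:

```rust
// Channels of channels, as in OTEL_PRE_COLLECT_CALLBACKS: the reader hands
// each registered observer a `done` sender and blocks collection on it.
use tokio::sync::oneshot;
use tokio::task::JoinSet;

#[tokio::main]
async fn main() {
    // JS side: park until the reader wants fresh observations.
    let (register_tx, register_rx) = oneshot::channel::<oneshot::Sender<()>>();
    let observer = tokio::spawn(async move {
        if let Ok(done) = register_rx.await {
            // ... run observable callbacks, stage values ...
            let _ = done.send(()); // op_otel_metric_observation_done
        }
    });

    // Reader side (collect_and_export with collect_observed == true):
    let mut pending = JoinSet::new();
    let (done_tx, done_rx) = oneshot::channel::<()>();
    if register_tx.send(done_tx).is_ok() {
        pending.spawn(done_rx);
    }
    while pending.join_next().await.is_some() {} // wait for every observer
    // ... now the manual reader collects and sees fresh values ...
    observer.await.unwrap();
}
```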
}) = OTEL_PROCESSORS.get() else { - return; - }; - - if let Some(current_metric) = temp.metric { - let metric = Metric::from(current_metric); - temp.scope_metrics.last_mut().unwrap().metrics.push(metric); +fn op_otel_metric_observation_done(state: &mut OpState) { + if let Some(ObservationDone(done)) = state.try_take::() { + let _ = done.send(()); } - - let resource = Resource::new(temp.resource_attributes); - let scope_metrics = temp.scope_metrics; - - metrics.submit(ResourceMetrics { - resource, - scope_metrics, - }); } diff --git a/ext/telemetry/telemetry.ts b/ext/telemetry/telemetry.ts index d1335f65b5..86b4fe059d 100644 --- a/ext/telemetry/telemetry.ts +++ b/ext/telemetry/telemetry.ts @@ -7,23 +7,24 @@ import { op_otel_instrumentation_scope_enter, op_otel_instrumentation_scope_enter_builtin, op_otel_log, - op_otel_metrics_data_point_attribute, - op_otel_metrics_data_point_attribute2, - op_otel_metrics_data_point_attribute3, - op_otel_metrics_gauge, - op_otel_metrics_histogram, - op_otel_metrics_histogram_data_point, - op_otel_metrics_histogram_data_point_entry1, - op_otel_metrics_histogram_data_point_entry2, - op_otel_metrics_histogram_data_point_entry3, - op_otel_metrics_histogram_data_point_entry_final, - op_otel_metrics_resource_attribute, - op_otel_metrics_resource_attribute2, - op_otel_metrics_resource_attribute3, - op_otel_metrics_scope, - op_otel_metrics_submit, - op_otel_metrics_sum, - op_otel_metrics_sum_or_gauge_data_point, + op_otel_metric_attribute3, + op_otel_metric_create_counter, + op_otel_metric_create_gauge, + op_otel_metric_create_histogram, + op_otel_metric_create_observable_counter, + op_otel_metric_create_observable_gauge, + op_otel_metric_create_observable_up_down_counter, + op_otel_metric_create_up_down_counter, + op_otel_metric_observable_record0, + op_otel_metric_observable_record1, + op_otel_metric_observable_record2, + op_otel_metric_observable_record3, + op_otel_metric_observation_done, + op_otel_metric_record0, + op_otel_metric_record1, + op_otel_metric_record2, + op_otel_metric_record3, + op_otel_metric_wait_to_observe, op_otel_span_attribute, op_otel_span_attribute2, op_otel_span_attribute3, @@ -36,25 +37,32 @@ import { Console } from "ext:deno_console/01_console.js"; import { performance } from "ext:deno_web/15_performance.js"; const { - SafeWeakMap, Array, - ObjectEntries, - ReflectApply, - SymbolFor, + ArrayPrototypePush, Error, - Uint8Array, - TypedArrayPrototypeSubarray, ObjectAssign, ObjectDefineProperty, - WeakRefPrototypeDeref, + ObjectEntries, + ObjectPrototypeIsPrototypeOf, + ReflectApply, + SafeIterator, + SafeMap, + SafePromiseAll, + SafeSet, + SafeWeakMap, + SafeWeakRef, + SafeWeakSet, String, StringPrototypePadStart, - ObjectPrototypeIsPrototypeOf, - SafeWeakRef, + SymbolFor, + TypedArrayPrototypeSubarray, + Uint8Array, + WeakRefPrototypeDeref, } = primordials; const { AsyncVariable, setAsyncContext } = core; export let TRACING_ENABLED = false; +export let METRICS_ENABLED = false; let DETERMINISTIC = false; // Note: These start at 0 in the JS library, @@ -202,30 +210,9 @@ const instrumentationScopes = new SafeWeakMap< >(); let activeInstrumentationLibrary: WeakRef | null = null; -function submitSpan( - spanId: string | Uint8Array, - traceId: string | Uint8Array, - traceFlags: number, - parentSpanId: string | Uint8Array | null, - span: Omit< - ReadableSpan, - | "spanContext" - | "startTime" - | "endTime" - | "parentSpanId" - | "duration" - | "ended" - | "resource" - >, - startTime: number, - endTime: number, +function 
activateInstrumentationLibrary( + instrumentationLibrary: InstrumentationLibrary, ) { - if (!TRACING_ENABLED) return; - if (!(traceFlags & TRACE_FLAG_SAMPLED)) return; - - // TODO(@lucacasonato): `resource` is ignored for now, should we implement it? - - const instrumentationLibrary = span.instrumentationLibrary; if ( !activeInstrumentationLibrary || WeakRefPrototypeDeref(activeInstrumentationLibrary) !== @@ -255,6 +242,32 @@ function submitSpan( } } } +} + +function submitSpan( + spanId: string | Uint8Array, + traceId: string | Uint8Array, + traceFlags: number, + parentSpanId: string | Uint8Array | null, + span: Omit< + ReadableSpan, + | "spanContext" + | "startTime" + | "endTime" + | "parentSpanId" + | "duration" + | "ended" + | "resource" + >, + startTime: number, + endTime: number, +) { + if (!TRACING_ENABLED) return; + if (!(traceFlags & TRACE_FLAG_SAMPLED)) return; + + // TODO(@lucacasonato): `resource` is ignored for now, should we implement it? + + activateInstrumentationLibrary(span.instrumentationLibrary); op_otel_span_start( traceId, @@ -368,7 +381,7 @@ export let endSpan: (span: Span) => void; export class Span { #traceId: string | Uint8Array; - #spanId: Uint8Array; + #spanId: string | Uint8Array; #traceFlags = TRACE_FLAG_SAMPLED; #spanContext: SpanContext | null = null; @@ -687,260 +700,510 @@ class ContextManager { } } -function attributeValue(value: IAnyValue) { - return value.boolValue ?? value.stringValue ?? value.doubleValue ?? - value.intValue; +// metrics + +interface MeterOptions { + schemaUrl?: string; } -function submitMetrics(resource, scopeMetrics) { - let i = 0; - while (i < resource.attributes.length) { - if (i + 2 < resource.attributes.length) { - op_otel_metrics_resource_attribute3( - resource.attributes.length, - resource.attributes[i].key, - attributeValue(resource.attributes[i].value), - resource.attributes[i + 1].key, - attributeValue(resource.attributes[i + 1].value), - resource.attributes[i + 2].key, - attributeValue(resource.attributes[i + 2].value), - ); - i += 3; - } else if (i + 1 < resource.attributes.length) { - op_otel_metrics_resource_attribute2( - resource.attributes.length, - resource.attributes[i].key, - attributeValue(resource.attributes[i].value), - resource.attributes[i + 1].key, - attributeValue(resource.attributes[i + 1].value), - ); - i += 2; - } else { - op_otel_metrics_resource_attribute( - resource.attributes.length, - resource.attributes[i].key, - attributeValue(resource.attributes[i].value), - ); - i += 1; - } +interface MetricOptions { + description?: string; + + unit?: string; + + valueType?: ValueType; + + advice?: MetricAdvice; +} + +enum ValueType { + INT = 0, + DOUBLE = 1, +} + +interface MetricAdvice { + /** + * Hint the explicit bucket boundaries for SDK if the metric is been + * aggregated with a HistogramAggregator. 
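Every creator in this diff builds only `f64_*` instruments, which appears to be why the TS layer throws on any `valueType` other than `DOUBLE`: integer measurements would be silently widened to `f64`, losing precision past 2^53. A short demonstration of that loss:

```rust
// Why pinning everything to f64 makes an INT value type dishonest: u64
// values above 2^53 are not representable, so the round trip is lossy.
fn main() {
    let int_measurement: u64 = (1u64 << 53) + 1;
    let widened = int_measurement as f64;
    assert_ne!(widened as u64, int_measurement); // precision lost above 2^53
}
```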
+ */ + explicitBucketBoundaries?: number[]; +} + +export class MeterProvider { + getMeter(name: string, version?: string, options?: MeterOptions): Meter { + return new Meter({ name, version, schemaUrl: options?.schemaUrl }); + } +} + +type MetricAttributes = Attributes; + +type Instrument = { __key: "instrument" }; + +let batchResultHasObservables: ( + res: BatchObservableResult, + observables: Observable[], +) => boolean; + +class BatchObservableResult { + #observables: WeakSet; + + constructor(observables: WeakSet) { + this.#observables = observables; } - for (let smi = 0; smi < scopeMetrics.length; smi += 1) { - const { scope, metrics } = scopeMetrics[smi]; + static { + batchResultHasObservables = (cb, observables) => { + for (const observable of new SafeIterator(observables)) { + if (!cb.#observables.has(observable)) return false; + } + return true; + }; + } - op_otel_metrics_scope(scope.name, scope.schemaUrl, scope.version); + observe( + metric: Observable, + value: number, + attributes?: MetricAttributes, + ): void { + if (!this.#observables.has(metric)) return; + getObservableResult(metric).observe(value, attributes); + } +} - for (let mi = 0; mi < metrics.length; mi += 1) { - const metric = metrics[mi]; - switch (metric.dataPointType) { - case 3: - op_otel_metrics_sum( - metric.descriptor.name, - // deno-lint-ignore prefer-primordials - metric.descriptor.description, - metric.descriptor.unit, - metric.aggregationTemporality, - metric.isMonotonic, - ); - for (let di = 0; di < metric.dataPoints.length; di += 1) { - const dataPoint = metric.dataPoints[di]; - op_otel_metrics_sum_or_gauge_data_point( - dataPoint.value, - hrToSecs(dataPoint.startTime), - hrToSecs(dataPoint.endTime), - ); - const attributes = ObjectEntries(dataPoint.attributes); - let i = 0; - while (i < attributes.length) { - if (i + 2 < attributes.length) { - op_otel_metrics_data_point_attribute3( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - attributes[i + 2][0], - attributes[i + 2][1], - ); - i += 3; - } else if (i + 1 < attributes.length) { - op_otel_metrics_data_point_attribute2( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - ); - i += 2; - } else { - op_otel_metrics_data_point_attribute( - attributes.length, - attributes[i][0], - attributes[i][1], - ); - i += 1; - } - } - } - break; - case 2: - op_otel_metrics_gauge( - metric.descriptor.name, - // deno-lint-ignore prefer-primordials - metric.descriptor.description, - metric.descriptor.unit, - ); - for (let di = 0; di < metric.dataPoints.length; di += 1) { - const dataPoint = metric.dataPoints[di]; - op_otel_metrics_sum_or_gauge_data_point( - dataPoint.value, - hrToSecs(dataPoint.startTime), - hrToSecs(dataPoint.endTime), - ); - const attributes = ObjectEntries(dataPoint.attributes); - let i = 0; - while (i < attributes.length) { - if (i + 2 < attributes.length) { - op_otel_metrics_data_point_attribute3( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - attributes[i + 2][0], - attributes[i + 2][1], - ); - i += 3; - } else if (i + 1 < attributes.length) { - op_otel_metrics_data_point_attribute2( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - ); - i += 2; - } else { - op_otel_metrics_data_point_attribute( - attributes.length, - attributes[i][0], - attributes[i][1], - ); - i += 1; - } - } - } - break; - case 0: - 
op_otel_metrics_histogram( - metric.descriptor.name, - // deno-lint-ignore prefer-primordials - metric.descriptor.description, - metric.descriptor.unit, - metric.aggregationTemporality, - ); - for (let di = 0; di < metric.dataPoints.length; di += 1) { - const dataPoint = metric.dataPoints[di]; - const { boundaries, counts } = dataPoint.value.buckets; - op_otel_metrics_histogram_data_point( - dataPoint.value.count, - dataPoint.value.min ?? NaN, - dataPoint.value.max ?? NaN, - dataPoint.value.sum, - hrToSecs(dataPoint.startTime), - hrToSecs(dataPoint.endTime), - boundaries.length, - ); - let j = 0; - while (j < boundaries.length) { - if (j + 3 < boundaries.length) { - op_otel_metrics_histogram_data_point_entry3( - counts[j], - boundaries[j], - counts[j + 1], - boundaries[j + 1], - counts[j + 2], - boundaries[j + 2], - ); - j += 3; - } else if (j + 2 < boundaries.length) { - op_otel_metrics_histogram_data_point_entry2( - counts[j], - boundaries[j], - counts[j + 1], - boundaries[j + 1], - ); - j += 2; - } else { - op_otel_metrics_histogram_data_point_entry1( - counts[j], - boundaries[j], - ); - j += 1; - } - } - op_otel_metrics_histogram_data_point_entry_final(counts[j]); - const attributes = ObjectEntries(dataPoint.attributes); - let i = 0; - while (i < attributes.length) { - if (i + 2 < attributes.length) { - op_otel_metrics_data_point_attribute3( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - attributes[i + 2][0], - attributes[i + 2][1], - ); - i += 3; - } else if (i + 1 < attributes.length) { - op_otel_metrics_data_point_attribute2( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - ); - i += 2; - } else { - op_otel_metrics_data_point_attribute( - attributes.length, - attributes[i][0], - attributes[i][1], - ); - i += 1; - } - } - } - break; - default: - continue; +const BATCH_CALLBACKS = new SafeMap< + BatchObservableCallback, + BatchObservableResult +>(); +const INDIVIDUAL_CALLBACKS = new SafeMap>(); + +class Meter { + #instrumentationLibrary: InstrumentationLibrary; + + constructor(instrumentationLibrary: InstrumentationLibrary) { + this.#instrumentationLibrary = instrumentationLibrary; + } + + createCounter( + name: string, + options?: MetricOptions, + ): Counter { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if (!METRICS_ENABLED) return new Counter(null, false); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_counter( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + ) as Instrument; + return new Counter(instrument, false); + } + + createUpDownCounter( + name: string, + options?: MetricOptions, + ): Counter { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if (!METRICS_ENABLED) return new Counter(null, true); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_up_down_counter( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + ) as Instrument; + return new Counter(instrument, true); + } + + createGauge( + name: string, + options?: MetricOptions, + ): Gauge { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if 
(!METRICS_ENABLED) return new Gauge(null); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_gauge( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + ) as Instrument; + return new Gauge(instrument); + } + + createHistogram( + name: string, + options?: MetricOptions, + ): Histogram { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if (!METRICS_ENABLED) return new Histogram(null); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_histogram( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + options?.advice?.explicitBucketBoundaries, + ) as Instrument; + return new Histogram(instrument); + } + + createObservableCounter( + name: string, + options?: MetricOptions, + ): Observable { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if (!METRICS_ENABLED) new Observable(new ObservableResult(null, true)); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_observable_counter( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + ) as Instrument; + return new Observable(new ObservableResult(instrument, true)); + } + + createObservableGauge( + name: string, + options?: MetricOptions, + ): Observable { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if (!METRICS_ENABLED) new Observable(new ObservableResult(null, false)); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_observable_gauge( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + ) as Instrument; + return new Observable(new ObservableResult(instrument, false)); + } + + createObservableUpDownCounter( + name: string, + options?: MetricOptions, + ): Observable { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if (!METRICS_ENABLED) new Observable(new ObservableResult(null, false)); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_observable_up_down_counter( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + ) as Instrument; + return new Observable(new ObservableResult(instrument, false)); + } + + addBatchObservableCallback( + callback: BatchObservableCallback, + observables: Observable[], + ): void { + if (!METRICS_ENABLED) return; + const result = new BatchObservableResult(new SafeWeakSet(observables)); + startObserving(); + BATCH_CALLBACKS.set(callback, result); + } + + removeBatchObservableCallback( + callback: BatchObservableCallback, + observables: Observable[], + ): void { + if (!METRICS_ENABLED) return; + const result = BATCH_CALLBACKS.get(callback); + if (result && batchResultHasObservables(result, observables)) { + BATCH_CALLBACKS.delete(callback); + } + } +} + +type BatchObservableCallback = ( + observableResult: BatchObservableResult, +) => void | Promise; + +function record( + instrument: Instrument | null, + value: number, + attributes?: MetricAttributes, +) { + if (instrument === null) return; + if (attributes === 
undefined) { + op_otel_metric_record0(instrument, value); + } else { + const attrs = ObjectEntries(attributes); + if (attrs.length === 0) { + op_otel_metric_record0(instrument, value); + } + let i = 0; + while (i < attrs.length) { + const remaining = attrs.length - i; + if (remaining > 3) { + op_otel_metric_attribute3( + instrument, + value, + attrs[i][0], + attrs[i][1], + attrs[i + 1][0], + attrs[i + 1][1], + attrs[i + 2][0], + attrs[i + 2][1], + ); + i += 3; + } else if (remaining === 3) { + op_otel_metric_record3( + instrument, + value, + attrs[i][0], + attrs[i][1], + attrs[i + 1][0], + attrs[i + 1][1], + attrs[i + 2][0], + attrs[i + 2][1], + ); + i += 3; + } else if (remaining === 2) { + op_otel_metric_record2( + instrument, + value, + attrs[i][0], + attrs[i][1], + attrs[i + 1][0], + attrs[i + 1][1], + ); + i += 2; + } else if (remaining === 1) { + op_otel_metric_record1( + instrument, + value, + attrs[i][0], + attrs[i][1], + ); + i += 1; } } } - - op_otel_metrics_submit(); } -class MetricExporter { - export(metrics, resultCallback: (result: ExportResult) => void) { - try { - submitMetrics(metrics.resource, metrics.scopeMetrics); - resultCallback({ code: 0 }); - } catch (error) { - resultCallback({ - code: 1, - error: ObjectPrototypeIsPrototypeOf(error, Error) - ? error as Error - : new Error(String(error)), - }); +function recordObservable( + instrument: Instrument | null, + value: number, + attributes?: MetricAttributes, +) { + if (instrument === null) return; + if (attributes === undefined) { + op_otel_metric_observable_record0(instrument, value); + } else { + const attrs = ObjectEntries(attributes); + if (attrs.length === 0) { + op_otel_metric_observable_record0(instrument, value); + } + let i = 0; + while (i < attrs.length) { + const remaining = attrs.length - i; + if (remaining > 3) { + op_otel_metric_attribute3( + instrument, + value, + attrs[i][0], + attrs[i][1], + attrs[i + 1][0], + attrs[i + 1][1], + attrs[i + 2][0], + attrs[i + 2][1], + ); + i += 3; + } else if (remaining === 3) { + op_otel_metric_observable_record3( + instrument, + value, + attrs[i][0], + attrs[i][1], + attrs[i + 1][0], + attrs[i + 1][1], + attrs[i + 2][0], + attrs[i + 2][1], + ); + i += 3; + } else if (remaining === 2) { + op_otel_metric_observable_record2( + instrument, + value, + attrs[i][0], + attrs[i][1], + attrs[i + 1][0], + attrs[i + 1][1], + ); + i += 2; + } else if (remaining === 1) { + op_otel_metric_observable_record1( + instrument, + value, + attrs[i][0], + attrs[i][1], + ); + i += 1; + } } } +} - async forceFlush() {} +class Counter { + #instrument: Instrument | null; + #upDown: boolean; - async shutdown() {} + constructor(instrument: Instrument | null, upDown: boolean) { + this.#instrument = instrument; + this.#upDown = upDown; + } + + add(value: number, attributes?: MetricAttributes, _context?: Context): void { + if (value < 0 && !this.#upDown) { + throw new Error("Counter can only be incremented"); + } + record(this.#instrument, value, attributes); + } +} + +class Gauge { + #instrument: Instrument | null; + + constructor(instrument: Instrument | null) { + this.#instrument = instrument; + } + + record( + value: number, + attributes?: MetricAttributes, + _context?: Context, + ): void { + record(this.#instrument, value, attributes); + } +} + +class Histogram { + #instrument: Instrument | null; + + constructor(instrument: Instrument | null) { + this.#instrument = instrument; + } + + record( + value: number, + attributes?: MetricAttributes, + _context?: Context, + ): void { + 
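When a call site carries more than three attributes, `record`/`recordObservable` stage triples through `op_otel_metric_attribute3`, which parks a `MetricAttributes` vector in `OpState`; the closing recordN call then takes the staged vector and appends its own trailing pairs. Protocol model with a plain `Option` in place of `OpState` (the exact JS argument order for the staging op is hard to read from this diff, so treat the signature as illustrative):

```rust
// Models the staging contract between op_otel_metric_attribute3 (which
// accumulates) and the recordN op that drains via state.try_take().
type Kv = (&'static str, &'static str);

#[derive(Default)]
struct OpStateModel {
    staged: Option<Vec<Kv>>, // MetricAttributes
}

fn attribute3(state: &mut OpStateModel, capacity: usize, triple: [Kv; 3]) {
    let staged = state
        .staged
        .get_or_insert_with(|| Vec::with_capacity(capacity));
    staged.extend(triple);
}

fn record1(state: &mut OpStateModel, value: f64, last: Kv) {
    let mut attrs = state.staged.take().unwrap_or_default(); // state.try_take
    attrs.push(last);
    println!("record {value} with {attrs:?}");
}

fn main() {
    let mut state = OpStateModel::default();
    // Four attributes total: one attribute3 batch, then the closing record1.
    attribute3(&mut state, 4, [("a", "1"), ("b", "2"), ("c", "3")]);
    record1(&mut state, 1.0, ("d", "4"));
    assert!(state.staged.is_none());
}
```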
record(this.#instrument, value, attributes); + } +} + +type ObservableCallback = ( + observableResult: ObservableResult, +) => void | Promise; + +let getObservableResult: (observable: Observable) => ObservableResult; + +class Observable { + #result: ObservableResult; + + constructor(result: ObservableResult) { + this.#result = result; + } + + static { + getObservableResult = (observable) => observable.#result; + } + + addCallback(callback: ObservableCallback): void { + const res = INDIVIDUAL_CALLBACKS.get(this); + if (res) res.add(callback); + else INDIVIDUAL_CALLBACKS.set(this, new SafeSet([callback])); + startObserving(); + } + + removeCallback(callback: ObservableCallback): void { + const res = INDIVIDUAL_CALLBACKS.get(this); + if (res) res.delete(callback); + if (res?.size === 0) INDIVIDUAL_CALLBACKS.delete(this); + } +} + +class ObservableResult { + #instrument: Instrument | null; + #isRegularCounter: boolean; + + constructor(instrument: Instrument | null, isRegularCounter: boolean) { + this.#instrument = instrument; + this.#isRegularCounter = isRegularCounter; + } + + observe( + this: ObservableResult, + value: number, + attributes?: MetricAttributes, + ): void { + if (this.#isRegularCounter) { + if (value < 0) { + throw new Error("Observable counters can only be incremented"); + } + } + recordObservable(this.#instrument, value, attributes); + } +} + +async function observe(): Promise { + const promises: Promise[] = []; + // Primordials are not needed, because this is a SafeMap. + // deno-lint-ignore prefer-primordials + for (const { 0: observable, 1: callbacks } of INDIVIDUAL_CALLBACKS) { + const result = getObservableResult(observable); + // Primordials are not needed, because this is a SafeSet. + // deno-lint-ignore prefer-primordials + for (const callback of callbacks) { + // PromiseTry is not in primordials? + // deno-lint-ignore prefer-primordials + ArrayPrototypePush(promises, Promise.try(callback, result)); + } + } + // Primordials are not needed, because this is a SafeMap. + // deno-lint-ignore prefer-primordials + for (const { 0: callback, 1: result } of BATCH_CALLBACKS) { + // PromiseTry is not in primordials? 
+ // deno-lint-ignore prefer-primordials + ArrayPrototypePush(promises, Promise.try(callback, result)); + } + await SafePromiseAll(promises); +} + +let isObserving = false; +function startObserving() { + if (!isObserving) { + isObserving = true; + (async () => { + while (true) { + const promise = op_otel_metric_wait_to_observe(); + core.unrefOpPromise(promise); + const ok = await promise; + if (!ok) break; + await observe(); + op_otel_metric_observation_done(); + } + })(); + } } const otelConsoleConfig = { @@ -951,14 +1214,21 @@ const otelConsoleConfig = { export function bootstrap( config: [ + 0 | 1, 0 | 1, typeof otelConsoleConfig[keyof typeof otelConsoleConfig], 0 | 1, ], ): void { - const { 0: tracingEnabled, 1: consoleConfig, 2: deterministic } = config; + const { + 0: tracingEnabled, + 1: metricsEnabled, + 2: consoleConfig, + 3: deterministic, + } = config; TRACING_ENABLED = tracingEnabled === 1; + METRICS_ENABLED = metricsEnabled === 1; DETERMINISTIC = deterministic === 1; switch (consoleConfig) { @@ -980,5 +1250,5 @@ export function bootstrap( export const telemetry = { SpanExporter, ContextManager, - MetricExporter, + MeterProvider, }; diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index fc75b0a05d..6bf1b8ea03 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.169.0" +version = "0.171.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index d30332d0ff..9ca3ce6752 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.182.0" +version = "0.184.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index e673ee2bb4..f3ac711fc7 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -908,8 +908,8 @@ const _original = Symbol("[[original]]"); * @param {boolean=} autoClose If the resource should be auto-closed when the stream closes. Defaults to true. * @returns {ReadableStream} */ -function readableStreamForRid(rid, autoClose = true) { - const stream = new ReadableStream(_brand); +function readableStreamForRid(rid, autoClose = true, Super) { + const stream = new (Super ?? ReadableStream)(_brand); stream[_resourceBacking] = { rid, autoClose }; const tryClose = () => { @@ -1130,8 +1130,8 @@ async function readableStreamCollectIntoUint8Array(stream) { * @param {boolean=} autoClose If the resource should be auto-closed when the stream closes. Defaults to true. * @returns {ReadableStream} */ -function writableStreamForRid(rid, autoClose = true) { - const stream = new WritableStream(_brand); +function writableStreamForRid(rid, autoClose = true, Super) { + const stream = new (Super ?? WritableStream)(_brand); stream[_resourceBacking] = { rid, autoClose }; const tryClose = () => { diff --git a/ext/web/13_message_port.js b/ext/web/13_message_port.js index cf72c43e6f..79fec9de2f 100644 --- a/ext/web/13_message_port.js +++ b/ext/web/13_message_port.js @@ -102,8 +102,8 @@ const nodeWorkerThreadCloseCb = Symbol("nodeWorkerThreadCloseCb"); const nodeWorkerThreadCloseCbInvoked = Symbol("nodeWorkerThreadCloseCbInvoked"); export const refMessagePort = Symbol("refMessagePort"); /** It is used by 99_main.js and worker_threads to - * unref/ref on the global pollForMessages promise. */ -export const unrefPollForMessages = Symbol("unrefPollForMessages"); + * unref/ref on the global message event handler count. 
*/ +export const unrefParentPort = Symbol("unrefParentPort"); /** * @param {number} id diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index ac2b14fbed..44fb2e46bf 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.213.0" +version = "0.215.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webgpu/Cargo.toml b/ext/webgpu/Cargo.toml index 9621c085e8..3a491afcf8 100644 --- a/ext/webgpu/Cargo.toml +++ b/ext/webgpu/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webgpu" -version = "0.149.0" +version = "0.151.0" authors = ["the Deno authors"] edition.workspace = true license = "MIT" diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 38eff7b66f..60cb9f29f8 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.182.0" +version = "0.184.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index cb72618cad..8b8359f074 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.187.0" +version = "0.189.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 700a252016..4f9795d098 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.177.0" +version = "0.179.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/resolvers/deno/Cargo.toml b/resolvers/deno/Cargo.toml index 4dca044377..12c18d4452 100644 --- a/resolvers/deno/Cargo.toml +++ b/resolvers/deno/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_resolver" -version = "0.14.0" +version = "0.15.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -29,6 +29,7 @@ deno_path_util.workspace = true deno_semver.workspace = true node_resolver.workspace = true node_resolver.features = ["sync"] +sys_traits.workspace = true thiserror.workspace = true url.workspace = true diff --git a/resolvers/deno/cjs.rs b/resolvers/deno/cjs.rs index 6ae648deab..2ec253d41a 100644 --- a/resolvers/deno/cjs.rs +++ b/resolvers/deno/cjs.rs @@ -1,29 +1,30 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use crate::sync::MaybeDashMap; use deno_media_type::MediaType; -use node_resolver::env::NodeResolverEnv; use node_resolver::errors::ClosestPkgJsonError; use node_resolver::InNpmPackageCheckerRc; use node_resolver::PackageJsonResolverRc; use node_resolver::ResolutionMode; +use sys_traits::FsRead; use url::Url; +use crate::sync::MaybeDashMap; + /// Keeps track of what module specifiers were resolved as CJS. /// /// Modules that are `.js`, `.ts`, `.jsx`, and `.tsx` are only known to /// be CJS or ESM after they're loaded based on their contents. So these /// files will be "maybe CJS" until they're loaded.
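The `CjsTracker` below is the first of several types in this diff rewritten to be generic over `sys_traits` capability traits instead of a bespoke `NodeResolverEnv`/`DenoResolverFs` trait. A minimal sketch of that style, using only methods that appear elsewhere in this diff (`fs_is_file_no_err`, `fs_read_to_string_lossy`) and the std-backed `sys_traits::impls::RealSys`; the `is_probably_cjs` helper and its heuristic are illustrative, not part of this PR:

```rust
use std::path::Path;

use sys_traits::impls::RealSys;
use sys_traits::FsMetadata;
use sys_traits::FsRead;

// Illustrative helper: filesystem access goes through capability trait
// bounds, so production code can pass `RealSys` while tests pass a fake.
fn is_probably_cjs<TSys: FsMetadata + FsRead>(sys: &TSys, path: &Path) -> bool {
  sys.fs_is_file_no_err(path)
    && sys
      .fs_read_to_string_lossy(path)
      .map(|text| text.contains("module.exports"))
      .unwrap_or(false)
}

fn main() {
  println!("{}", is_probably_cjs(&RealSys, Path::new("index.js")));
}
```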
#[derive(Debug)] -pub struct CjsTracker { - is_cjs_resolver: IsCjsResolver, +pub struct CjsTracker { + is_cjs_resolver: IsCjsResolver, known: MaybeDashMap, } -impl CjsTracker { +impl CjsTracker { pub fn new( in_npm_pkg_checker: InNpmPackageCheckerRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, mode: IsCjsResolutionMode, ) -> Self { Self { @@ -124,16 +125,16 @@ pub enum IsCjsResolutionMode { /// Resolves whether a module is CJS or ESM. #[derive(Debug)] -pub struct IsCjsResolver { +pub struct IsCjsResolver { in_npm_pkg_checker: InNpmPackageCheckerRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, mode: IsCjsResolutionMode, } -impl IsCjsResolver { +impl IsCjsResolver { pub fn new( in_npm_pkg_checker: InNpmPackageCheckerRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, mode: IsCjsResolutionMode, ) -> Self { Self { diff --git a/resolvers/deno/fs.rs b/resolvers/deno/fs.rs deleted file mode 100644 index f2021a73a9..0000000000 --- a/resolvers/deno/fs.rs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use std::borrow::Cow; -use std::path::Path; -use std::path::PathBuf; - -pub struct DirEntry { - pub name: String, - pub is_file: bool, - pub is_directory: bool, -} - -pub trait DenoResolverFs { - fn read_to_string_lossy( - &self, - path: &Path, - ) -> std::io::Result>; - fn realpath_sync(&self, path: &Path) -> std::io::Result; - fn exists_sync(&self, path: &Path) -> bool; - fn is_dir_sync(&self, path: &Path) -> bool; - fn read_dir_sync(&self, dir_path: &Path) -> std::io::Result>; -} diff --git a/resolvers/deno/lib.rs b/resolvers/deno/lib.rs index 05fa416da1..c943aacdae 100644 --- a/resolvers/deno/lib.rs +++ b/resolvers/deno/lib.rs @@ -14,11 +14,10 @@ use deno_config::workspace::WorkspaceResolver; use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValueParseError; use deno_semver::npm::NpmPackageReqReference; -use fs::DenoResolverFs; -use node_resolver::env::NodeResolverEnv; use node_resolver::errors::NodeResolveError; use node_resolver::errors::PackageSubpathResolveError; use node_resolver::InNpmPackageCheckerRc; +use node_resolver::IsBuiltInNodeModuleChecker; use node_resolver::NodeResolution; use node_resolver::NodeResolutionKind; use node_resolver::NodeResolverRc; @@ -32,11 +31,14 @@ use npm::ResolveReqWithSubPathErrorKind; use sloppy_imports::SloppyImportResolverFs; use sloppy_imports::SloppyImportsResolutionKind; use sloppy_imports::SloppyImportsResolverRc; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsRead; +use sys_traits::FsReadDir; use thiserror::Error; use url::Url; pub mod cjs; -pub mod fs; pub mod npm; pub mod sloppy_imports; mod sync; @@ -80,22 +82,22 @@ pub enum DenoResolveErrorKind { #[derive(Debug)] pub struct NodeAndNpmReqResolver< - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { - pub node_resolver: NodeResolverRc, - pub npm_req_resolver: NpmReqResolverRc, + pub node_resolver: NodeResolverRc, + pub npm_req_resolver: NpmReqResolverRc, } pub struct DenoResolverOptions< 'a, - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TSloppyImportResolverFs: SloppyImportResolverFs, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { pub 
in_npm_pkg_checker: InNpmPackageCheckerRc, pub node_and_req_resolver: - Option>, + Option>, pub sloppy_imports_resolver: Option>, pub workspace_resolver: WorkspaceResolverRc, @@ -110,12 +112,13 @@ pub struct DenoResolverOptions< /// import map, JSX settings. #[derive(Debug)] pub struct DenoResolver< - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TSloppyImportResolverFs: SloppyImportResolverFs, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { in_npm_pkg_checker: InNpmPackageCheckerRc, - node_and_npm_resolver: Option>, + node_and_npm_resolver: + Option>, sloppy_imports_resolver: Option>, workspace_resolver: WorkspaceResolverRc, @@ -124,13 +127,17 @@ pub struct DenoResolver< } impl< - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TSloppyImportResolverFs: SloppyImportResolverFs, - > DenoResolver + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, + > DenoResolver { pub fn new( - options: DenoResolverOptions, + options: DenoResolverOptions< + TIsBuiltInNodeModuleChecker, + TSloppyImportResolverFs, + TSys, + >, ) -> Self { Self { in_npm_pkg_checker: options.in_npm_pkg_checker, diff --git a/resolvers/deno/npm/byonm.rs b/resolvers/deno/npm/byonm.rs index 08d06f9cac..3056a70f61 100644 --- a/resolvers/deno/npm/byonm.rs +++ b/resolvers/deno/npm/byonm.rs @@ -9,8 +9,8 @@ use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonRc; use deno_path_util::url_to_file_path; use deno_semver::package::PackageReq; +use deno_semver::StackString; use deno_semver::Version; -use node_resolver::env::NodeResolverEnv; use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveIoError; use node_resolver::errors::PackageJsonLoadError; @@ -18,11 +18,14 @@ use node_resolver::errors::PackageNotFoundError; use node_resolver::InNpmPackageChecker; use node_resolver::NpmPackageFolderResolver; use node_resolver::PackageJsonResolverRc; +use sys_traits::FsCanonicalize; +use sys_traits::FsDirEntry; +use sys_traits::FsMetadata; +use sys_traits::FsRead; +use sys_traits::FsReadDir; use thiserror::Error; use url::Url; -use crate::fs::DenoResolverFs; - use super::local::normalize_pkg_name_for_node_modules_deno_folder; use super::CliNpmReqResolver; use super::ResolvePkgFolderFromDenoReqError; @@ -30,7 +33,7 @@ use super::ResolvePkgFolderFromDenoReqError; #[derive(Debug, Error)] pub enum ByonmResolvePkgFolderFromDenoReqError { #[error("Could not find \"{}\" in a node_modules folder. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?", .0)] - MissingAlias(String), + MissingAlias(StackString), #[error(transparent)] PackageJson(#[from] PackageJsonLoadError), #[error("Could not find a matching package for 'npm:{}' in the node_modules directory. Ensure you have all your JSR and npm dependencies listed in your deno.json or package.json, then run `deno install`. 
Alternatively, turn on auto-install by specifying `\"nodeModulesDir\": \"auto\"` in your deno.json file.", .0)] @@ -39,44 +42,45 @@ pub enum ByonmResolvePkgFolderFromDenoReqError { Io(#[from] std::io::Error), } -pub struct ByonmNpmResolverCreateOptions< - Fs: DenoResolverFs, - TEnv: NodeResolverEnv, -> { +pub struct ByonmNpmResolverCreateOptions { // todo(dsherret): investigate removing this pub root_node_modules_dir: Option, - pub fs: Fs, - pub pkg_json_resolver: PackageJsonResolverRc, + pub sys: TSys, + pub pkg_json_resolver: PackageJsonResolverRc, } #[allow(clippy::disallowed_types)] -pub type ByonmNpmResolverRc = - crate::sync::MaybeArc>; +pub type ByonmNpmResolverRc = + crate::sync::MaybeArc>; #[derive(Debug)] -pub struct ByonmNpmResolver { - fs: Fs, - pkg_json_resolver: PackageJsonResolverRc, +pub struct ByonmNpmResolver< + TSys: FsCanonicalize + FsRead + FsMetadata + FsReadDir, +> { + sys: TSys, + pkg_json_resolver: PackageJsonResolverRc, root_node_modules_dir: Option, } -impl Clone - for ByonmNpmResolver +impl Clone + for ByonmNpmResolver { fn clone(&self) -> Self { Self { - fs: self.fs.clone(), + sys: self.sys.clone(), pkg_json_resolver: self.pkg_json_resolver.clone(), root_node_modules_dir: self.root_node_modules_dir.clone(), } } } -impl ByonmNpmResolver { - pub fn new(options: ByonmNpmResolverCreateOptions) -> Self { +impl + ByonmNpmResolver +{ + pub fn new(options: ByonmNpmResolverCreateOptions) -> Self { Self { root_node_modules_dir: options.root_node_modules_dir, - fs: options.fs, + sys: options.sys, pkg_json_resolver: options.pkg_json_resolver, } } @@ -128,19 +132,20 @@ impl ByonmNpmResolver { req: &PackageReq, referrer: &Url, ) -> Result { - fn node_resolve_dir( - fs: &Fs, + fn node_resolve_dir( + sys: &TSys, alias: &str, start_dir: &Path, ) -> std::io::Result> { for ancestor in start_dir.ancestors() { let node_modules_folder = ancestor.join("node_modules"); let sub_dir = join_package_name(&node_modules_folder, alias); - if fs.is_dir_sync(&sub_dir) { - return Ok(Some(deno_path_util::canonicalize_path_maybe_not_exists( - &sub_dir, - &|path| fs.realpath_sync(path), - )?)); + if sys.fs_is_dir_no_err(&sub_dir) { + return Ok(Some( + deno_path_util::fs::canonicalize_path_maybe_not_exists( + sys, &sub_dir, + )?, + )); } } Ok(None) @@ -153,7 +158,7 @@ impl ByonmNpmResolver { Some((pkg_json, alias)) => { // now try node resolution if let Some(resolved) = - node_resolve_dir(&self.fs, &alias, pkg_json.dir_path())? + node_resolve_dir(&self.sys, &alias, pkg_json.dir_path())? 
{ return Ok(resolved); } @@ -177,16 +182,14 @@ impl ByonmNpmResolver { &self, req: &PackageReq, referrer: &Url, - ) -> Result, PackageJsonLoadError> { + ) -> Result, PackageJsonLoadError> { fn resolve_alias_from_pkg_json( req: &PackageReq, pkg_json: &PackageJson, - ) -> Option { + ) -> Option { let deps = pkg_json.resolve_local_package_json_deps(); - for (key, value) in deps - .dependencies - .into_iter() - .chain(deps.dev_dependencies.into_iter()) + for (key, value) in + deps.dependencies.iter().chain(deps.dev_dependencies.iter()) { if let Ok(value) = value { match value { @@ -194,12 +197,14 @@ impl ByonmNpmResolver { if dep_req.name == req.name && dep_req.version_req.intersects(&req.version_req) { - return Some(key); + return Some(key.clone()); } } PackageJsonDepValue::Workspace(_workspace) => { - if key == req.name && req.version_req.tag() == Some("workspace") { - return Some(key); + if key.as_str() == req.name + && req.version_req.tag() == Some("workspace") + { + return Some(key.clone()); } } } @@ -246,7 +251,7 @@ impl ByonmNpmResolver { if let Ok(Some(dep_pkg_json)) = self.load_pkg_json(&pkg_folder.join("package.json")) { - if dep_pkg_json.name.as_ref() == Some(&req.name) { + if dep_pkg_json.name.as_deref() == Some(req.name.as_str()) { let matches_req = dep_pkg_json .version .as_ref() @@ -297,7 +302,7 @@ impl ByonmNpmResolver { // now check if node_modules/.deno/ matches this constraint let root_node_modules_dir = self.root_node_modules_dir.as_ref()?; let node_modules_deno_dir = root_node_modules_dir.join(".deno"); - let Ok(entries) = self.fs.read_dir_sync(&node_modules_deno_dir) else { + let Ok(entries) = self.sys.fs_read_dir(&node_modules_deno_dir) else { return None; }; let search_prefix = format!( @@ -310,10 +315,17 @@ impl ByonmNpmResolver { // - @denotest+add@1.0.0 // - @denotest+add@1.0.0_1 for entry in entries { - if !entry.is_directory { + let Ok(entry) = entry else { + continue; + }; + let Ok(file_type) = entry.file_type() else { + continue; + }; + if !file_type.is_dir() { continue; } - let Some(version_and_copy_idx) = entry.name.strip_prefix(&search_prefix) + let entry_name = entry.file_name().to_string_lossy().into_owned(); + let Some(version_and_copy_idx) = entry_name.strip_prefix(&search_prefix) else { continue; }; @@ -326,8 +338,8 @@ impl ByonmNpmResolver { }; if let Some(tag) = req.version_req.tag() { let initialized_file = - node_modules_deno_dir.join(&entry.name).join(".initialized"); - let Ok(contents) = self.fs.read_to_string_lossy(&initialized_file) + node_modules_deno_dir.join(&entry_name).join(".initialized"); + let Ok(contents) = self.sys.fs_read_to_string_lossy(&initialized_file) else { continue; }; @@ -335,19 +347,19 @@ impl ByonmNpmResolver { if tags.any(|t| t == tag) { if let Some((best_version_version, _)) = &best_version { if version > *best_version_version { - best_version = Some((version, entry.name)); + best_version = Some((version, entry_name)); } } else { - best_version = Some((version, entry.name)); + best_version = Some((version, entry_name)); } } } else if req.version_req.matches(&version) { if let Some((best_version_version, _)) = &best_version { if version > *best_version_version { - best_version = Some((version, entry.name)); + best_version = Some((version, entry_name)); } } else { - best_version = Some((version, entry.name)); + best_version = Some((version, entry_name)); } } } @@ -362,9 +374,14 @@ impl ByonmNpmResolver { } impl< - Fs: DenoResolverFs + Send + Sync + std::fmt::Debug, - TEnv: NodeResolverEnv, - > CliNpmReqResolver for 
ByonmNpmResolver + Sys: FsCanonicalize + + FsMetadata + + FsRead + + FsReadDir + + Send + + Sync + + std::fmt::Debug, + > CliNpmReqResolver for ByonmNpmResolver { fn resolve_pkg_folder_from_deno_module_req( &self, @@ -379,17 +396,22 @@ impl< } impl< - Fs: DenoResolverFs + Send + Sync + std::fmt::Debug, - TEnv: NodeResolverEnv, - > NpmPackageFolderResolver for ByonmNpmResolver + Sys: FsCanonicalize + + FsMetadata + + FsRead + + FsReadDir + + Send + + Sync + + std::fmt::Debug, + > NpmPackageFolderResolver for ByonmNpmResolver { fn resolve_package_folder_from_package( &self, name: &str, referrer: &Url, ) -> Result { - fn inner( - fs: &Fs, + fn inner( + sys: &TSys, name: &str, referrer: &Url, ) -> Result { @@ -406,7 +428,7 @@ impl< }; let sub_dir = join_package_name(&node_modules_folder, name); - if fs.is_dir_sync(&sub_dir) { + if sys.fs_is_dir_no_err(&sub_dir) { return Ok(sub_dir); } } @@ -422,8 +444,8 @@ impl< ) } - let path = inner(&self.fs, name, referrer)?; - self.fs.realpath_sync(&path).map_err(|err| { + let path = inner(&self.sys, name, referrer)?; + self.sys.fs_canonicalize(&path).map_err(|err| { PackageFolderResolveIoError { package_name: name.to_string(), referrer: referrer.clone(), diff --git a/resolvers/deno/npm/mod.rs b/resolvers/deno/npm/mod.rs index 64ec86fe3f..082940eb34 100644 --- a/resolvers/deno/npm/mod.rs +++ b/resolvers/deno/npm/mod.rs @@ -6,7 +6,6 @@ use std::path::PathBuf; use boxed_error::Boxed; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; -use node_resolver::env::NodeResolverEnv; use node_resolver::errors::NodeResolveError; use node_resolver::errors::NodeResolveErrorKind; use node_resolver::errors::PackageFolderResolveErrorKind; @@ -15,15 +14,18 @@ use node_resolver::errors::PackageNotFoundError; use node_resolver::errors::PackageResolveErrorKind; use node_resolver::errors::PackageSubpathResolveError; use node_resolver::InNpmPackageCheckerRc; +use node_resolver::IsBuiltInNodeModuleChecker; use node_resolver::NodeResolution; use node_resolver::NodeResolutionKind; use node_resolver::NodeResolverRc; use node_resolver::ResolutionMode; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsRead; +use sys_traits::FsReadDir; use thiserror::Error; use url::Url; -use crate::fs::DenoResolverFs; - pub use byonm::ByonmInNpmPackageChecker; pub use byonm::ByonmNpmResolver; pub use byonm::ByonmNpmResolverCreateOptions; @@ -95,40 +97,46 @@ pub trait CliNpmReqResolver: Debug + Send + Sync { } pub struct NpmReqResolverOptions< - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { /// The resolver when "bring your own node_modules" is enabled where Deno /// does not setup the node_modules directories automatically, but instead /// uses what already exists on the file system. 
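The doc comment above summarizes BYONM ("bring your own node_modules"): Deno installs nothing and resolution just probes what is already on disk. The heart of it is the ancestor walk in `node_resolve_dir` earlier in this file; isolated under the same `sys_traits` bounds it amounts to the sketch below (the function name and the `main` demo are illustrative only):

```rust
use std::path::{Path, PathBuf};

use sys_traits::impls::RealSys;
use sys_traits::FsMetadata;

// Walk from the referrer's directory upward, probing
// `<ancestor>/node_modules/<alias>` until a directory exists.
fn find_package_dir<TSys: FsMetadata>(
  sys: &TSys,
  alias: &str,
  start_dir: &Path,
) -> Option<PathBuf> {
  for ancestor in start_dir.ancestors() {
    let candidate = ancestor.join("node_modules").join(alias);
    if sys.fs_is_dir_no_err(&candidate) {
      return Some(candidate);
    }
  }
  None
}

fn main() {
  let found = find_package_dir(&RealSys, "chalk", Path::new("/proj/src"));
  println!("{found:?}");
}
```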
- pub byonm_resolver: Option>, - pub fs: Fs, + pub byonm_resolver: Option>, pub in_npm_pkg_checker: InNpmPackageCheckerRc, - pub node_resolver: NodeResolverRc, + pub node_resolver: NodeResolverRc, pub npm_req_resolver: CliNpmReqResolverRc, + pub sys: TSys, } #[allow(clippy::disallowed_types)] -pub type NpmReqResolverRc = - crate::sync::MaybeArc>; +pub type NpmReqResolverRc = + crate::sync::MaybeArc>; #[derive(Debug)] -pub struct NpmReqResolver -{ - byonm_resolver: Option>, - fs: Fs, +pub struct NpmReqResolver< + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, +> { + byonm_resolver: Option>, + sys: TSys, in_npm_pkg_checker: InNpmPackageCheckerRc, - node_resolver: NodeResolverRc, + node_resolver: NodeResolverRc, npm_resolver: CliNpmReqResolverRc, } -impl - NpmReqResolver +impl< + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, + > NpmReqResolver { - pub fn new(options: NpmReqResolverOptions) -> Self { + pub fn new( + options: NpmReqResolverOptions, + ) -> Self { Self { byonm_resolver: options.byonm_resolver, - fs: options.fs, + sys: options.sys, in_npm_pkg_checker: options.in_npm_pkg_checker, node_resolver: options.node_resolver, npm_resolver: options.npm_req_resolver, @@ -175,7 +183,7 @@ impl Err(err) => { if self.byonm_resolver.is_some() { let package_json_path = package_folder.join("package.json"); - if !self.fs.exists_sync(&package_json_path) { + if !self.sys.fs_exists_no_err(&package_json_path) { return Err( MissingPackageNodeModulesFolderError { package_json_path }.into(), ); diff --git a/resolvers/deno/sync.rs b/resolvers/deno/sync.rs index 6e62336901..ebcf8509d5 100644 --- a/resolvers/deno/sync.rs +++ b/resolvers/deno/sync.rs @@ -13,21 +13,35 @@ mod inner { #[cfg(not(feature = "sync"))] mod inner { + use std::cell::Ref; + use std::cell::RefCell; + use std::collections::HashMap; + use std::hash::BuildHasher; + use std::hash::Hash; use std::hash::RandomState; pub use std::rc::Rc as MaybeArc; // Wrapper struct that exposes a subset of `DashMap` API. 
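The reworked single-threaded `MaybeDashMap` just below can no longer hand out plain `&V` references from its `RefCell`-backed map (returning `&V` out of a `RefCell` borrow does not borrow-check); instead `get` returns a `Ref<V>` guard projected with `Ref::filter_map`. The std technique in isolation, with an illustrative `Cache` type:

```rust
use std::cell::{Ref, RefCell};
use std::collections::HashMap;

struct Cache(RefCell<HashMap<String, String>>);

impl Cache {
  // `Ref::filter_map` narrows the borrow of the whole map down to a single
  // value while keeping the dynamic borrow alive for the caller.
  fn get<'a>(&'a self, key: &str) -> Option<Ref<'a, String>> {
    Ref::filter_map(self.0.borrow(), |map| map.get(key)).ok()
  }
}

fn main() {
  let cache = Cache(RefCell::new(HashMap::new()));
  cache.0.borrow_mut().insert("k".into(), "v".into());
  if let Some(v) = cache.get("k") {
    println!("{}", *v);
  }
}
```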
- #[derive(Default)] - struct MaybeDashMap(RefCell>); + #[derive(Debug)] + pub struct MaybeDashMap(RefCell>); - impl MaybeDashMap { - pub fn get(&'a self, key: &K) -> Option<&'a V> { - let inner = self.0.borrow(); - inner.get(key) + impl Default for MaybeDashMap + where + K: Eq + Hash, + S: Default + BuildHasher + Clone, + { + fn default() -> Self { + Self(RefCell::new(Default::default())) + } + } + + impl MaybeDashMap { + pub fn get<'a>(&'a self, key: &K) -> Option> { + Ref::filter_map(self.0.borrow(), |map| map.get(key)).ok() } pub fn insert(&self, key: K, value: V) -> Option { - let inner = self.0.borrow_mut(); + let mut inner = self.0.borrow_mut(); inner.insert(key, value) } } diff --git a/resolvers/node/Cargo.toml b/resolvers/node/Cargo.toml index e175bcfafa..1e35c0a355 100644 --- a/resolvers/node/Cargo.toml +++ b/resolvers/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "node_resolver" -version = "0.21.0" +version = "0.22.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -29,6 +29,7 @@ once_cell.workspace = true path-clean = "=0.1.0" regex.workspace = true serde_json.workspace = true +sys_traits.workspace = true thiserror.workspace = true tokio.workspace = true url.workspace = true diff --git a/resolvers/node/analyze.rs b/resolvers/node/analyze.rs index a444f4d923..2024e6a1e8 100644 --- a/resolvers/node/analyze.rs +++ b/resolvers/node/analyze.rs @@ -16,11 +16,14 @@ use once_cell::sync::Lazy; use anyhow::Context; use anyhow::Error as AnyError; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsRead; use url::Url; -use crate::env::NodeResolverEnv; use crate::npm::InNpmPackageCheckerRc; use crate::resolution::NodeResolverRc; +use crate::IsBuiltInNodeModuleChecker; use crate::NodeResolutionKind; use crate::NpmPackageFolderResolverRc; use crate::PackageJsonResolverRc; @@ -60,34 +63,38 @@ pub trait CjsCodeAnalyzer { pub struct NodeCodeTranslator< TCjsCodeAnalyzer: CjsCodeAnalyzer, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead, > { cjs_code_analyzer: TCjsCodeAnalyzer, - env: TNodeResolverEnv, in_npm_pkg_checker: InNpmPackageCheckerRc, - node_resolver: NodeResolverRc, + node_resolver: NodeResolverRc, npm_resolver: NpmPackageFolderResolverRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + sys: TSys, } -impl - NodeCodeTranslator +impl< + TCjsCodeAnalyzer: CjsCodeAnalyzer, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead, + > NodeCodeTranslator { pub fn new( cjs_code_analyzer: TCjsCodeAnalyzer, - env: TNodeResolverEnv, in_npm_pkg_checker: InNpmPackageCheckerRc, - node_resolver: NodeResolverRc, + node_resolver: NodeResolverRc, npm_resolver: NpmPackageFolderResolverRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + sys: TSys, ) -> Self { Self { cjs_code_analyzer, - env, in_npm_pkg_checker, node_resolver, npm_resolver, pkg_json_resolver, + sys, } } @@ -162,7 +169,7 @@ impl add_export( &mut source, export, - &format!("mod[\"{}\"]", escape_for_double_quote_string(export)), + &format!("mod[{}]", to_double_quote_string(export)), &mut temp_var_count, ); } @@ -366,7 +373,7 @@ impl // old school if package_subpath != "." 
{ let d = module_dir.join(package_subpath); - if self.env.is_dir_sync(&d) { + if self.sys.fs_is_dir_no_err(&d) { // subdir might have a package.json that specifies the entrypoint let package_json_path = d.join("package.json"); let maybe_package_json = self @@ -423,13 +430,13 @@ impl referrer: &Path, ) -> Result { let p = p.clean(); - if self.env.exists_sync(&p) { + if self.sys.fs_exists_no_err(&p) { let file_name = p.file_name().unwrap(); let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if self.env.is_file_sync(&p_js) { + if self.sys.fs_is_file_no_err(&p_js) { return Ok(p_js); - } else if self.env.is_dir_sync(&p) { + } else if self.sys.fs_is_dir_no_err(&p) { return Ok(p.join("index.js")); } else { return Ok(p); @@ -438,14 +445,14 @@ impl { let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if self.env.is_file_sync(&p_js) { + if self.sys.fs_is_file_no_err(&p_js) { return Ok(p_js); } } { let p_json = p.with_file_name(format!("{}.json", file_name.to_str().unwrap())); - if self.env.is_file_sync(&p_json) { + if self.sys.fs_is_file_no_err(&p_json) { return Ok(p_json); } } @@ -561,8 +568,8 @@ fn add_export( "const __deno_export_{temp_var_count}__ = {initializer};" )); source.push(format!( - "export {{ __deno_export_{temp_var_count}__ as \"{}\" }};", - escape_for_double_quote_string(name) + "export {{ __deno_export_{temp_var_count}__ as {} }};", + to_double_quote_string(name) )); } else { source.push(format!("export const {name} = {initializer};")); @@ -620,14 +627,9 @@ fn not_found(path: &str, referrer: &Path) -> AnyError { std::io::Error::new(std::io::ErrorKind::NotFound, msg).into() } -fn escape_for_double_quote_string(text: &str) -> Cow { - // this should be rare, so doing a scan first before allocating is ok - if text.chars().any(|c| matches!(c, '"' | '\\')) { - // don't bother making this more complex for perf because it's rare - Cow::Owned(text.replace('\\', "\\\\").replace('"', "\\\"")) - } else { - Cow::Borrowed(text) - } +fn to_double_quote_string(text: &str) -> String { + // serde can handle this for us + serde_json::to_string(text).unwrap() } #[cfg(test)] @@ -665,4 +667,13 @@ mod tests { Some(("@some-package/core".to_string(), "./actions".to_string())) ); } + + #[test] + fn test_to_double_quote_string() { + assert_eq!(to_double_quote_string("test"), "\"test\""); + assert_eq!( + to_double_quote_string("\r\n\t\"test"), + "\"\\r\\n\\t\\\"test\"" + ); + } } diff --git a/resolvers/node/env.rs b/resolvers/node/env.rs deleted file mode 100644 index b520ece0f8..0000000000 --- a/resolvers/node/env.rs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
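On the `to_double_quote_string` change above: every JSON string literal is also a valid double-quoted JavaScript string literal, so `serde_json` can produce the quoted, escaped export name directly. A standalone sketch (assumes the `serde_json` crate as a dependency):

```rust
// serde_json escapes `"`, `\`, and control characters, so the result can be
// spliced straight into generated `export { ... as "name" };` code.
fn to_double_quote_string(text: &str) -> String {
  serde_json::to_string(text).unwrap()
}

fn main() {
  assert_eq!(to_double_quote_string("a\"b\n"), r#""a\"b\n""#);
  println!("export {{ x as {} }};", to_double_quote_string("kebab-case"));
}
```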
- -use std::path::Path; -use std::path::PathBuf; - -use crate::sync::MaybeSend; -use crate::sync::MaybeSync; - -pub struct NodeResolverFsStat { - pub is_file: bool, - pub is_dir: bool, - pub is_symlink: bool, -} - -pub trait NodeResolverEnv: std::fmt::Debug + MaybeSend + MaybeSync { - fn is_builtin_node_module(&self, specifier: &str) -> bool; - - fn realpath_sync(&self, path: &Path) -> std::io::Result; - - fn stat_sync(&self, path: &Path) -> std::io::Result; - - fn exists_sync(&self, path: &Path) -> bool; - - fn is_file_sync(&self, path: &Path) -> bool { - self - .stat_sync(path) - .map(|stat| stat.is_file) - .unwrap_or(false) - } - - fn is_dir_sync(&self, path: &Path) -> bool { - self - .stat_sync(path) - .map(|stat| stat.is_dir) - .unwrap_or(false) - } - - fn pkg_json_fs(&self) -> &dyn deno_package_json::fs::DenoPkgJsonFs; -} diff --git a/resolvers/node/errors.rs b/resolvers/node/errors.rs index 600a365a8f..26b1a1d84a 100644 --- a/resolvers/node/errors.rs +++ b/resolvers/node/errors.rs @@ -320,7 +320,6 @@ impl NodeJsErrorCoded for PackageJsonLoadError { impl NodeJsErrorCoded for ClosestPkgJsonError { fn code(&self) -> NodeJsErrorCode { match self.as_kind() { - ClosestPkgJsonErrorKind::CanonicalizingDir(e) => e.code(), ClosestPkgJsonErrorKind::Load(e) => e.code(), } } @@ -331,26 +330,10 @@ pub struct ClosestPkgJsonError(pub Box); #[derive(Debug, Error)] pub enum ClosestPkgJsonErrorKind { - #[error(transparent)] - CanonicalizingDir(#[from] CanonicalizingPkgJsonDirError), #[error(transparent)] Load(#[from] PackageJsonLoadError), } -#[derive(Debug, Error)] -#[error("[{}] Failed canonicalizing package.json directory '{}'.", self.code(), dir_path.display())] -pub struct CanonicalizingPkgJsonDirError { - pub dir_path: PathBuf, - #[source] - pub source: std::io::Error, -} - -impl NodeJsErrorCoded for CanonicalizingPkgJsonDirError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_MODULE_NOT_FOUND - } -} - // todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError #[derive(Debug, Error)] #[error( diff --git a/resolvers/node/lib.rs b/resolvers/node/lib.rs index c73c395dfc..075f819ebb 100644 --- a/resolvers/node/lib.rs +++ b/resolvers/node/lib.rs @@ -4,7 +4,6 @@ #![deny(clippy::print_stdout)] pub mod analyze; -pub mod env; pub mod errors; mod npm; mod package_json; @@ -23,6 +22,7 @@ pub use package_json::PackageJsonThreadLocalCache; pub use path::PathClean; pub use resolution::parse_npm_pkg_name; pub use resolution::resolve_specifier_into_node_modules; +pub use resolution::IsBuiltInNodeModuleChecker; pub use resolution::NodeResolution; pub use resolution::NodeResolutionKind; pub use resolution::NodeResolver; diff --git a/resolvers/node/package_json.rs b/resolvers/node/package_json.rs index e3793af84a..ebbe099014 100644 --- a/resolvers/node/package_json.rs +++ b/resolvers/node/package_json.rs @@ -1,17 +1,16 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
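With `NodeResolverEnv` deleted above, the only non-filesystem capability the node resolver still needs is injected through the one-method `IsBuiltInNodeModuleChecker` trait (defined in `resolution.rs` further down and re-exported from `lib.rs` above). A minimal implementation might look like the following; the struct name and module list are illustrative, and whether bare or `node:`-prefixed specifiers reach the checker is an assumption here:

```rust
use node_resolver::IsBuiltInNodeModuleChecker;

#[derive(Debug)]
struct ExampleChecker;

impl IsBuiltInNodeModuleChecker for ExampleChecker {
  fn is_builtin_node_module(&self, specifier: &str) -> bool {
    // Tolerate both "fs" and "node:fs" style specifiers.
    let name = specifier.strip_prefix("node:").unwrap_or(specifier);
    matches!(name, "assert" | "buffer" | "fs" | "path" | "process" | "url")
  }
}

fn main() {
  assert!(ExampleChecker.is_builtin_node_module("node:fs"));
}
```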
-use deno_package_json::PackageJson; -use deno_package_json::PackageJsonRc; -use deno_path_util::strip_unc_prefix; use std::cell::RefCell; use std::collections::HashMap; use std::io::ErrorKind; use std::path::Path; use std::path::PathBuf; + +use deno_package_json::PackageJson; +use deno_package_json::PackageJsonRc; +use sys_traits::FsRead; use url::Url; -use crate::env::NodeResolverEnv; -use crate::errors::CanonicalizingPkgJsonDirError; use crate::errors::ClosestPkgJsonError; use crate::errors::PackageJsonLoadError; @@ -40,17 +39,17 @@ impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache { } #[allow(clippy::disallowed_types)] -pub type PackageJsonResolverRc = - crate::sync::MaybeArc>; +pub type PackageJsonResolverRc = + crate::sync::MaybeArc>; #[derive(Debug)] -pub struct PackageJsonResolver { - env: TEnv, +pub struct PackageJsonResolver { + sys: TSys, } -impl PackageJsonResolver { - pub fn new(env: TEnv) -> Self { - Self { env } +impl PackageJsonResolver { + pub fn new(sys: TSys) -> Self { + Self { sys } } pub fn get_closest_package_json( @@ -67,37 +66,8 @@ impl PackageJsonResolver { &self, file_path: &Path, ) -> Result, ClosestPkgJsonError> { - // we use this for deno compile using byonm because the script paths - // won't be in virtual file system, but the package.json paths will be - fn canonicalize_first_ancestor_exists( - dir_path: &Path, - env: &TEnv, - ) -> Result, std::io::Error> { - for ancestor in dir_path.ancestors() { - match env.realpath_sync(ancestor) { - Ok(dir_path) => return Ok(Some(dir_path)), - Err(err) if err.kind() == std::io::ErrorKind::NotFound => { - // keep searching - } - Err(err) => return Err(err), - } - } - Ok(None) - } - let parent_dir = file_path.parent().unwrap(); - let Some(start_dir) = canonicalize_first_ancestor_exists( - parent_dir, &self.env, - ) - .map_err(|source| CanonicalizingPkgJsonDirError { - dir_path: parent_dir.to_path_buf(), - source, - })? - else { - return Ok(None); - }; - let start_dir = strip_unc_prefix(start_dir); - for current_dir in start_dir.ancestors() { + for current_dir in parent_dir.ancestors() { let package_json_path = current_dir.join("package.json"); if let Some(pkg_json) = self.load_package_json(&package_json_path)? 
{ return Ok(Some(pkg_json)); @@ -112,9 +82,9 @@ impl PackageJsonResolver { path: &Path, ) -> Result, PackageJsonLoadError> { let result = PackageJson::load_from_path( - path, - self.env.pkg_json_fs(), + &self.sys, Some(&PackageJsonThreadLocalCache), + path, ); match result { Ok(pkg_json) => Ok(Some(pkg_json)), diff --git a/resolvers/node/resolution.rs b/resolvers/node/resolution.rs index 5f87698cd6..95631daf39 100644 --- a/resolvers/node/resolution.rs +++ b/resolvers/node/resolution.rs @@ -9,9 +9,13 @@ use anyhow::Error as AnyError; use deno_path_util::url_from_file_path; use serde_json::Map; use serde_json::Value; +use sys_traits::FileType; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsMetadataValue; +use sys_traits::FsRead; use url::Url; -use crate::env::NodeResolverEnv; use crate::errors; use crate::errors::DataUrlReferrerError; use crate::errors::FinalizeResolutionError; @@ -98,29 +102,44 @@ impl NodeResolution { } } -#[allow(clippy::disallowed_types)] -pub type NodeResolverRc = crate::sync::MaybeArc>; - -#[derive(Debug)] -pub struct NodeResolver { - env: TEnv, - in_npm_pkg_checker: InNpmPackageCheckerRc, - npm_pkg_folder_resolver: NpmPackageFolderResolverRc, - pkg_json_resolver: PackageJsonResolverRc, +pub trait IsBuiltInNodeModuleChecker: std::fmt::Debug { + fn is_builtin_node_module(&self, specifier: &str) -> bool; } -impl NodeResolver { +#[allow(clippy::disallowed_types)] +pub type NodeResolverRc = + crate::sync::MaybeArc>; + +#[derive(Debug)] +pub struct NodeResolver< + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead, +> { + in_npm_pkg_checker: InNpmPackageCheckerRc, + is_built_in_node_module_checker: TIsBuiltInNodeModuleChecker, + npm_pkg_folder_resolver: NpmPackageFolderResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + sys: TSys, +} + +impl< + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead, + > NodeResolver +{ pub fn new( - env: TEnv, in_npm_pkg_checker: InNpmPackageCheckerRc, + is_built_in_node_module_checker: TIsBuiltInNodeModuleChecker, npm_pkg_folder_resolver: NpmPackageFolderResolverRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + sys: TSys, ) -> Self { Self { - env, in_npm_pkg_checker, + is_built_in_node_module_checker, npm_pkg_folder_resolver, pkg_json_resolver, + sys, } } @@ -140,7 +159,10 @@ impl NodeResolver { // Note: if we are here, then the referrer is an esm module // TODO(bartlomieju): skipped "policy" part as we don't plan to support it - if self.env.is_builtin_node_module(specifier) { + if self + .is_built_in_node_module_checker + .is_builtin_node_module(specifier) + { return Ok(NodeResolution::BuiltIn(specifier.to_string())); } @@ -282,32 +304,25 @@ impl NodeResolver { p_str.to_string() }; - let (is_dir, is_file) = if let Ok(stats) = self.env.stat_sync(Path::new(&p)) - { - (stats.is_dir, stats.is_file) - } else { - (false, false) - }; - if is_dir { - return Err( + let maybe_file_type = self.sys.fs_metadata(p).map(|m| m.file_type()); + match maybe_file_type { + Ok(FileType::Dir) => Err( UnsupportedDirImportError { dir_url: resolved.clone(), maybe_referrer: maybe_referrer.map(ToOwned::to_owned), } .into(), - ); - } else if !is_file { - return Err( + ), + Ok(FileType::File) => Ok(resolved), + _ => Err( ModuleNotFoundError { specifier: resolved, maybe_referrer: maybe_referrer.map(ToOwned::to_owned), typ: "module", } .into(), - ); + ), } - - Ok(resolved) } pub fn 
resolve_package_subpath_from_deno_module( @@ -318,6 +333,8 @@ impl NodeResolver { resolution_mode: ResolutionMode, resolution_kind: NodeResolutionKind, ) -> Result { + // todo(dsherret): don't allocate a string here (maybe use an + // enum that says the subpath is not prefixed with a ./) let package_subpath = package_subpath .map(|s| format!("./{s}")) .unwrap_or_else(|| ".".to_string()); @@ -395,8 +412,8 @@ impl NodeResolver { maybe_referrer: Option<&Url>, resolution_mode: ResolutionMode, ) -> Result { - fn probe_extensions( - fs: &TEnv, + fn probe_extensions( + sys: &TSys, path: &Path, lowercase_path: &str, resolution_mode: ResolutionMode, @@ -405,20 +422,20 @@ impl NodeResolver { let mut searched_for_d_cts = false; if lowercase_path.ends_with(".mjs") { let d_mts_path = with_known_extension(path, "d.mts"); - if fs.exists_sync(&d_mts_path) { + if sys.fs_exists_no_err(&d_mts_path) { return Some(d_mts_path); } searched_for_d_mts = true; } else if lowercase_path.ends_with(".cjs") { let d_cts_path = with_known_extension(path, "d.cts"); - if fs.exists_sync(&d_cts_path) { + if sys.fs_exists_no_err(&d_cts_path) { return Some(d_cts_path); } searched_for_d_cts = true; } let dts_path = with_known_extension(path, "d.ts"); - if fs.exists_sync(&dts_path) { + if sys.fs_exists_no_err(&dts_path) { return Some(dts_path); } @@ -432,7 +449,7 @@ impl NodeResolver { _ => None, // already searched above }; if let Some(specific_dts_path) = specific_dts_path { - if fs.exists_sync(&specific_dts_path) { + if sys.fs_exists_no_err(&specific_dts_path) { return Some(specific_dts_path); } } @@ -447,11 +464,11 @@ impl NodeResolver { return Ok(url_from_file_path(path).unwrap()); } if let Some(path) = - probe_extensions(&self.env, path, &lowercase_path, resolution_mode) + probe_extensions(&self.sys, path, &lowercase_path, resolution_mode) { return Ok(url_from_file_path(&path).unwrap()); } - if self.env.is_dir_sync(path) { + if self.sys.fs_is_dir_no_err(path) { let resolution_result = self.resolve_package_dir_subpath( path, /* sub path */ ".", @@ -465,7 +482,7 @@ impl NodeResolver { } let index_path = path.join("index.js"); if let Some(path) = probe_extensions( - &self.env, + &self.sys, &index_path, &index_path.to_string_lossy().to_lowercase(), resolution_mode, @@ -669,7 +686,10 @@ impl NodeResolver { return match result { Ok(url) => Ok(url), Err(err) => { - if self.env.is_builtin_node_module(target) { + if self + .is_built_in_node_module_checker + .is_builtin_node_module(target) + { Ok(Url::parse(&format!("node:{}", target)).unwrap()) } else { Err(err) @@ -1351,7 +1371,7 @@ impl NodeResolver { if let Some(main) = maybe_main { let guess = package_json.path.parent().unwrap().join(main).clean(); - if self.env.is_file_sync(&guess) { + if self.sys.fs_is_file_no_err(&guess) { return Ok(url_from_file_path(&guess).unwrap()); } @@ -1380,7 +1400,7 @@ impl NodeResolver { .unwrap() .join(format!("{main}{ending}")) .clean(); - if self.env.is_file_sync(&guess) { + if self.sys.fs_is_file_no_err(&guess) { // TODO(bartlomieju): emitLegacyIndexDeprecation() return Ok(url_from_file_path(&guess).unwrap()); } @@ -1415,7 +1435,7 @@ impl NodeResolver { }; for index_file_name in index_file_names { let guess = directory.join(index_file_name).clean(); - if self.env.is_file_sync(&guess) { + if self.sys.fs_is_file_no_err(&guess) { // TODO(bartlomieju): emitLegacyIndexDeprecation() return Ok(url_from_file_path(&guess).unwrap()); } @@ -1452,9 +1472,7 @@ impl NodeResolver { { // Specifiers in the node_modules directory are canonicalized // so 
canonicalize then check if it's in the node_modules directory. - let specifier = resolve_specifier_into_node_modules(specifier, &|path| { - self.env.realpath_sync(path) - }); + let specifier = resolve_specifier_into_node_modules(&self.sys, specifier); return Some(specifier); } @@ -1715,16 +1733,15 @@ pub fn parse_npm_pkg_name( /// not be fully resolved at the time deno_graph is analyzing it /// because the node_modules folder might not exist at that time. pub fn resolve_specifier_into_node_modules( + sys: &impl FsCanonicalize, specifier: &Url, - canonicalize: &impl Fn(&Path) -> std::io::Result, ) -> Url { deno_path_util::url_to_file_path(specifier) .ok() // this path might not exist at the time the graph is being created // because the node_modules folder might not yet exist .and_then(|path| { - deno_path_util::canonicalize_path_maybe_not_exists(&path, canonicalize) - .ok() + deno_path_util::fs::canonicalize_path_maybe_not_exists(sys, &path).ok() }) .and_then(|path| deno_path_util::url_from_file_path(&path).ok()) .unwrap_or_else(|| specifier.clone()) diff --git a/resolvers/npm_cache/Cargo.toml b/resolvers/npm_cache/Cargo.toml index 1cc7237025..48d0a32437 100644 --- a/resolvers/npm_cache/Cargo.toml +++ b/resolvers/npm_cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_npm_cache" -version = "0.2.0" +version = "0.3.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -23,7 +23,9 @@ async-trait.workspace = true base64.workspace = true boxed_error.workspace = true deno_cache_dir.workspace = true +deno_error.workspace = true deno_npm.workspace = true +deno_path_util.workspace = true deno_semver.workspace = true deno_unsync = { workspace = true, features = ["tokio"] } faster-hex.workspace = true @@ -36,6 +38,7 @@ percent-encoding.workspace = true rand.workspace = true ring.workspace = true serde_json.workspace = true +sys_traits.workspace = true tar.workspace = true tempfile = "3.4.0" thiserror.workspace = true diff --git a/resolvers/npm_cache/fs_util.rs b/resolvers/npm_cache/fs_util.rs new file mode 100644 index 0000000000..ed123f085c --- /dev/null +++ b/resolvers/npm_cache/fs_util.rs @@ -0,0 +1,99 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use anyhow::Context; +use anyhow::Error as AnyError; +use std::io::ErrorKind; +use std::path::Path; +use std::time::Duration; +use sys_traits::FsCreateDirAll; +use sys_traits::FsDirEntry; +use sys_traits::FsHardLink; +use sys_traits::FsReadDir; +use sys_traits::FsRemoveFile; +use sys_traits::ThreadSleep; + +/// Hardlinks the files in one directory to another directory. +/// +/// Note: Does not handle symlinks. +pub fn hard_link_dir_recursive< + TSys: FsCreateDirAll + FsHardLink + FsReadDir + FsRemoveFile + ThreadSleep, +>( + sys: &TSys, + from: &Path, + to: &Path, +) -> Result<(), AnyError> { + sys + .fs_create_dir_all(to) + .with_context(|| format!("Creating {}", to.display()))?; + let read_dir = sys + .fs_read_dir(from) + .with_context(|| format!("Reading {}", from.display()))?; + + for entry in read_dir { + let entry = entry?; + let file_type = entry.file_type()?; + let new_from = from.join(entry.file_name()); + let new_to = to.join(entry.file_name()); + + if file_type.is_dir() { + hard_link_dir_recursive(sys, &new_from, &new_to).with_context(|| { + format!("Dir {} to {}", new_from.display(), new_to.display()) + })?; + } else if file_type.is_file() { + // note: chance for race conditions here between attempting to create, + then removing, then attempting to create.
There doesn't seem to be + // a way to hard link with overwriting in Rust, but maybe there is some + // way with platform specific code. The workaround here is to handle + // scenarios where something else might create or remove files. + if let Err(err) = sys.fs_hard_link(&new_from, &new_to) { + if err.kind() == ErrorKind::AlreadyExists { + if let Err(err) = sys.fs_remove_file(&new_to) { + if err.kind() == ErrorKind::NotFound { + // Assume another process/thread created this hard link to the file we are wanting + // to remove then sleep a little bit to let the other process/thread move ahead + // faster to reduce contention. + sys.thread_sleep(Duration::from_millis(10)); + } else { + return Err(err).with_context(|| { + format!( + "Removing file to hard link {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + + // Always attempt to recreate the hardlink. In contention scenarios, the other process + // might have been killed or exited after removing the file, but before creating the hardlink + if let Err(err) = sys.fs_hard_link(&new_from, &new_to) { + // Assume another process/thread created this hard link to the file we are wanting + // to now create then sleep a little bit to let the other process/thread move ahead + // faster to reduce contention. + if err.kind() == ErrorKind::AlreadyExists { + sys.thread_sleep(Duration::from_millis(10)); + } else { + return Err(err).with_context(|| { + format!( + "Hard linking {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + } else { + return Err(err).with_context(|| { + format!( + "Hard linking {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + } + } + + Ok(()) +} diff --git a/resolvers/npm_cache/lib.rs b/resolvers/npm_cache/lib.rs index 9f5424dc46..e681fa71ac 100644 --- a/resolvers/npm_cache/lib.rs +++ b/resolvers/npm_cache/lib.rs @@ -9,23 +9,37 @@ use std::sync::Arc; use anyhow::bail; use anyhow::Context; use anyhow::Error as AnyError; +use deno_cache_dir::file_fetcher::CacheSetting; use deno_cache_dir::npm::NpmCacheDir; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; use deno_npm::NpmPackageCacheFolderId; +use deno_path_util::fs::atomic_write_file_with_retries; use deno_semver::package::PackageNv; +use deno_semver::StackString; use deno_semver::Version; use http::HeaderName; use http::HeaderValue; use http::StatusCode; use parking_lot::Mutex; +use sys_traits::FsCreateDirAll; +use sys_traits::FsHardLink; +use sys_traits::FsMetadata; +use sys_traits::FsOpen; +use sys_traits::FsReadDir; +use sys_traits::FsRemoveFile; +use sys_traits::FsRename; +use sys_traits::SystemRandom; +use sys_traits::ThreadSleep; use url::Url; +mod fs_util; mod registry_info; mod remote; mod tarball; mod tarball_extract; +pub use fs_util::hard_link_dir_recursive; pub use registry_info::RegistryInfoProvider; pub use tarball::TarballCache; @@ -53,18 +67,7 @@ impl std::fmt::Display for DownloadError { } #[async_trait::async_trait(?Send)] -pub trait NpmCacheEnv: Send + Sync + 'static { - fn exists(&self, path: &Path) -> bool; - fn hard_link_dir_recursive( - &self, - from: &Path, - to: &Path, - ) -> Result<(), AnyError>; - fn atomic_write_file_with_retries( - &self, - file_path: &Path, - data: &[u8], - ) -> std::io::Result<()>; +pub trait NpmCacheHttpClient: Send + Sync + 'static { async fn download_with_retries_on_any_tokio_runtime( &self, url: Url, @@ -90,6 +93,27 @@ pub enum NpmCacheSetting { } impl NpmCacheSetting { + pub fn from_cache_setting(cache_setting: &CacheSetting) -> 
NpmCacheSetting { + match cache_setting { + CacheSetting::Only => NpmCacheSetting::Only, + CacheSetting::ReloadAll => NpmCacheSetting::ReloadAll, + CacheSetting::ReloadSome(values) => { + if values.iter().any(|v| v == "npm:") { + NpmCacheSetting::ReloadAll + } else { + NpmCacheSetting::ReloadSome { + npm_package_names: values + .iter() + .filter_map(|v| v.strip_prefix("npm:")) + .map(|n| n.to_string()) + .collect(), + } + } + } + CacheSetting::RespectHeaders => panic!("not supported"), + CacheSetting::Use => NpmCacheSetting::Use, + } + } pub fn should_use_for_npm_package(&self, package_name: &str) -> bool { match self { NpmCacheSetting::ReloadAll => false, @@ -103,27 +127,48 @@ impl NpmCacheSetting { /// Stores a single copy of npm packages in a cache. #[derive(Debug)] -pub struct NpmCache { - env: Arc, +pub struct NpmCache< + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsReadDir + + FsRemoveFile + + FsRename + + ThreadSleep + + SystemRandom, +> { cache_dir: Arc, + sys: TSys, cache_setting: NpmCacheSetting, npmrc: Arc, previously_reloaded_packages: Mutex>, } -impl NpmCache { +impl< + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsReadDir + + FsRemoveFile + + FsRename + + ThreadSleep + + SystemRandom, + > NpmCache +{ pub fn new( cache_dir: Arc, + sys: TSys, cache_setting: NpmCacheSetting, - env: Arc, npmrc: Arc, ) -> Self { Self { cache_dir, + sys, cache_setting, - env, - previously_reloaded_packages: Default::default(), npmrc, + previously_reloaded_packages: Default::default(), } } @@ -188,9 +233,11 @@ impl NpmCache { // it seems Windows does an "AccessDenied" error when moving a // directory with hard links, so that's why this solution is done with_folder_sync_lock(&folder_id.nv, &package_folder, || { - self - .env - .hard_link_dir_recursive(&original_package_folder, &package_folder) + hard_link_dir_recursive( + &self.sys, + &original_package_folder, + &package_folder, + ) })?; Ok(()) } @@ -238,7 +285,7 @@ impl NpmCache { .and_then(|cache_id| { Some(NpmPackageCacheFolderId { nv: PackageNv { - name: cache_id.name, + name: StackString::from_string(cache_id.name), version: Version::parse_from_npm(&cache_id.version).ok()?, }, copy_index: cache_id.copy_index, @@ -267,9 +314,12 @@ impl NpmCache { ) -> Result<(), AnyError> { let file_cache_path = self.get_registry_package_info_file_cache_path(name); let file_text = serde_json::to_string(&package_info)?; - self - .env - .atomic_write_file_with_retries(&file_cache_path, file_text.as_bytes())?; + atomic_write_file_with_retries( + &self.sys, + &file_cache_path, + file_text.as_bytes(), + 0o644, + )?; Ok(()) } @@ -281,6 +331,7 @@ impl NpmCache { const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock"; +// todo(dsherret): use `sys` here instead of `std::fs`. 
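A quick illustration of the `from_cache_setting` mapping above: a bare `npm:` reload value means "reload every npm package", `npm:`-prefixed names are collected, and non-npm values are dropped. Sketch only; it assumes `NpmCacheSetting` is exported from the crate root and that `ReloadSome` carries raw `--reload=`-style values, as the surrounding code suggests:

```rust
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_npm_cache::NpmCacheSetting;

fn main() {
  // e.g. `--reload=npm:chalk,jsr:@std/path`: only the npm entry survives.
  let some = CacheSetting::ReloadSome(vec![
    "npm:chalk".to_string(),
    "jsr:@std/path".to_string(),
  ]);
  match NpmCacheSetting::from_cache_setting(&some) {
    NpmCacheSetting::ReloadSome { npm_package_names } => {
      assert_eq!(npm_package_names, vec!["chalk".to_string()]);
    }
    _ => panic!("expected ReloadSome"),
  }

  // A bare `npm:` entry upgrades to reloading everything.
  let all = CacheSetting::ReloadSome(vec!["npm:".to_string()]);
  assert!(matches!(
    NpmCacheSetting::from_cache_setting(&all),
    NpmCacheSetting::ReloadAll
  ));
}
```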
 fn with_folder_sync_lock(
   package: &PackageNv,
   output_folder: &Path,
diff --git a/resolvers/npm_cache/registry_info.rs b/resolvers/npm_cache/registry_info.rs
index 543ddadc5a..57e188200d 100644
--- a/resolvers/npm_cache/registry_info.rs
+++ b/resolvers/npm_cache/registry_info.rs
@@ -18,16 +18,51 @@ use deno_unsync::sync::MultiRuntimeAsyncValueCreator;
 use futures::future::LocalBoxFuture;
 use futures::FutureExt;
 use parking_lot::Mutex;
+use sys_traits::FsCreateDirAll;
+use sys_traits::FsHardLink;
+use sys_traits::FsMetadata;
+use sys_traits::FsOpen;
+use sys_traits::FsReadDir;
+use sys_traits::FsRemoveFile;
+use sys_traits::FsRename;
+use sys_traits::SystemRandom;
+use sys_traits::ThreadSleep;
+use thiserror::Error;
 use url::Url;
 
 use crate::remote::maybe_auth_header_for_npm_registry;
 use crate::NpmCache;
-use crate::NpmCacheEnv;
+use crate::NpmCacheHttpClient;
 use crate::NpmCacheSetting;
 
 type LoadResult = Result<FutureResult, Arc<AnyError>>;
 type LoadFuture = LocalBoxFuture<'static, LoadResult>;
 
+#[derive(Debug, Error)]
+#[error(transparent)]
+pub struct AnyhowJsError(pub AnyError);
+
+impl deno_error::JsErrorClass for AnyhowJsError {
+  fn get_class(&self) -> &'static str {
+    "generic"
+  }
+
+  fn get_message(&self) -> std::borrow::Cow<'static, str> {
+    self.0.to_string().into()
+  }
+
+  fn get_additional_properties(
+    &self,
+  ) -> Option<
+    Vec<(
+      std::borrow::Cow<'static, str>,
+      std::borrow::Cow<'static, str>,
+    )>,
+  > {
+    None
+  }
+}
+
 #[derive(Debug, Clone)]
 enum FutureResult {
   PackageNotExists,
@@ -96,25 +131,54 @@ impl MemoryCache {
 ///
 /// This is shared amongst all the workers.
 #[derive(Debug)]
-pub struct RegistryInfoProvider<TEnv: NpmCacheEnv> {
+pub struct RegistryInfoProvider<
+  THttpClient: NpmCacheHttpClient,
+  TSys: FsCreateDirAll
+    + FsHardLink
+    + FsMetadata
+    + FsOpen
+    + FsReadDir
+    + FsRemoveFile
+    + FsRename
+    + ThreadSleep
+    + SystemRandom
+    + Send
+    + Sync
+    + 'static,
+> {
   // todo(#27198): remove this
-  cache: Arc<NpmCache<TEnv>>,
-  env: Arc<TEnv>,
+  cache: Arc<NpmCache<TSys>>,
+  http_client: Arc<THttpClient>,
   npmrc: Arc<ResolvedNpmRc>,
   force_reload_flag: AtomicFlag,
   memory_cache: Mutex<MemoryCache>,
   previously_loaded_packages: Mutex<HashSet<String>>,
 }
 
-impl<TEnv: NpmCacheEnv> RegistryInfoProvider<TEnv> {
+impl<
+    THttpClient: NpmCacheHttpClient,
+    TSys: FsCreateDirAll
+      + FsHardLink
+      + FsMetadata
+      + FsOpen
+      + FsReadDir
+      + FsRemoveFile
+      + FsRename
+      + ThreadSleep
+      + SystemRandom
+      + Send
+      + Sync
+      + 'static,
+  > RegistryInfoProvider<THttpClient, TSys>
+{
   pub fn new(
-    cache: Arc<NpmCache<TEnv>>,
-    env: Arc<TEnv>,
+    cache: Arc<NpmCache<TSys>>,
+    http_client: Arc<THttpClient>,
     npmrc: Arc<ResolvedNpmRc>,
   ) -> Self {
     Self {
       cache,
-      env,
+      http_client,
       npmrc,
       force_reload_flag: AtomicFlag::lowered(),
       memory_cache: Default::default(),
@@ -144,7 +208,9 @@ impl RegistryInfoProvider {
     }
   }
 
-  pub fn as_npm_registry_api(self: &Arc<Self>) -> NpmRegistryApiAdapter<TEnv> {
+  pub fn as_npm_registry_api(
+    self: &Arc<Self>,
+  ) -> NpmRegistryApiAdapter<THttpClient, TSys> {
     NpmRegistryApiAdapter(self.clone())
   }
 
@@ -157,9 +223,9 @@ impl RegistryInfoProvider {
       Ok(None) => Err(NpmRegistryPackageInfoLoadError::PackageNotExists {
         package_name: name.to_string(),
       }),
-      Err(err) => {
-        Err(NpmRegistryPackageInfoLoadError::LoadError(Arc::new(err)))
-      }
+      Err(err) => Err(NpmRegistryPackageInfoLoadError::LoadError(Arc::new(
+        AnyhowJsError(err),
+      ))),
     }
   }
 
@@ -315,7 +381,7 @@ impl RegistryInfoProvider {
       downloader.previously_loaded_packages.lock().insert(name.to_string());
 
       let maybe_bytes = downloader
-        .env
+        .http_client
         .download_with_retries_on_any_tokio_runtime(
           package_url,
           maybe_auth_header,
@@ -352,12 +418,39 @@ impl RegistryInfoProvider {
   }
 }
 
-pub struct NpmRegistryApiAdapter<TEnv: NpmCacheEnv>(
-  Arc<RegistryInfoProvider<TEnv>>,
-);
+pub struct NpmRegistryApiAdapter<
+  THttpClient: NpmCacheHttpClient,
+  TSys: FsCreateDirAll
+    + FsHardLink
+    + FsMetadata
+    + FsOpen
+    + FsReadDir
+    + FsRemoveFile
+    + FsRename
+    + ThreadSleep
+    + SystemRandom
+    + Send
+    + Sync
+    + 'static,
+>(Arc<RegistryInfoProvider<THttpClient, TSys>>);
 
 #[async_trait(?Send)]
-impl<TEnv: NpmCacheEnv> NpmRegistryApi for NpmRegistryApiAdapter<TEnv> {
+impl<
+    THttpClient: NpmCacheHttpClient,
+    TSys: FsCreateDirAll
+      + FsHardLink
+      + FsMetadata
+      + FsOpen
+      + FsReadDir
+      + FsRemoveFile
+      + FsRename
+      + ThreadSleep
+      + SystemRandom
+      + Send
+      + Sync
+      + 'static,
+  > NpmRegistryApi for NpmRegistryApiAdapter<THttpClient, TSys>
+{
   async fn package_info(
     &self,
     name: &str,
diff --git a/resolvers/npm_cache/tarball.rs b/resolvers/npm_cache/tarball.rs
index 5c8e460fd6..3a7e9df8a9 100644
--- a/resolvers/npm_cache/tarball.rs
+++ b/resolvers/npm_cache/tarball.rs
@@ -15,13 +15,22 @@ use futures::future::LocalBoxFuture;
 use futures::FutureExt;
 use http::StatusCode;
 use parking_lot::Mutex;
+use sys_traits::FsCreateDirAll;
+use sys_traits::FsHardLink;
+use sys_traits::FsMetadata;
+use sys_traits::FsOpen;
+use sys_traits::FsReadDir;
+use sys_traits::FsRemoveFile;
+use sys_traits::FsRename;
+use sys_traits::SystemRandom;
+use sys_traits::ThreadSleep;
 use url::Url;
 
 use crate::remote::maybe_auth_header_for_npm_registry;
 use crate::tarball_extract::verify_and_extract_tarball;
 use crate::tarball_extract::TarballExtractionMode;
 use crate::NpmCache;
-use crate::NpmCacheEnv;
+use crate::NpmCacheHttpClient;
 use crate::NpmCacheSetting;
 
 type LoadResult = Result<(), Arc<AnyError>>;
@@ -42,22 +51,54 @@ enum MemoryCacheItem {
 ///
 /// This is shared amongst all the workers.
 #[derive(Debug)]
-pub struct TarballCache<TEnv: NpmCacheEnv> {
-  cache: Arc<NpmCache<TEnv>>,
-  env: Arc<TEnv>,
+pub struct TarballCache<
+  THttpClient: NpmCacheHttpClient,
+  TSys: FsCreateDirAll
+    + FsHardLink
+    + FsMetadata
+    + FsOpen
+    + FsRemoveFile
+    + FsReadDir
+    + FsRename
+    + ThreadSleep
+    + SystemRandom
+    + Send
+    + Sync
+    + 'static,
+> {
+  cache: Arc<NpmCache<TSys>>,
+  http_client: Arc<THttpClient>,
+  sys: TSys,
   npmrc: Arc<ResolvedNpmRc>,
   memory_cache: Mutex<HashMap<PackageNv, MemoryCacheItem>>,
 }
 
-impl<TEnv: NpmCacheEnv> TarballCache<TEnv> {
+impl<
+    THttpClient: NpmCacheHttpClient,
+    TSys: FsCreateDirAll
+      + FsHardLink
+      + FsMetadata
+      + FsOpen
+      + FsRemoveFile
+      + FsReadDir
+      + FsRename
+      + ThreadSleep
+      + SystemRandom
+      + Send
+      + Sync
+      + 'static,
+  > TarballCache<THttpClient, TSys>
+{
   pub fn new(
-    cache: Arc<NpmCache<TEnv>>,
-    env: Arc<TEnv>,
+    cache: Arc<NpmCache<TSys>>,
+    http_client: Arc<THttpClient>,
+    sys: TSys,
     npmrc: Arc<ResolvedNpmRc>,
   ) -> Self {
     Self {
       cache,
-      env,
+      http_client,
+      sys,
       npmrc,
       memory_cache: Default::default(),
     }
@@ -131,7 +172,7 @@ impl TarballCache {
       let package_folder = tarball_cache.cache.package_folder_for_nv_and_url(&package_nv, registry_url);
       let should_use_cache = tarball_cache.cache.should_use_cache_for_package(&package_nv);
-      let package_folder_exists = tarball_cache.env.exists(&package_folder);
+      let package_folder_exists = tarball_cache.sys.fs_exists_no_err(&package_folder);
       if should_use_cache && package_folder_exists {
         return Ok(());
       } else if tarball_cache.cache.cache_setting() == &NpmCacheSetting::Only {
@@ -156,7 +197,7 @@ impl TarballCache {
         tarball_cache.npmrc.tarball_config(&tarball_uri);
       let maybe_auth_header = maybe_registry_config.and_then(|c| maybe_auth_header_for_npm_registry(c).ok()?);
 
-      let result = tarball_cache.env
+      let result = tarball_cache.http_client
        .download_with_retries_on_any_tokio_runtime(tarball_uri, maybe_auth_header)
        .await;
       let maybe_bytes = match result {
diff --git a/resolvers/npm_cache/tarball_extract.rs b/resolvers/npm_cache/tarball_extract.rs
index c4c614b35f..affe93eaa4 100644
--- a/resolvers/npm_cache/tarball_extract.rs
+++ b/resolvers/npm_cache/tarball_extract.rs
@@ -236,7 +236,7 @@ mod
test { #[test] pub fn test_verify_tarball() { let package = PackageNv { - name: "package".to_string(), + name: "package".into(), version: Version::parse_from_npm("1.0.0").unwrap(), }; let actual_checksum = diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index cb12abb141..ca21547efc 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.191.0" +version = "0.192.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -122,6 +122,7 @@ same-file = "1.0.6" serde.workspace = true signal-hook = "0.3.17" signal-hook-registry = "1.4.0" +sys_traits.workspace = true tempfile.workspace = true thiserror.workspace = true tokio.workspace = true diff --git a/runtime/errors.rs b/runtime/errors.rs index 3f8e900851..01588c593b 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -1157,7 +1157,7 @@ mod node { WorkerThreadsFilenameError::UrlToPathString => "Error", WorkerThreadsFilenameError::UrlToPath => "Error", WorkerThreadsFilenameError::FileNotFound(_) => "Error", - WorkerThreadsFilenameError::Fs(e) => super::get_fs_error(e), + WorkerThreadsFilenameError::Fs(e) => super::get_io_error_class(e), } } @@ -1173,7 +1173,7 @@ mod node { | UrlConversion(_) | ReadModule(_) | PackageImportsResolve(_) => "Error", - Fs(e) | UnableToGetCwd(e) => super::get_fs_error(e), + Fs(e) | UnableToGetCwd(e) => super::get_io_error_class(e), } } diff --git a/runtime/examples/extension/main.rs b/runtime/examples/extension/main.rs index 1ff16ec83f..a4ac85bf5e 100644 --- a/runtime/examples/extension/main.rs +++ b/runtime/examples/extension/main.rs @@ -37,11 +37,12 @@ async fn main() -> Result<(), AnyError> { let main_module = ModuleSpecifier::from_file_path(js_path).unwrap(); eprintln!("Running {main_module}..."); let fs = Arc::new(RealFs); - let permission_desc_parser = - Arc::new(RuntimePermissionDescriptorParser::new(fs.clone())); + let permission_desc_parser = Arc::new( + RuntimePermissionDescriptorParser::new(sys_traits::impls::RealSys), + ); let mut worker = MainWorker::bootstrap_from_options( main_module.clone(), - WorkerServiceOptions { + WorkerServiceOptions:: { module_loader: Rc::new(FsModuleLoader), permissions: PermissionsContainer::allow_all(permission_desc_parser), blob_store: Default::default(), diff --git a/runtime/fmt_errors.rs b/runtime/fmt_errors.rs index 6f120b5d46..3c60c3a3d7 100644 --- a/runtime/fmt_errors.rs +++ b/runtime/fmt_errors.rs @@ -422,6 +422,20 @@ fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec { "Run again with `--unstable-webgpu` flag to enable this API.", ), ]; + } else if msg.contains("listenQuic is not a function") { + return vec![ + FixSuggestion::info("listenQuic is an unstable API."), + FixSuggestion::hint( + "Run again with `--unstable-net` flag to enable this API.", + ), + ]; + } else if msg.contains("connectQuic is not a function") { + return vec![ + FixSuggestion::info("connectQuic is an unstable API."), + FixSuggestion::hint( + "Run again with `--unstable-net` flag to enable this API.", + ), + ]; // Try to capture errors like: // ``` // Uncaught Error: Cannot find module '../build/Release/canvas.node' diff --git a/runtime/js/90_deno_ns.js b/runtime/js/90_deno_ns.js index a510ee33c4..5511649279 100644 --- a/runtime/js/90_deno_ns.js +++ b/runtime/js/90_deno_ns.js @@ -13,6 +13,7 @@ import * as console from "ext:deno_console/01_console.js"; import * as ffi from "ext:deno_ffi/00_ffi.js"; import * as net from "ext:deno_net/01_net.js"; import * as tls from "ext:deno_net/02_tls.js"; 
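+// QUIC support (unstable) lives in the new 03_quic.js module of ext/deno_net;
+// its exports are surfaced on the `Deno` namespace below behind `--unstable-net`.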
+import * as quic from "ext:deno_net/03_quic.js"; import * as serve from "ext:deno_http/00_serve.ts"; import * as http from "ext:deno_http/01_http.js"; import * as websocket from "ext:deno_http/02_websocket.ts"; @@ -174,6 +175,15 @@ denoNsUnstableById[unstableIds.net] = { op_net_listen_udp, op_net_listen_unixpacket, ), + + connectQuic: quic.connectQuic, + listenQuic: quic.listenQuic, + QuicBidirectionalStream: quic.QuicBidirectionalStream, + QuicConn: quic.QuicConn, + QuicListener: quic.QuicListener, + QuicReceiveStream: quic.QuicReceiveStream, + QuicSendStream: quic.QuicSendStream, + QuicIncoming: quic.QuicIncoming, }; // denoNsUnstableById[unstableIds.unsafeProto] = { __proto__: null } diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 19432745d4..a11444bc36 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -170,12 +170,14 @@ function postMessage(message, transferOrOptions = { __proto__: null }) { let isClosing = false; let globalDispatchEvent; +let closeOnIdle; function hasMessageEventListener() { // the function name is kind of a misnomer, but we want to behave // as if we have message event listeners if a node message port is explicitly // refed (and the inverse as well) - return event.listenerCount(globalThis, "message") > 0 || + return (event.listenerCount(globalThis, "message") > 0 && + !globalThis[messagePort.unrefParentPort]) || messagePort.refedMessagePortsCount > 0; } @@ -188,7 +190,10 @@ async function pollForMessages() { } while (!isClosing) { const recvMessage = op_worker_recv_message(); - if (globalThis[messagePort.unrefPollForMessages] === true) { + // In a Node.js worker, unref() the op promise to prevent it from + // keeping the event loop alive. This avoids the need to explicitly + // call self.close() or worker.terminate(). 
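+      // Note: `closeOnIdle` is wired through as bootstrap option 14 (see
+      // `bootstrapWorkerRuntime` below) and is set for workers created via
+      // `node:worker_threads`; regular web workers keep this promise
+      // referenced so the event loop stays alive while awaiting messages.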
+ if (closeOnIdle) { core.unrefOpPromise(recvMessage); } const data = await recvMessage; @@ -521,6 +526,9 @@ const NOT_IMPORTED_OPS = [ // Used in jupyter API "op_base64_encode", + // Used in the lint API + "op_lint_create_serialized_ast", + // Related to `Deno.test()` API "op_test_event_step_result_failed", "op_test_event_step_result_ignored", @@ -915,6 +923,7 @@ function bootstrapWorkerRuntime( 6: argv0, 7: nodeDebug, 13: otelConfig, + 14: closeOnIdle_, } = runtimeOptions; performance.setTimeOrigin(); @@ -967,6 +976,7 @@ function bootstrapWorkerRuntime( globalThis.pollForMessages = pollForMessages; globalThis.hasMessageEventListener = hasMessageEventListener; + closeOnIdle = closeOnIdle_; for (let i = 0; i <= unstableFeatures.length; i++) { const id = unstableFeatures[i]; diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs index b8ebc88bed..71169217a7 100644 --- a/runtime/ops/os/mod.rs +++ b/runtime/ops/os/mod.rs @@ -424,8 +424,11 @@ fn rss() -> usize { let mut count = libc::MACH_TASK_BASIC_INFO_COUNT; // SAFETY: libc calls let r = unsafe { + extern "C" { + static mut mach_task_self_: std::ffi::c_uint; + } libc::task_info( - libc::mach_task_self(), + mach_task_self_, libc::MACH_TASK_BASIC_INFO, task_info.as_mut_ptr() as libc::task_info_t, &mut count as *mut libc::mach_msg_type_number_t, diff --git a/runtime/ops/web_worker/sync_fetch.rs b/runtime/ops/web_worker/sync_fetch.rs index d1f133d3d2..508bcb7bb0 100644 --- a/runtime/ops/web_worker/sync_fetch.rs +++ b/runtime/ops/web_worker/sync_fetch.rs @@ -104,11 +104,7 @@ pub fn op_worker_sync_fetch( let (body, mime_type, res_url) = match script_url.scheme() { "http" | "https" => { - let mut req = http::Request::new( - http_body_util::Empty::new() - .map_err(|never| match never {}) - .boxed(), - ); + let mut req = http::Request::new(deno_fetch::ReqBody::empty()); *req.uri_mut() = script_url.as_str().parse()?; let resp = diff --git a/runtime/permissions.rs b/runtime/permissions.rs index e8460e03f8..968c41560c 100644 --- a/runtime/permissions.rs +++ b/runtime/permissions.rs @@ -21,13 +21,17 @@ use deno_permissions::SysDescriptorParseError; use deno_permissions::WriteDescriptor; #[derive(Debug)] -pub struct RuntimePermissionDescriptorParser { - fs: deno_fs::FileSystemRc, +pub struct RuntimePermissionDescriptorParser< + TSys: sys_traits::EnvCurrentDir + Send + Sync, +> { + sys: TSys, } -impl RuntimePermissionDescriptorParser { - pub fn new(fs: deno_fs::FileSystemRc) -> Self { - Self { fs } +impl + RuntimePermissionDescriptorParser +{ + pub fn new(sys: TSys) -> Self { + Self { sys } } fn resolve_from_cwd(&self, path: &str) -> Result { @@ -45,14 +49,15 @@ impl RuntimePermissionDescriptorParser { fn resolve_cwd(&self) -> Result { self - .fs - .cwd() - .map_err(|e| PathResolveError::CwdResolve(e.into_io_error())) + .sys + .env_current_dir() + .map_err(PathResolveError::CwdResolve) } } -impl deno_permissions::PermissionDescriptorParser - for RuntimePermissionDescriptorParser +impl + deno_permissions::PermissionDescriptorParser + for RuntimePermissionDescriptorParser { fn parse_read_descriptor( &self, @@ -151,16 +156,14 @@ impl deno_permissions::PermissionDescriptorParser #[cfg(test)] mod test { - use std::sync::Arc; - - use deno_fs::RealFs; use deno_permissions::PermissionDescriptorParser; use super::*; #[test] fn test_handle_empty_value() { - let parser = RuntimePermissionDescriptorParser::new(Arc::new(RealFs)); + let parser = + RuntimePermissionDescriptorParser::new(sys_traits::impls::RealSys); assert!(parser.parse_read_descriptor("").is_err()); 
assert!(parser.parse_write_descriptor("").is_err()); assert!(parser.parse_env_descriptor("").is_err()); diff --git a/runtime/permissions/Cargo.toml b/runtime/permissions/Cargo.toml index dc46b03310..a7bd342a9c 100644 --- a/runtime/permissions/Cargo.toml +++ b/runtime/permissions/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_permissions" -version = "0.42.0" +version = "0.43.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/runtime/permissions/lib.rs b/runtime/permissions/lib.rs index a1a217738d..1c5fb36f93 100644 --- a/runtime/permissions/lib.rs +++ b/runtime/permissions/lib.rs @@ -39,10 +39,11 @@ pub use prompter::PromptCallback; pub use prompter::PromptResponse; #[derive(Debug, thiserror::Error)] -#[error("Requires {access}, {}", format_permission_error(.name))] -pub struct PermissionDeniedError { - pub access: String, - pub name: &'static str, +pub enum PermissionDeniedError { + #[error("Requires {access}, {}", format_permission_error(.name))] + Retryable { access: String, name: &'static str }, + #[error("Requires {access}, which cannot be granted in this environment")] + Fatal { access: String }, } fn format_permission_error(name: &'static str) -> String { @@ -144,11 +145,11 @@ impl PermissionState { ) } - fn error( + fn retryable_error( name: &'static str, info: impl FnOnce() -> Option, ) -> PermissionDeniedError { - PermissionDeniedError { + PermissionDeniedError::Retryable { access: Self::fmt_access(name, info), name, } @@ -182,7 +183,7 @@ impl PermissionState { PermissionState::Prompt if prompt => { let msg = { let info = info(); - StringBuilder::build(|builder| { + StringBuilder::::build(|builder| { builder.append(name); builder.append(" access"); if let Some(info) = &info { @@ -201,10 +202,12 @@ impl PermissionState { Self::log_perm_access(name, info); (Ok(()), true, true) } - PromptResponse::Deny => (Err(Self::error(name, info)), true, false), + PromptResponse::Deny => { + (Err(Self::retryable_error(name, info)), true, false) + } } } - _ => (Err(Self::error(name, info)), false, false), + _ => (Err(Self::retryable_error(name, info)), false, false), } } } @@ -495,7 +498,7 @@ impl UnaryPermission { } let maybe_formatted_display_name = desc.map(|d| format_display_name(d.display_name())); - let message = StringBuilder::build(|builder| { + let message = StringBuilder::::build(|builder| { builder.append(TQuery::flag_name()); builder.append(" access"); if let Some(display_name) = &maybe_formatted_display_name { diff --git a/runtime/snapshot.rs b/runtime/snapshot.rs index 48c500ef74..ad73f485ad 100644 --- a/runtime/snapshot.rs +++ b/runtime/snapshot.rs @@ -306,7 +306,10 @@ pub fn create_runtime_snapshot( ), deno_io::deno_io::init_ops_and_esm(Default::default()), deno_fs::deno_fs::init_ops_and_esm::(fs.clone()), - deno_node::deno_node::init_ops_and_esm::(None, fs.clone()), + deno_node::deno_node::init_ops_and_esm::< + Permissions, + sys_traits::impls::RealSys, + >(None, fs.clone()), runtime::init_ops_and_esm(), ops::runtime::deno_runtime::init_ops("deno:runtime".parse().unwrap()), ops::worker_host::deno_worker_host::init_ops( diff --git a/runtime/sys_info.rs b/runtime/sys_info.rs index cffc90e9da..99bfcfe103 100644 --- a/runtime/sys_info.rs +++ b/runtime/sys_info.rs @@ -278,11 +278,15 @@ pub fn mem_info() -> Option { mem_info.swap_total = xs.xsu_total; mem_info.swap_free = xs.xsu_avail; + extern "C" { + fn mach_host_self() -> std::ffi::c_uint; + } + let mut count: u32 = libc::HOST_VM_INFO64_COUNT as _; let mut stat = std::mem::zeroed::(); if 
libc::host_statistics64( // TODO(@littledivy): Put this in a once_cell. - libc::mach_host_self(), + mach_host_self(), libc::HOST_VM_INFO64, &mut stat as *mut libc::vm_statistics64 as *mut _, &mut count, @@ -291,11 +295,9 @@ pub fn mem_info() -> Option { // TODO(@littledivy): Put this in a once_cell let page_size = libc::sysconf(libc::_SC_PAGESIZE) as u64; mem_info.available = - (stat.free_count as u64 + stat.inactive_count as u64) * page_size - / 1024; + (stat.free_count as u64 + stat.inactive_count as u64) * page_size; mem_info.free = - (stat.free_count as u64 - stat.speculative_count as u64) * page_size - / 1024; + (stat.free_count as u64 - stat.speculative_count as u64) * page_size; } } } diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index e3a69b39c0..2b46d9a2ff 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -33,6 +33,7 @@ use deno_fs::FileSystem; use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::dynamic::MultiBackendDbHandler; +use deno_node::ExtNodeSys; use deno_node::NodeExtInitServices; use deno_permissions::PermissionsContainer; use deno_terminal::colors; @@ -58,7 +59,6 @@ use std::task::Poll; use crate::inspector_server::InspectorServer; use crate::ops; use crate::ops::process::NpmProcessStateProviderRc; -use crate::ops::worker_host::WorkersTable; use crate::shared::maybe_transpile_source; use crate::shared::runtime; use crate::tokio_util::create_and_run_current_thread; @@ -338,7 +338,7 @@ fn create_handles( (internal_handle, external_handle) } -pub struct WebWorkerServiceOptions { +pub struct WebWorkerServiceOptions { pub blob_store: Arc, pub broadcast_channel: InMemoryBroadcastChannel, pub compiled_wasm_module_store: Option, @@ -346,7 +346,7 @@ pub struct WebWorkerServiceOptions { pub fs: Arc, pub maybe_inspector_server: Option>, pub module_loader: Rc, - pub node_services: Option, + pub node_services: Option>, pub npm_process_state_provider: Option, pub permissions: PermissionsContainer, pub root_cert_store_provider: Option>, @@ -385,7 +385,6 @@ pub struct WebWorker { pub js_runtime: JsRuntime, pub name: String, close_on_idle: bool, - has_executed_main_module: bool, internal_handle: WebWorkerInternalHandle, pub worker_type: WebWorkerType, pub main_module: ModuleSpecifier, @@ -404,8 +403,8 @@ impl Drop for WebWorker { } impl WebWorker { - pub fn bootstrap_from_options( - services: WebWorkerServiceOptions, + pub fn bootstrap_from_options( + services: WebWorkerServiceOptions, options: WebWorkerOptions, ) -> (Self, SendableWebWorkerHandle) { let (mut worker, handle, bootstrap_options) = @@ -414,8 +413,8 @@ impl WebWorker { (worker, handle) } - fn from_options( - services: WebWorkerServiceOptions, + fn from_options( + services: WebWorkerServiceOptions, mut options: WebWorkerOptions, ) -> (Self, SendableWebWorkerHandle, BootstrapOptions) { deno_core::extension!(deno_permissions_web_worker, @@ -506,7 +505,7 @@ impl WebWorker { deno_fs::deno_fs::init_ops_and_esm::( services.fs.clone(), ), - deno_node::deno_node::init_ops_and_esm::( + deno_node::deno_node::init_ops_and_esm::( services.node_services, services.fs, ), @@ -658,7 +657,6 @@ impl WebWorker { has_message_event_listener_fn: None, bootstrap_fn_global: Some(bootstrap_fn_global), close_on_idle: options.close_on_idle, - has_executed_main_module: false, maybe_worker_metadata: options.maybe_worker_metadata, }, external_handle, @@ -799,7 +797,6 @@ impl WebWorker { maybe_result = &mut receiver => { debug!("received worker module evaluate {:#?}", maybe_result); - 
self.has_executed_main_module = true; maybe_result } @@ -837,6 +834,9 @@ impl WebWorker { } if self.close_on_idle { + if self.has_message_event_listener() { + return Poll::Pending; + } return Poll::Ready(Ok(())); } @@ -851,22 +851,7 @@ impl WebWorker { Poll::Ready(Ok(())) } } - Poll::Pending => { - // This is special code path for workers created from `node:worker_threads` - // module that have different semantics than Web workers. - // We want the worker thread to terminate automatically if we've done executing - // Top-Level await, there are no child workers spawned by that workers - // and there's no "message" event listener. - if self.close_on_idle - && self.has_executed_main_module - && !self.has_child_workers() - && !self.has_message_event_listener() - { - Poll::Ready(Ok(())) - } else { - Poll::Pending - } - } + Poll::Pending => Poll::Pending, } } @@ -904,15 +889,6 @@ impl WebWorker { None => false, } } - - fn has_child_workers(&mut self) -> bool { - !self - .js_runtime - .op_state() - .borrow() - .borrow::() - .is_empty() - } } fn print_worker_error( diff --git a/runtime/worker.rs b/runtime/worker.rs index 46fbd7b43f..a9a4440410 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -39,6 +39,7 @@ use deno_fs::FileSystem; use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::dynamic::MultiBackendDbHandler; +use deno_node::ExtNodeSys; use deno_node::NodeExtInitServices; use deno_permissions::PermissionsContainer; use deno_tls::RootCertStoreProvider; @@ -128,7 +129,7 @@ pub struct MainWorker { dispatch_process_exit_event_fn_global: v8::Global, } -pub struct WorkerServiceOptions { +pub struct WorkerServiceOptions { pub blob_store: Arc, pub broadcast_channel: InMemoryBroadcastChannel, pub feature_checker: Arc, @@ -139,7 +140,7 @@ pub struct WorkerServiceOptions { /// If not provided runtime will error if code being /// executed tries to load modules. 
pub module_loader: Rc, - pub node_services: Option, + pub node_services: Option>, pub npm_process_state_provider: Option, pub permissions: PermissionsContainer, pub root_cert_store_provider: Option>, @@ -304,9 +305,9 @@ pub fn create_op_metrics( } impl MainWorker { - pub fn bootstrap_from_options( + pub fn bootstrap_from_options( main_module: ModuleSpecifier, - services: WorkerServiceOptions, + services: WorkerServiceOptions, options: WorkerOptions, ) -> Self { let (mut worker, bootstrap_options) = @@ -315,9 +316,9 @@ impl MainWorker { worker } - fn from_options( + fn from_options( main_module: ModuleSpecifier, - services: WorkerServiceOptions, + services: WorkerServiceOptions, mut options: WorkerOptions, ) -> (Self, BootstrapOptions) { deno_core::extension!(deno_permissions_worker, @@ -417,7 +418,7 @@ impl MainWorker { deno_fs::deno_fs::init_ops_and_esm::( services.fs.clone(), ), - deno_node::deno_node::init_ops_and_esm::( + deno_node::deno_node::init_ops_and_esm::( services.node_services, services.fs, ), diff --git a/runtime/worker_bootstrap.rs b/runtime/worker_bootstrap.rs index 2020c2bc8d..8364fe0d2b 100644 --- a/runtime/worker_bootstrap.rs +++ b/runtime/worker_bootstrap.rs @@ -120,6 +120,7 @@ pub struct BootstrapOptions { pub serve_port: Option, pub serve_host: Option, pub otel_config: OtelConfig, + pub close_on_idle: bool, } impl Default for BootstrapOptions { @@ -155,6 +156,7 @@ impl Default for BootstrapOptions { serve_port: Default::default(), serve_host: Default::default(), otel_config: Default::default(), + close_on_idle: false, } } } @@ -198,6 +200,8 @@ struct BootstrapV8<'a>( Option, // OTEL config Box<[u8]>, + // close on idle + bool, ); impl BootstrapOptions { @@ -225,6 +229,7 @@ impl BootstrapOptions { serve_is_main, serve_worker_count, self.otel_config.as_v8(), + self.close_on_idle, ); bootstrap.serialize(ser).unwrap() diff --git a/tests/Cargo.toml b/tests/Cargo.toml index fa51d7b77b..1300066c64 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -60,6 +60,7 @@ pretty_assertions.workspace = true regex.workspace = true reqwest.workspace = true serde.workspace = true +sys_traits = { workspace = true, features = ["real", "getrandom", "libc", "winapi"] } test_util.workspace = true tokio.workspace = true tower-lsp.workspace = true diff --git a/tests/integration/bench_tests.rs b/tests/integration/bench_tests.rs index d588f5b437..4ee029d648 100644 --- a/tests/integration/bench_tests.rs +++ b/tests/integration/bench_tests.rs @@ -43,7 +43,7 @@ fn conditionally_loads_type_graph() { .new_command() .args("bench --reload -L debug run/type_directives_js_main.js") .run(); - output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow_with_options - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); + output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); let output = context .new_command() .args("bench --reload -L debug --no-check run/type_directives_js_main.js") diff --git a/tests/integration/cache_tests.rs b/tests/integration/cache_tests.rs index d9fb8e38e5..4cddae1af1 100644 --- a/tests/integration/cache_tests.rs +++ b/tests/integration/cache_tests.rs @@ -107,5 +107,5 @@ fn loads_type_graph() { .new_command() .args("cache --reload -L debug run/type_directives_js_main.js") .run(); - output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow_with_options - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); + 
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); } diff --git a/tests/integration/check_tests.rs b/tests/integration/check_tests.rs index b98d719fca..a1fdf83403 100644 --- a/tests/integration/check_tests.rs +++ b/tests/integration/check_tests.rs @@ -218,7 +218,7 @@ fn npm_module_check_then_error() { "npm:@denotest/breaking-change-between-versions", ) .unwrap(), - "1.0.0".to_string(), + "1.0.0".into(), ); lockfile_path.write(lockfile.as_json_string()); temp_dir.write( @@ -236,7 +236,7 @@ fn npm_module_check_then_error() { "npm:@denotest/breaking-change-between-versions", ) .unwrap(), - "2.0.0".to_string(), + "2.0.0".into(), ); lockfile_path.write(lockfile.as_json_string()); diff --git a/tests/integration/compile_tests.rs b/tests/integration/compile_tests.rs index a69e873ab4..a715233933 100644 --- a/tests/integration/compile_tests.rs +++ b/tests/integration/compile_tests.rs @@ -2,7 +2,6 @@ use deno_core::serde_json; use test_util as util; -use util::assert_contains; use util::assert_not_contains; use util::testdata_path; use util::TestContext; @@ -90,78 +89,6 @@ fn standalone_args() { .assert_exit_code(0); } -#[test] -fn standalone_error() { - let context = TestContextBuilder::new().build(); - let dir = context.temp_dir(); - let exe = if cfg!(windows) { - dir.path().join("error.exe") - } else { - dir.path().join("error") - }; - context - .new_command() - .args_vec([ - "compile", - "--output", - &exe.to_string_lossy(), - "./compile/standalone_error.ts", - ]) - .run() - .skip_output_check() - .assert_exit_code(0); - - let output = context.new_command().name(&exe).split_output().run(); - output.assert_exit_code(1); - output.assert_stdout_matches_text(""); - let stderr = output.stderr(); - // On Windows, we cannot assert the file path (because '\'). - // Instead we just check for relevant output. - assert_contains!(stderr, "error: Uncaught (in promise) Error: boom!"); - assert_contains!(stderr, "\n at boom (file://"); - assert_contains!(stderr, "standalone_error.ts:2:9"); - assert_contains!(stderr, "at foo (file://"); - assert_contains!(stderr, "standalone_error.ts:5:3"); - assert_contains!(stderr, "standalone_error.ts:7:1"); -} - -#[test] -fn standalone_error_module_with_imports() { - let context = TestContextBuilder::new().build(); - let dir = context.temp_dir(); - let exe = if cfg!(windows) { - dir.path().join("error.exe") - } else { - dir.path().join("error") - }; - context - .new_command() - .args_vec([ - "compile", - "--output", - &exe.to_string_lossy(), - "./compile/standalone_error_module_with_imports_1.ts", - ]) - .run() - .skip_output_check() - .assert_exit_code(0); - - let output = context - .new_command() - .name(&exe) - .env("NO_COLOR", "1") - .split_output() - .run(); - output.assert_stdout_matches_text("hello\n"); - let stderr = output.stderr(); - // On Windows, we cannot assert the file path (because '\'). - // Instead we just check for relevant output. 
- assert_contains!(stderr, "error: Uncaught (in promise) Error: boom!"); - assert_contains!(stderr, "\n at file://"); - assert_contains!(stderr, "standalone_error_module_with_imports_2.ts:2:7"); - output.assert_exit_code(1); -} - #[test] fn standalone_load_datauri() { let context = TestContextBuilder::new().build(); diff --git a/tests/integration/js_unit_tests.rs b/tests/integration/js_unit_tests.rs index 577ca043ca..afb97a3458 100644 --- a/tests/integration/js_unit_tests.rs +++ b/tests/integration/js_unit_tests.rs @@ -52,6 +52,8 @@ util::unit_test_factory!( kv_queue_test, kv_queue_undelivered_test, link_test, + lint_selectors_test, + lint_plugin_test, make_temp_test, message_channel_test, mkdir_test, @@ -66,6 +68,7 @@ util::unit_test_factory!( process_test, progressevent_test, promise_hooks_test, + quic_test, read_dir_test, read_file_test, read_link_test, diff --git a/tests/integration/jsr_tests.rs b/tests/integration/jsr_tests.rs index c4812e6bfb..d3fa5cd98f 100644 --- a/tests/integration/jsr_tests.rs +++ b/tests/integration/jsr_tests.rs @@ -159,7 +159,7 @@ console.log(version);"#, .get_mut( &JsrDepPackageReq::from_str("jsr:@denotest/no-module-graph@0.1").unwrap(), ) - .unwrap() = "0.1.0".to_string(); + .unwrap() = "0.1.0".into(); lockfile_path.write(lockfile.as_json_string()); test_context @@ -191,8 +191,8 @@ fn reload_info_not_found_cache_but_exists_remote() { Url::parse(&format!("http://127.0.0.1:4250/{}/meta.json", package)) .unwrap(); let cache = deno_cache_dir::GlobalHttpCache::new( + sys_traits::impls::RealSys, deno_dir.path().join("remote").to_path_buf(), - deno_cache_dir::TestRealDenoCacheEnv, ); let entry = cache .get(&cache.cache_item_key(&specifier).unwrap(), None) diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs index e21157ebc6..4a8999f821 100644 --- a/tests/integration/lsp_tests.rs +++ b/tests/integration/lsp_tests.rs @@ -2082,6 +2082,88 @@ fn lsp_inlay_hints_not_enabled() { client.shutdown(); } +#[test] +fn lsp_suggestion_actions_disabled() { + let context = TestContextBuilder::new().use_temp_cwd().build(); + let temp_dir = context.temp_dir(); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + client.change_configuration(json!({ + "deno": { + "enable": true, + "lint": false, + }, + "typescript": { + "suggestionActions": { + "enabled": false, + }, + }, + })); + client.read_diagnostics(); + let diagnostics = client.did_open(json!({ + "textDocument": { + "uri": temp_dir.url().join("file.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": r#" + // The settings should disable the suggestion for this to be async. + function asyncLikeFunction() { + return new Promise((r) => r(null)).then((v) => v); + } + console.log(asyncLikeFunction); + + // Deprecated warnings should remain. + /** @deprecated */ + function deprecatedFunction() {} + console.log(deprecatedFunction); + + // Unused warnings should remain. 
+ const unsusedVariable = 1; + "#, + }, + })); + assert_eq!( + json!(diagnostics.all()), + json!([ + { + "range": { + "start": { "line": 10, "character": 20 }, + "end": { "line": 10, "character": 38 }, + }, + "severity": 4, + "code": 6385, + "source": "deno-ts", + "message": "'deprecatedFunction' is deprecated.", + "relatedInformation": [ + { + "location": { + "uri": temp_dir.url().join("file.ts").unwrap(), + "range": { + "start": { "line": 8, "character": 12 }, + "end": { "line": 8, "character": 24 }, + }, + }, + "message": "The declaration was marked as deprecated here.", + }, + ], + "tags": [2], + }, + { + "range": { + "start": { "line": 13, "character": 14 }, + "end": { "line": 13, "character": 29 }, + }, + "severity": 4, + "code": 6133, + "source": "deno-ts", + "message": "'unsusedVariable' is declared but its value is never read.", + "tags": [1], + }, + ]), + ); + client.shutdown(); +} + #[test] fn lsp_workspace_disable_enable_paths() { fn run_test(use_trailing_slash: bool) { @@ -5984,6 +6066,119 @@ fn lsp_jsr_code_action_missing_declaration() { ); } +#[test] +fn lsp_jsr_code_action_move_to_new_file() { + let context = TestContextBuilder::new() + .use_http_server() + .use_temp_cwd() + .build(); + let temp_dir = context.temp_dir(); + let file = source_file( + temp_dir.path().join("file.ts"), + r#" + import { someFunction } from "jsr:@denotest/types-file"; + export const someValue = someFunction(); + "#, + ); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + client.write_request( + "workspace/executeCommand", + json!({ + "command": "deno.cache", + "arguments": [[], file.url()], + }), + ); + client.did_open_file(&file); + let list = client + .write_request_with_res_as::>( + "textDocument/codeAction", + json!({ + "textDocument": { "uri": file.url() }, + "range": { + "start": { "line": 2, "character": 19 }, + "end": { "line": 2, "character": 28 }, + }, + "context": { "diagnostics": [] }, + }), + ) + .unwrap(); + let action = list + .iter() + .find_map(|c| match c { + lsp::CodeActionOrCommand::CodeAction(a) + if &a.title == "Move to a new file" => + { + Some(a) + } + _ => None, + }) + .unwrap(); + let res = client.write_request("codeAction/resolve", json!(action)); + assert_eq!( + res, + json!({ + "title": "Move to a new file", + "kind": "refactor.move.newFile", + "edit": { + "documentChanges": [ + { + "textDocument": { "uri": file.url(), "version": 1 }, + "edits": [ + { + "range": { + "start": { "line": 1, "character": 6 }, + "end": { "line": 2, "character": 0 }, + }, + "newText": "", + }, + { + "range": { + "start": { "line": 2, "character": 0 }, + "end": { "line": 3, "character": 4 }, + }, + "newText": "", + }, + ], + }, + { + "kind": "create", + "uri": file.url().join("someValue.ts").unwrap(), + "options": { + "ignoreIfExists": true, + }, + }, + { + "textDocument": { + "uri": file.url().join("someValue.ts").unwrap(), + "version": null, + }, + "edits": [ + { + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 0, "character": 0 }, + }, + "newText": "import { someFunction } from \"jsr:@denotest/types-file\";\n\nexport const someValue = someFunction();\n", + }, + ], + }, + ], + }, + "isPreferred": false, + "data": { + "specifier": file.url(), + "range": { + "start": { "line": 2, "character": 19 }, + "end": { "line": 2, "character": 28 }, + }, + "refactorName": "Move to a new file", + "actionName": "Move to a new file", + }, + }), + ); +} + #[test] fn lsp_code_actions_deno_cache_npm() { let context = 
TestContextBuilder::new().use_temp_cwd().build(); @@ -7878,6 +8073,73 @@ fn lsp_completions_auto_import() { client.shutdown(); } +#[test] +fn lsp_completions_auto_import_node_builtin() { + let context = TestContextBuilder::new() + .use_http_server() + .use_temp_cwd() + .build(); + let temp_dir = context.temp_dir(); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + client.did_open(json!({ + "textDocument": { + "uri": temp_dir.url().join("file.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": r#" + import "npm:@types/node"; + pathToFileURL + "#, + } + })); + client.write_request( + "workspace/executeCommand", + json!({ + "command": "deno.cache", + "arguments": [[], temp_dir.url().join("file.ts").unwrap()], + }), + ); + let list = client.get_completion_list( + temp_dir.url().join("file.ts").unwrap(), + (2, 21), + json!({ "triggerKind": 2 }), + ); + assert!(!list.is_incomplete); + let item = list + .items + .iter() + .find(|item| item.label == "pathToFileURL") + .unwrap(); + let res = client.write_request("completionItem/resolve", json!(item)); + assert_eq!( + res, + json!({ + "label": "pathToFileURL", + "labelDetails": { + "description": "node:url", + }, + "kind": 3, + "detail": "function pathToFileURL(path: string, options?: PathToFileUrlOptions): URL", + "documentation": { + "kind": "markdown", + "value": "This function ensures that `path` is resolved absolutely, and that the URL\ncontrol characters are correctly encoded when converting into a File URL.\n\n```js\nimport { pathToFileURL } from 'node:url';\n\nnew URL('/foo#1', 'file:'); // Incorrect: file:///foo#1\npathToFileURL('/foo#1'); // Correct: file:///foo#1 (POSIX)\n\nnew URL('/some/path%.c', 'file:'); // Incorrect: file:///some/path%.c\npathToFileURL('/some/path%.c'); // Correct: file:///some/path%.c (POSIX)\n```\n\n*@since* - v10.12.0 \n\n*@param* - path The path to convert to a File URL. 
\n\n*@return* - The file URL object.", + }, + "sortText": "￿16_1", + "additionalTextEdits": [ + { + "range": { + "start": { "line": 2, "character": 0 }, + "end": { "line": 2, "character": 0 }, + }, + "newText": " import { pathToFileURL } from \"node:url\";\n", + }, + ], + }), + ); + client.shutdown(); +} + #[test] fn lsp_npm_completions_auto_import_and_quick_fix_no_import_map() { let context = TestContextBuilder::new() @@ -8450,6 +8712,7 @@ fn lsp_completions_node_specifier() { "node:http2", "node:https", "node:inspector", + "node:inspector/promises", "node:module", "node:net", "node:os", @@ -8460,9 +8723,9 @@ fn lsp_completions_node_specifier() { "node:process", "node:punycode", "node:querystring", - "node:repl", "node:readline", "node:readline/promises", + "node:repl", "node:sqlite", "node:stream", "node:stream/consumers", diff --git a/tests/integration/run_tests.rs b/tests/integration/run_tests.rs index f0b536aa22..77c0a46c5f 100644 --- a/tests/integration/run_tests.rs +++ b/tests/integration/run_tests.rs @@ -922,7 +922,7 @@ fn type_directives_js_main() { .new_command() .args("run --reload -L debug --check run/type_directives_js_main.js") .run(); - output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow_with_options - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); + output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); let output = context .new_command() .args("run --reload -L debug run/type_directives_js_main.js") diff --git a/tests/integration/test_tests.rs b/tests/integration/test_tests.rs index 64857ae110..ca83682833 100644 --- a/tests/integration/test_tests.rs +++ b/tests/integration/test_tests.rs @@ -111,7 +111,7 @@ fn conditionally_loads_type_graph() { .new_command() .args("test --reload -L debug run/type_directives_js_main.js") .run(); - output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow_with_options - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); + output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); let output = context .new_command() .args("test --reload -L debug --no-check run/type_directives_js_main.js") diff --git a/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.1/mod.ts b/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.1/mod.ts new file mode 100644 index 0000000000..6a8018af41 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.1/mod.ts @@ -0,0 +1 @@ +export const foo = 1; \ No newline at end of file diff --git a/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.1_meta.json b/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.1_meta.json new file mode 100644 index 0000000000..6c213a9c05 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.1_meta.json @@ -0,0 +1,5 @@ +{ + "exports": { + ".": "mod.ts" + } +} diff --git a/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.2/mod.ts b/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.2/mod.ts new file mode 100644 index 0000000000..6a8018af41 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.2/mod.ts @@ -0,0 +1 @@ +export const foo = 1; \ No newline at end of file diff --git a/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.2_meta.json b/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.2_meta.json 
new file mode 100644 index 0000000000..6c213a9c05 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-only-pre-release/2.0.0-beta.2_meta.json @@ -0,0 +1,5 @@ +{ + "exports": { + ".": "mod.ts" + } +} diff --git a/tests/registry/jsr/@denotest/has-only-pre-release/meta.json b/tests/registry/jsr/@denotest/has-only-pre-release/meta.json new file mode 100644 index 0000000000..cce8d0954f --- /dev/null +++ b/tests/registry/jsr/@denotest/has-only-pre-release/meta.json @@ -0,0 +1,6 @@ +{ + "versions": { + "2.0.0-beta.1": {}, + "2.0.0-beta.2": {} + } +} diff --git a/tests/registry/jsr/@denotest/has-pre-release/1.0.0/mod.ts b/tests/registry/jsr/@denotest/has-pre-release/1.0.0/mod.ts new file mode 100644 index 0000000000..6a8018af41 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-pre-release/1.0.0/mod.ts @@ -0,0 +1 @@ +export const foo = 1; \ No newline at end of file diff --git a/tests/registry/jsr/@denotest/has-pre-release/1.0.0_meta.json b/tests/registry/jsr/@denotest/has-pre-release/1.0.0_meta.json new file mode 100644 index 0000000000..c5807f588c --- /dev/null +++ b/tests/registry/jsr/@denotest/has-pre-release/1.0.0_meta.json @@ -0,0 +1,3 @@ +{ + "exports": {} +} diff --git a/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.1/mod.ts b/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.1/mod.ts new file mode 100644 index 0000000000..6a8018af41 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.1/mod.ts @@ -0,0 +1 @@ +export const foo = 1; \ No newline at end of file diff --git a/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.1_meta.json b/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.1_meta.json new file mode 100644 index 0000000000..6c213a9c05 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.1_meta.json @@ -0,0 +1,5 @@ +{ + "exports": { + ".": "mod.ts" + } +} diff --git a/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.2/mod.ts b/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.2/mod.ts new file mode 100644 index 0000000000..6a8018af41 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.2/mod.ts @@ -0,0 +1 @@ +export const foo = 1; \ No newline at end of file diff --git a/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.2_meta.json b/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.2_meta.json new file mode 100644 index 0000000000..6c213a9c05 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-pre-release/2.0.0-beta.2_meta.json @@ -0,0 +1,5 @@ +{ + "exports": { + ".": "mod.ts" + } +} diff --git a/tests/registry/jsr/@denotest/has-pre-release/meta.json b/tests/registry/jsr/@denotest/has-pre-release/meta.json new file mode 100644 index 0000000000..ba7bc452d7 --- /dev/null +++ b/tests/registry/jsr/@denotest/has-pre-release/meta.json @@ -0,0 +1,7 @@ +{ + "versions": { + "1.0.0": {}, + "2.0.0-beta.1": {}, + "2.0.0-beta.2": {} + } +} diff --git a/tests/registry/npm/@denotest/has-pre-release/1.0.0/package.json b/tests/registry/npm/@denotest/has-pre-release/1.0.0/package.json new file mode 100644 index 0000000000..027b783d54 --- /dev/null +++ b/tests/registry/npm/@denotest/has-pre-release/1.0.0/package.json @@ -0,0 +1,7 @@ +{ + "name": "@denotest/has-pre-release", + "version": "1.0.0", + "publishConfig": { + "tag": "latest" + } +} \ No newline at end of file diff --git a/tests/registry/npm/@denotest/has-pre-release/2.0.0-beta.1/package.json b/tests/registry/npm/@denotest/has-pre-release/2.0.0-beta.1/package.json new file mode 100644 index 0000000000..b3d3b3e634 --- 
/dev/null +++ b/tests/registry/npm/@denotest/has-pre-release/2.0.0-beta.1/package.json @@ -0,0 +1,4 @@ +{ + "name": "@denotest/has-pre-release", + "version": "2.0.0-beta.1" +} \ No newline at end of file diff --git a/tests/registry/npm/@denotest/has-pre-release/2.0.0-beta.2/package.json b/tests/registry/npm/@denotest/has-pre-release/2.0.0-beta.2/package.json new file mode 100644 index 0000000000..b6f8ea4c76 --- /dev/null +++ b/tests/registry/npm/@denotest/has-pre-release/2.0.0-beta.2/package.json @@ -0,0 +1,4 @@ +{ + "name": "@denotest/has-pre-release", + "version": "2.0.0-beta.2" +} \ No newline at end of file diff --git a/tests/specs/cert/localhost_unsafe_ssl/localhost_unsafe_ssl.ts.out b/tests/specs/cert/localhost_unsafe_ssl/localhost_unsafe_ssl.ts.out index c7bdfde0ed..ffb84ebfde 100644 --- a/tests/specs/cert/localhost_unsafe_ssl/localhost_unsafe_ssl.ts.out +++ b/tests/specs/cert/localhost_unsafe_ssl/localhost_unsafe_ssl.ts.out @@ -1,3 +1,6 @@ DANGER: TLS certificate validation is disabled for: deno.land -error: Import 'https://localhost:5545/subdir/mod2.ts' failed: error sending request for url (https://localhost:5545/subdir/mod2.ts): client error[WILDCARD] - at file:///[WILDCARD]/cafile_url_imports.ts:[WILDCARD] +error: Import 'https://localhost:5545/subdir/mod2.ts' failed. + 0: error sending request for url (https://localhost:5545/subdir/mod2.ts): client error (Connect): invalid peer certificate: UnknownIssuer + 1: client error (Connect) + 2: invalid peer certificate: UnknownIssuer + at file:///[WILDLINE]/cafile_url_imports.ts:[WILDLINE] diff --git a/tests/specs/cli/otel_basic/__test__.jsonc b/tests/specs/cli/otel_basic/__test__.jsonc index e7f8d17c7a..f9826671e8 100644 --- a/tests/specs/cli/otel_basic/__test__.jsonc +++ b/tests/specs/cli/otel_basic/__test__.jsonc @@ -1,28 +1,27 @@ { - "steps": [ - { + "tests": { + "basic": { "args": "run -A main.ts basic.ts", "output": "basic.out" }, - { + "natural_exit": { "args": "run -A main.ts natural_exit.ts", "output": "natural_exit.out" }, - { + "deno_dot_exit": { "args": "run -A main.ts deno_dot_exit.ts", "output": "deno_dot_exit.out" }, - { + "uncaught": { "args": "run -A main.ts uncaught.ts", "output": "uncaught.out" }, - { + "metric": { + "envs": { + "OTEL_METRIC_EXPORT_INTERVAL": "1000" + }, "args": "run -A main.ts metric.ts", "output": "metric.out" - }, - { - "args": "run -A --unstable-otel context.ts", - "output": "" } - ] + } } diff --git a/tests/specs/cli/otel_basic/metric.out b/tests/specs/cli/otel_basic/metric.out index 26ed4a23c6..dd53734230 100644 --- a/tests/specs/cli/otel_basic/metric.out +++ b/tests/specs/cli/otel_basic/metric.out @@ -56,6 +56,31 @@ "isMonotonic": false } }, + { + "name": "gauge", + "description": "Example of a Gauge", + "unit": "", + "metadata": [], + "gauge": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ] + } + }, { "name": "histogram", "description": "Example of a Histogram", @@ -119,6 +144,265 @@ ], "aggregationTemporality": 2 } + }, + { + "name": "observable_counter", + "description": "Example of a ObservableCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + }, + { + "name": 
"observable_up_down_counter", + "description": "Example of a ObservableUpDownCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": false + } + }, + { + "name": "observable_gauge", + "description": "Example of a ObservableGauge", + "unit": "", + "metadata": [], + "gauge": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ] + } + }, + { + "name": "counter", + "description": "Example of a Counter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + }, + { + "name": "up_down_counter", + "description": "Example of a UpDownCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": -1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": false + } + }, + { + "name": "gauge", + "description": "Example of a Gauge", + "unit": "", + "metadata": [], + "gauge": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ] + } + }, + { + "name": "histogram", + "description": "Example of a Histogram", + "unit": "", + "metadata": [], + "histogram": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "count": 1, + "sum": 1, + "bucketCounts": [ + 0, + 1, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "explicitBounds": [ + 0, + 5, + 10, + 25, + 50, + 75, + 100, + 250, + 500, + 750, + 1000, + 2500, + 5000, + 7500, + 10000 + ], + "exemplars": [], + "flags": 0, + "min": 1, + "max": 1 + } + ], + "aggregationTemporality": 2 + } + }, + { + "name": "observable_counter", + "description": "Example of a ObservableCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + }, + { + "name": "observable_up_down_counter", + "description": "Example of a ObservableUpDownCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": false + } + }, + { + "name": "observable_gauge", + "description": "Example of a ObservableGauge", + "unit": "", + "metadata": [], + "gauge": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ] + } } 
] } diff --git a/tests/specs/cli/otel_basic/metric.ts b/tests/specs/cli/otel_basic/metric.ts index 7d332f0432..2b472a6fb8 100644 --- a/tests/specs/cli/otel_basic/metric.ts +++ b/tests/specs/cli/otel_basic/metric.ts @@ -1,18 +1,8 @@ -import { - MeterProvider, - PeriodicExportingMetricReader, -} from "npm:@opentelemetry/sdk-metrics@1.28.0"; +import { metrics } from "npm:@opentelemetry/api@1"; -const meterProvider = new MeterProvider(); +metrics.setGlobalMeterProvider(new Deno.telemetry.MeterProvider()); -meterProvider.addMetricReader( - new PeriodicExportingMetricReader({ - exporter: new Deno.telemetry.MetricExporter(), - exportIntervalMillis: 100, - }), -); - -const meter = meterProvider.getMeter("m"); +const meter = metrics.getMeter("m"); const counter = meter.createCounter("counter", { description: "Example of a Counter", @@ -22,13 +12,82 @@ const upDownCounter = meter.createUpDownCounter("up_down_counter", { description: "Example of a UpDownCounter", }); +const gauge = meter.createGauge("gauge", { + description: "Example of a Gauge", +}); + const histogram = meter.createHistogram("histogram", { description: "Example of a Histogram", }); +const observableCounterPromise = Promise.withResolvers(); +const observableCounter = meter.createObservableCounter("observable_counter", { + description: "Example of a ObservableCounter", +}); +observableCounter.addCallback((res) => { + res.observe(1); + observableCounterPromise.resolve(); +}); + +const observableUpDownCounterPromise = Promise.withResolvers(); +const observableUpDownCounter = meter + .createObservableUpDownCounter("observable_up_down_counter", { + description: "Example of a ObservableUpDownCounter", + }); +observableUpDownCounter.addCallback((res) => { + res.observe(1); + observableUpDownCounterPromise.resolve(); +}); + +const observableGaugePromise = Promise.withResolvers(); +const observableGauge = meter.createObservableGauge("observable_gauge", { + description: "Example of a ObservableGauge", +}); +observableGauge.addCallback((res) => { + res.observe(1); + observableGaugePromise.resolve(); +}); + +const observableCounterBatch = meter.createObservableCounter( + "observable_counter_batch", + { description: "Example of a ObservableCounter, written in batch" }, +); +const observableUpDownCounterBatch = meter.createObservableUpDownCounter( + "observable_up_down_counter_batch", + { description: "Example of a ObservableUpDownCounter, written in batch" }, +); +const observableGaugeBatch = meter.createObservableGauge( + "observable_gauge_batch", + { + description: "Example of a ObservableGauge, written in batch", + }, +); + +const observableBatchPromise = Promise.withResolvers(); +meter.addBatchObservableCallback((observer) => { + observer.observe(observableCounter, 2); + observer.observe(observableUpDownCounter, 2); + observer.observe(observableGauge, 2); + observableBatchPromise.resolve(); +}, [ + observableCounterBatch, + observableUpDownCounterBatch, + observableGaugeBatch, +]); + const attributes = { attribute: 1 }; counter.add(1, attributes); upDownCounter.add(-1, attributes); +gauge.record(1, attributes); histogram.record(1, attributes); -await meterProvider.forceFlush(); +const timer = setTimeout(() => {}, 100000); + +await Promise.all([ + observableCounterPromise.promise, + observableUpDownCounterPromise.promise, + observableGaugePromise.promise, + observableBatchPromise.promise, +]); + +clearTimeout(timer); diff --git a/tests/specs/compile/code_cache/__test__.jsonc b/tests/specs/compile/code_cache/__test__.jsonc index 
72353e27da..f1c3461adc 100644
--- a/tests/specs/compile/code_cache/__test__.jsonc
+++ b/tests/specs/compile/code_cache/__test__.jsonc
@@ -1,6 +1,9 @@
 {
   "tempDir": true,
   "steps": [{
+    "args": "run -A cleanup.ts",
+    "output": "[WILDCARD]"
+  }, {
     "if": "unix",
     "args": "compile --output using_code_cache --log-level=debug main.ts",
     "output": "[WILDCARD]"
diff --git a/tests/specs/compile/code_cache/cleanup.ts b/tests/specs/compile/code_cache/cleanup.ts
new file mode 100644
index 0000000000..d9e7c805f8
--- /dev/null
+++ b/tests/specs/compile/code_cache/cleanup.ts
@@ -0,0 +1,11 @@
+import { tmpdir } from "node:os";
+
+// cleanup the code cache file from a previous run
+try {
+  if (Deno.build.os === "windows") {
+    Deno.removeSync(tmpdir() + "\\deno-compile-using_code_cache.exe.cache");
+  } else {
+    Deno.removeSync(tmpdir() + "/deno-compile-using_code_cache.cache");
+  }
+} catch {
+}
diff --git a/tests/specs/compile/determinism/__test__.jsonc b/tests/specs/compile/determinism/__test__.jsonc
index 97045744f1..b84a1fdf18 100644
--- a/tests/specs/compile/determinism/__test__.jsonc
+++ b/tests/specs/compile/determinism/__test__.jsonc
@@ -1,28 +1,31 @@
 {
   "tempDir": true,
   "steps": [{
-    "if": "unix",
-    "args": "compile --output main1 main.ts",
+    "args": "run -A setup.ts",
     "output": "[WILDCARD]"
   }, {
     "if": "unix",
-    "args": "compile --output main2 main.ts",
+    "args": "compile --no-config --output a/main a/main.ts",
     "output": "[WILDCARD]"
   }, {
     "if": "unix",
-    "args": "run --allow-read=. assert_equal.ts main1 main2",
+    "args": "compile --no-config --output b/main b/main.ts",
+    "output": "[WILDCARD]"
+  }, {
+    "if": "unix",
+    "args": "run --allow-read=. assert_equal.ts a/main b/main",
     "output": "Same\n"
   }, {
     "if": "windows",
-    "args": "compile --output main1.exe main.ts",
+    "args": "compile --no-config --output a/main.exe a/main.ts",
     "output": "[WILDCARD]"
   }, {
     "if": "windows",
-    "args": "compile --output main2.exe main.ts",
+    "args": "compile --no-config --output b/main.exe b/main.ts",
    "output": "[WILDCARD]"
   }, {
     "if": "windows",
-    "args": "run --allow-read=. assert_equal.ts main1.exe main2.exe",
+    "args": "run --allow-read=. 
assert_equal.ts a/main.exe b/main.exe", "output": "Same\n" }] } diff --git a/tests/specs/compile/determinism/setup.ts b/tests/specs/compile/determinism/setup.ts new file mode 100644 index 0000000000..8bb5753079 --- /dev/null +++ b/tests/specs/compile/determinism/setup.ts @@ -0,0 +1,10 @@ +// for setup, we create two directories with the same file in each +// and then when compiling we ensure this directory name has no +// effect on the output +makeCopyDir("a"); +makeCopyDir("b"); + +function makeCopyDir(dirName) { + Deno.mkdirSync(dirName); + Deno.copyFileSync("main.ts", `${dirName}/main.ts`); +} diff --git a/tests/specs/compile/error/local/__test__.jsonc b/tests/specs/compile/error/local/__test__.jsonc new file mode 100644 index 0000000000..8d6a015a51 --- /dev/null +++ b/tests/specs/compile/error/local/__test__.jsonc @@ -0,0 +1,24 @@ +{ + "tempDir": true, + "steps": [{ + "if": "unix", + "args": "compile --output main standalone_error.ts", + "output": "[WILDCARD]" + }, { + "if": "unix", + "commandName": "./main", + "args": [], + "output": "output.out", + "exitCode": 1 + }, { + "if": "windows", + "args": "compile --output main.exe standalone_error.ts", + "output": "[WILDCARD]" + }, { + "if": "windows", + "commandName": "./main.exe", + "args": [], + "output": "output.out", + "exitCode": 1 + }] +} diff --git a/tests/specs/compile/error/local/output.out b/tests/specs/compile/error/local/output.out new file mode 100644 index 0000000000..b346734ae6 --- /dev/null +++ b/tests/specs/compile/error/local/output.out @@ -0,0 +1,6 @@ +error: Uncaught (in promise) Error: boom! + throw new Error("boom!"); + ^ + at boom (file:///[WILDLINE]standalone_error.ts:2:9) + at foo (file:///[WILDLINE]standalone_error.ts:6:3) + at file:///[WILDLINE]standalone_error.ts:9:1 diff --git a/tests/testdata/compile/standalone_error.ts b/tests/specs/compile/error/local/standalone_error.ts similarity index 100% rename from tests/testdata/compile/standalone_error.ts rename to tests/specs/compile/error/local/standalone_error.ts diff --git a/tests/specs/compile/error/remote/__test__.jsonc b/tests/specs/compile/error/remote/__test__.jsonc new file mode 100644 index 0000000000..9ad9091ec6 --- /dev/null +++ b/tests/specs/compile/error/remote/__test__.jsonc @@ -0,0 +1,24 @@ +{ + "tempDir": true, + "steps": [{ + "if": "unix", + "args": "compile -A --output main main.ts", + "output": "[WILDCARD]" + }, { + "if": "unix", + "commandName": "./main", + "args": [], + "output": "output.out", + "exitCode": 1 + }, { + "if": "windows", + "args": "compile -A --output main.exe main.ts", + "output": "[WILDCARD]" + }, { + "if": "windows", + "commandName": "./main.exe", + "args": [], + "output": "output.out", + "exitCode": 1 + }] +} diff --git a/tests/specs/compile/error/remote/main.ts b/tests/specs/compile/error/remote/main.ts new file mode 100644 index 0000000000..7a27276dd8 --- /dev/null +++ b/tests/specs/compile/error/remote/main.ts @@ -0,0 +1 @@ +import "http://localhost:4545/compile/standalone_error_module_with_imports_1.ts"; diff --git a/tests/specs/compile/error/remote/output.out b/tests/specs/compile/error/remote/output.out new file mode 100644 index 0000000000..3e23694c16 --- /dev/null +++ b/tests/specs/compile/error/remote/output.out @@ -0,0 +1,5 @@ +hello +error: Uncaught (in promise) Error: boom! 
+throw new Error(value); + ^ + at http://localhost:4545/compile/standalone_error_module_with_imports_2.ts:7:7 diff --git a/tests/specs/node/cjs_key_escaped_whitespace/__test__.jsonc b/tests/specs/node/cjs_key_escaped_whitespace/__test__.jsonc new file mode 100644 index 0000000000..ebaae5bfd6 --- /dev/null +++ b/tests/specs/node/cjs_key_escaped_whitespace/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run -A main.js", + "output": "output.out" +} diff --git a/tests/specs/node/cjs_key_escaped_whitespace/main.js b/tests/specs/node/cjs_key_escaped_whitespace/main.js new file mode 100644 index 0000000000..9d4f2ee26c --- /dev/null +++ b/tests/specs/node/cjs_key_escaped_whitespace/main.js @@ -0,0 +1,2 @@ +const bang = await import("./module.cjs"); +console.log("imported:", bang); diff --git a/tests/specs/node/cjs_key_escaped_whitespace/module.cjs b/tests/specs/node/cjs_key_escaped_whitespace/module.cjs new file mode 100644 index 0000000000..5accc6196a --- /dev/null +++ b/tests/specs/node/cjs_key_escaped_whitespace/module.cjs @@ -0,0 +1,6 @@ +module.exports = { + "\nx": "test", + "\ty": "test", + "\rz": "test", + '"a': "test", +}; diff --git a/tests/specs/node/cjs_key_escaped_whitespace/output.out b/tests/specs/node/cjs_key_escaped_whitespace/output.out new file mode 100644 index 0000000000..49e92abdec --- /dev/null +++ b/tests/specs/node/cjs_key_escaped_whitespace/output.out @@ -0,0 +1,7 @@ +imported: [Module: null prototype] { + "\ty": "test", + "\nx": "test", + "\rz": "test", + '"a': "test", + default: { "\nx": "test", "\ty": "test", "\rz": "test", '"a': "test" } +} diff --git a/tests/specs/permission/allow_import_worker/denied.out b/tests/specs/permission/allow_import_worker/denied.out index 6e4dcaee09..af44ae21ee 100644 --- a/tests/specs/permission/allow_import_worker/denied.out +++ b/tests/specs/permission/allow_import_worker/denied.out @@ -3,5 +3,4 @@ await import(specifier); ^ at async file:///[WILDLINE] error: Uncaught (in promise) Error: Unhandled error in child worker. 
- at [WILDLINE] - at [WILDLINE] \ No newline at end of file + at [WILDCARD] \ No newline at end of file diff --git a/tests/specs/run/jsx_import_source/__test__.jsonc b/tests/specs/run/jsx_import_source/__test__.jsonc index cbda2dd32e..0350df7f17 100644 --- a/tests/specs/run/jsx_import_source/__test__.jsonc +++ b/tests/specs/run/jsx_import_source/__test__.jsonc @@ -19,6 +19,7 @@ "output": "jsx_import_source_dev.out" }, "jsx_import_source_pragma_with_config_vendor_dir": { + "tempDir": true, "args": "run --allow-import --reload --config jsx/deno-jsx.jsonc --no-lock --vendor jsx_import_source_pragma.tsx", "output": "jsx_import_source.out" }, diff --git a/tests/specs/task/dependencies/__test__.jsonc b/tests/specs/task/dependencies/__test__.jsonc index 84c98f11a4..c9032153b3 100644 --- a/tests/specs/task/dependencies/__test__.jsonc +++ b/tests/specs/task/dependencies/__test__.jsonc @@ -61,6 +61,18 @@ "cwd": "arg_task_with_deps", "args": "task a a", "output": "./arg_task_with_deps.out" + }, + "no_command": { + "cwd": "no_command", + "args": "task a", + "output": "./no_command.out", + "exitCode": 0 + }, + "no_command_list": { + "cwd": "no_command", + "args": "task", + "output": "./no_command_list.out", + "exitCode": 0 } } } diff --git a/tests/specs/task/dependencies/no_command.out b/tests/specs/task/dependencies/no_command.out new file mode 100644 index 0000000000..521b3541df --- /dev/null +++ b/tests/specs/task/dependencies/no_command.out @@ -0,0 +1,5 @@ +Task b echo 'b' +b +Task c echo 'c' +c +Task a (no command) diff --git a/tests/specs/task/dependencies/no_command/deno.json b/tests/specs/task/dependencies/no_command/deno.json new file mode 100644 index 0000000000..5588365a92 --- /dev/null +++ b/tests/specs/task/dependencies/no_command/deno.json @@ -0,0 +1,13 @@ +{ + "tasks": { + "a": { + "dependencies": ["b", "c"] + }, + "b": { + "command": "echo 'b'" + }, + "c": { + "command": "echo 'c'" + } + } +} diff --git a/tests/specs/task/dependencies/no_command_list.out b/tests/specs/task/dependencies/no_command_list.out new file mode 100644 index 0000000000..3d58c1cb06 --- /dev/null +++ b/tests/specs/task/dependencies/no_command_list.out @@ -0,0 +1,7 @@ +Available tasks: +- a + depends on: b, c +- b + echo 'b' +- c + echo 'c' diff --git a/tests/specs/task/workspace_regex_match/__test__.jsonc b/tests/specs/task/workspace_regex_match/__test__.jsonc new file mode 100644 index 0000000000..258c288d44 --- /dev/null +++ b/tests/specs/task/workspace_regex_match/__test__.jsonc @@ -0,0 +1,11 @@ +{ + "tempDir": true, + "tests": { + // Regression test for https://github.com/denoland/deno/issues/27370 + "root": { + "args": "task test-all", + "output": "root.out", + "exitCode": 0 + } + } +} diff --git a/tests/specs/task/workspace_regex_match/deno.json b/tests/specs/task/workspace_regex_match/deno.json new file mode 100644 index 0000000000..ce040ba5ab --- /dev/null +++ b/tests/specs/task/workspace_regex_match/deno.json @@ -0,0 +1,6 @@ +{ + "workspace": ["./subdir"], + "tasks": { + "test-all": "deno task --recursive test" + } +} diff --git a/tests/specs/task/workspace_regex_match/root.out b/tests/specs/task/workspace_regex_match/root.out new file mode 100644 index 0000000000..9da724a5c0 --- /dev/null +++ b/tests/specs/task/workspace_regex_match/root.out @@ -0,0 +1,3 @@ +Task test-all deno task --recursive test +Task test echo 'ok' +ok diff --git a/tests/specs/task/workspace_regex_match/subdir/deno.json b/tests/specs/task/workspace_regex_match/subdir/deno.json new file mode 100644 index 0000000000..78d768e396 --- 
/dev/null +++ b/tests/specs/task/workspace_regex_match/subdir/deno.json @@ -0,0 +1,5 @@ +{ + "tasks": { + "test": "echo 'ok'" + } +} diff --git a/tests/specs/update/pre_release/__test__.jsonc b/tests/specs/update/pre_release/__test__.jsonc new file mode 100644 index 0000000000..7e2ea39dab --- /dev/null +++ b/tests/specs/update/pre_release/__test__.jsonc @@ -0,0 +1,21 @@ +{ + "tempDir": true, + "steps": [ + { + "args": "i", + "output": "[WILDCARD]" + }, + { + "args": "outdated", + "output": "outdated.out" + }, + { + "args": "outdated --compatible", + "output": "outdated.out" + }, + { + "args": "outdated --update --latest", + "output": "update.out" + } + ] +} diff --git a/tests/specs/update/pre_release/deno.json b/tests/specs/update/pre_release/deno.json new file mode 100644 index 0000000000..07646b5059 --- /dev/null +++ b/tests/specs/update/pre_release/deno.json @@ -0,0 +1,7 @@ +{ + "imports": { + "@denotest/npm-has-pre-release": "npm:@denotest/has-pre-release@^2.0.0-beta.1", + "@denotest/jsr-has-pre-release": "jsr:@denotest/has-pre-release@^2.0.0-beta.1", + "@denotest/has-only-pre-release": "jsr:@denotest/has-only-pre-release@^2.0.0-beta.1" + } +} diff --git a/tests/specs/update/pre_release/deno.lock b/tests/specs/update/pre_release/deno.lock new file mode 100644 index 0000000000..33b136dd53 --- /dev/null +++ b/tests/specs/update/pre_release/deno.lock @@ -0,0 +1,28 @@ +{ + "version": "4", + "specifiers": { + "jsr:@denotest/has-only-pre-release@^2.0.0-beta.1": "2.0.0-beta.1", + "jsr:@denotest/has-pre-release@^2.0.0-beta.1": "2.0.0-beta.1", + "npm:@denotest/has-pre-release@^2.0.0-beta.1": "2.0.0-beta.1" + }, + "jsr": { + "@denotest/has-only-pre-release@2.0.0-beta.1": { + "integrity": "43fd680ea94bb5db5fe1a2d86101c47d0e2cc77323b881755cea9a0372e49537" + }, + "@denotest/has-pre-release@2.0.0-beta.1": { + "integrity": "43fd680ea94bb5db5fe1a2d86101c47d0e2cc77323b881755cea9a0372e49537" + } + }, + "npm": { + "@denotest/has-pre-release@2.0.0-beta.1": { + "integrity": "sha512-K1fHe1L2EUSLgijtzzALNpkkIO0SrX3z+IXvVjjOIE8HKd4T7lkpzDdoUp+WllwS3KXmuJh+9vIfY5lFp38pew==" + } + }, + "workspace": { + "dependencies": [ + "jsr:@denotest/has-only-pre-release@^2.0.0-beta.1", + "jsr:@denotest/has-pre-release@^2.0.0-beta.1", + "npm:@denotest/has-pre-release@^2.0.0-beta.1" + ] + } +} diff --git a/tests/specs/update/pre_release/outdated.out b/tests/specs/update/pre_release/outdated.out new file mode 100644 index 0000000000..b8369b25f7 --- /dev/null +++ b/tests/specs/update/pre_release/outdated.out @@ -0,0 +1,11 @@ +┌────────────────────────────────────┬──────────────┬──────────────┬──────────────┐ +│ Package │ Current │ Update │ Latest │ +├────────────────────────────────────┼──────────────┼──────────────┼──────────────┤ +│ jsr:@denotest/has-only-pre-release │ 2.0.0-beta.1 │ 2.0.0-beta.2 │ 2.0.0-beta.2 │ +├────────────────────────────────────┼──────────────┼──────────────┼──────────────┤ +│ jsr:@denotest/has-pre-release │ 2.0.0-beta.1 │ 2.0.0-beta.2 │ 2.0.0-beta.2 │ +├────────────────────────────────────┼──────────────┼──────────────┼──────────────┤ +│ npm:@denotest/has-pre-release │ 2.0.0-beta.1 │ 2.0.0-beta.2 │ 2.0.0-beta.2 │ +└────────────────────────────────────┴──────────────┴──────────────┴──────────────┘ + +[WILDCARD] diff --git a/tests/specs/update/pre_release/update.out b/tests/specs/update/pre_release/update.out new file mode 100644 index 0000000000..d019457f0a --- /dev/null +++ b/tests/specs/update/pre_release/update.out @@ -0,0 +1,5 @@ +[WILDCARD] +Updated 3 dependencies: + - 
jsr:@denotest/has-only-pre-release 2.0.0-beta.1 -> 2.0.0-beta.2 + - jsr:@denotest/has-pre-release 2.0.0-beta.1 -> 2.0.0-beta.2 + - npm:@denotest/has-pre-release 2.0.0-beta.1 -> 2.0.0-beta.2 diff --git a/tests/testdata/compile/standalone_error_module_with_imports_2.ts b/tests/testdata/compile/standalone_error_module_with_imports_2.ts index ef052b512e..c83d7ceea6 100644 --- a/tests/testdata/compile/standalone_error_module_with_imports_2.ts +++ b/tests/testdata/compile/standalone_error_module_with_imports_2.ts @@ -1,2 +1,7 @@ +// file has blank lines to make the input line +// different than the output console.log("hello"); -throw new Error("boom!"); + +const value: string = "boom!"; + +throw new Error(value); diff --git a/tests/unit/body_test.ts b/tests/unit/body_test.ts index 18cdb22be0..fb51fd0076 100644 --- a/tests/unit/body_test.ts +++ b/tests/unit/body_test.ts @@ -1,5 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -import { assert, assertEquals } from "./test_util.ts"; +import { assert, assertEquals, assertRejects } from "./test_util.ts"; // just a hack to get a body object // deno-lint-ignore no-explicit-any @@ -187,3 +187,14 @@ Deno.test( assertEquals(file.size, 1); }, ); + +Deno.test(async function bodyBadResourceError() { + const file = await Deno.open("README.md"); + file.close(); + const body = buildBody(file.readable); + await assertRejects( + () => body.arrayBuffer(), + Deno.errors.BadResource, + "Cannot read body as underlying resource unavailable", + ); +}); diff --git a/tests/unit/lint_plugin_test.ts b/tests/unit/lint_plugin_test.ts new file mode 100644 index 0000000000..38a7e1b091 --- /dev/null +++ b/tests/unit/lint_plugin_test.ts @@ -0,0 +1,769 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +import { assertEquals } from "./test_util.ts"; + +// TODO(@marvinhagemeister) Remove once we land "official" types +export interface LintReportData { + // deno-lint-ignore no-explicit-any + node: any; + message: string; +} +// TODO(@marvinhagemeister) Remove once we land "official" types +interface LintContext { + id: string; +} +// TODO(@marvinhagemeister) Remove once we land "official" types +// deno-lint-ignore no-explicit-any +type LintVisitor = Record<string, (node: any) => void>; + +// TODO(@marvinhagemeister) Remove once we land "official" types +interface LintRule { + create(ctx: LintContext): LintVisitor; + destroy?(): void; +} + +// TODO(@marvinhagemeister) Remove once we land "official" types +interface LintPlugin { + name: string; + rules: Record<string, LintRule>; +} + +function runLintPlugin(plugin: LintPlugin, fileName: string, source: string) { + // deno-lint-ignore no-explicit-any + return (Deno as any)[(Deno as any).internal].runLintPlugin( + plugin, + fileName, + source, + ); +} + +function testPlugin( + source: string, + rule: LintRule, +) { + const plugin = { + name: "test-plugin", + rules: { + testRule: rule, + }, + }; + + return runLintPlugin(plugin, "source.tsx", source); +} + +interface VisitResult { + selector: string; + kind: "enter" | "exit"; + // deno-lint-ignore no-explicit-any + node: any; +} + +function testVisit( + source: string, + ...selectors: string[] +): VisitResult[] { + const result: VisitResult[] = []; + + testPlugin(source, { + create() { + const visitor: LintVisitor = {}; + + for (const s of selectors) { + visitor[s] = (node) => { + result.push({ + kind: s.endsWith(":exit") ? 
"exit" : "enter", + selector: s, + node, + }); + }; + } + + return visitor; + }, + }); + + return result; +} + +function testLintNode(source: string, ...selectors: string[]) { + // deno-lint-ignore no-explicit-any + const log: any[] = []; + + testPlugin(source, { + create() { + const visitor: LintVisitor = {}; + + for (const s of selectors) { + visitor[s] = (node) => { + log.push(node[Symbol.for("Deno.lint.toJsValue")]()); + }; + } + + return visitor; + }, + }); + + return log; +} + +Deno.test("Plugin - visitor enter/exit", () => { + const enter = testVisit( + "foo", + "Identifier", + ); + assertEquals(enter[0].node.type, "Identifier"); + + const exit = testVisit( + "foo", + "Identifier:exit", + ); + assertEquals(exit[0].node.type, "Identifier"); + + const both = testVisit("foo", "Identifier", "Identifier:exit"); + assertEquals(both.map((t) => t.selector), ["Identifier", "Identifier:exit"]); +}); + +Deno.test("Plugin - visitor descendant", () => { + let result = testVisit( + "if (false) foo; if (false) bar()", + "IfStatement CallExpression", + ); + assertEquals(result[0].node.type, "CallExpression"); + assertEquals(result[0].node.callee.name, "bar"); + + result = testVisit( + "if (false) foo; foo()", + "IfStatement IfStatement", + ); + assertEquals(result, []); + + result = testVisit( + "if (false) foo; foo()", + "* CallExpression", + ); + assertEquals(result[0].node.type, "CallExpression"); +}); + +Deno.test("Plugin - visitor child combinator", () => { + let result = testVisit( + "if (false) foo; if (false) { bar; }", + "IfStatement > ExpressionStatement > Identifier", + ); + assertEquals(result[0].node.name, "foo"); + + result = testVisit( + "if (false) foo; foo()", + "IfStatement IfStatement", + ); + assertEquals(result, []); +}); + +Deno.test("Plugin - visitor next sibling", () => { + const result = testVisit( + "if (false) foo; if (false) bar;", + "IfStatement + IfStatement Identifier", + ); + assertEquals(result[0].node.name, "bar"); +}); + +Deno.test("Plugin - visitor subsequent sibling", () => { + const result = testVisit( + "if (false) foo; if (false) bar; if (false) baz;", + "IfStatement ~ IfStatement Identifier", + ); + assertEquals(result.map((r) => r.node.name), ["bar", "baz"]); +}); + +Deno.test("Plugin - visitor attr", () => { + let result = testVisit( + "for (const a of b) {}", + "[await]", + ); + assertEquals(result[0].node.await, false); + + result = testVisit( + "for await (const a of b) {}", + "[await=true]", + ); + assertEquals(result[0].node.await, true); + + result = testVisit( + "for await (const a of b) {}", + "ForOfStatement[await=true]", + ); + assertEquals(result[0].node.await, true); + + result = testVisit( + "for (const a of b) {}", + "ForOfStatement[await != true]", + ); + assertEquals(result[0].node.await, false); + + result = testVisit( + "async function *foo() {}", + "FunctionDeclaration[async=true][generator=true]", + ); + assertEquals(result[0].node.type, "FunctionDeclaration"); + + result = testVisit( + "foo", + "[name='foo']", + ); + assertEquals(result[0].node.name, "foo"); +}); + +Deno.test("Plugin - visitor attr to check type", () => { + let result = testVisit( + "foo", + "Identifier[type]", + ); + assertEquals(result[0].node.type, "Identifier"); + + result = testVisit( + "foo", + "Identifier[type='Identifier']", + ); + assertEquals(result[0].node.type, "Identifier"); +}); + +Deno.test("Plugin - visitor attr non-existing", () => { + const result = testVisit( + "foo", + "[non-existing]", + ); + assertEquals(result, []); +}); + +Deno.test("Plugin - 
visitor attr length special case", () => { + let result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length=2]", + ); + assertEquals(result[0].node.arguments.length, 2); + + result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length>1]", + ); + assertEquals(result[0].node.arguments.length, 2); + + result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length<2]", + ); + assertEquals(result[0].node.arguments.length, 1); + + result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length<=3]", + ); + assertEquals(result[0].node.arguments.length, 1); + assertEquals(result[1].node.arguments.length, 2); + + result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length>=1]", + ); + assertEquals(result[0].node.arguments.length, 1); + assertEquals(result[1].node.arguments.length, 2); +}); + +Deno.test("Plugin - visitor :first-child", () => { + const result = testVisit( + "{ foo; bar }", + "BlockStatement ExpressionStatement:first-child Identifier", + ); + assertEquals(result[0].node.name, "foo"); +}); + +Deno.test("Plugin - visitor :last-child", () => { + const result = testVisit( + "{ foo; bar }", + "BlockStatement ExpressionStatement:last-child Identifier", + ); + assertEquals(result[0].node.name, "bar"); +}); + +Deno.test("Plugin - visitor :nth-child", () => { + let result = testVisit( + "{ foo; bar; baz; foobar; }", + "BlockStatement ExpressionStatement:nth-child(2) Identifier", + ); + assertEquals(result[0].node.name, "bar"); + + result = testVisit( + "{ foo; bar; baz; foobar; }", + "BlockStatement ExpressionStatement:nth-child(2n) Identifier", + ); + assertEquals(result[0].node.name, "foo"); + assertEquals(result[1].node.name, "baz"); + + result = testVisit( + "{ foo; bar; baz; foobar; }", + "BlockStatement ExpressionStatement:nth-child(2n + 1) Identifier", + ); + assertEquals(result[0].node.name, "bar"); + assertEquals(result[1].node.name, "foobar"); + + result = testVisit( + "{ foo; bar; baz; foobar; }", + "BlockStatement *:nth-child(2n + 1 of ExpressionStatement) Identifier", + ); + assertEquals(result[0].node.name, "bar"); + assertEquals(result[1].node.name, "foobar"); +}); + +Deno.test("Plugin - Program", () => { + const node = testLintNode("", "Program"); + assertEquals(node[0], { + type: "Program", + sourceType: "script", + range: [1, 1], + body: [], + }); +}); + +Deno.test("Plugin - BlockStatement", () => { + const node = testLintNode("{ foo; }", "BlockStatement"); + assertEquals(node[0], { + type: "BlockStatement", + range: [1, 9], + body: [{ + type: "ExpressionStatement", + range: [3, 7], + expression: { + type: "Identifier", + name: "foo", + range: [3, 6], + }, + }], + }); +}); + +Deno.test("Plugin - BreakStatement", () => { + let node = testLintNode("break;", "BreakStatement"); + assertEquals(node[0], { + type: "BreakStatement", + range: [1, 7], + label: null, + }); + + node = testLintNode("break foo;", "BreakStatement"); + assertEquals(node[0], { + type: "BreakStatement", + range: [1, 11], + label: { + type: "Identifier", + range: [7, 10], + name: "foo", + }, + }); +}); + +Deno.test("Plugin - ContinueStatement", () => { + let node = testLintNode("continue;", "ContinueStatement"); + assertEquals(node[0], { + type: "ContinueStatement", + range: [1, 10], + label: null, + }); + + node = testLintNode("continue foo;", "ContinueStatement"); + assertEquals(node[0], { + type: "ContinueStatement", + range: [1, 14], + label: { + type: "Identifier", + range: [10, 13], + name: "foo", + }, + }); +}); + 
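+// Range convention: judging from the fixtures in this test file, node ranges appear to be 1-based, end-exclusive [start, end) offsets into the source text (e.g. "debugger;" below is 9 characters and is reported as range [1, 10]).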
+Deno.test("Plugin - DebuggerStatement", () => { + const node = testLintNode("debugger;", "DebuggerStatement"); + assertEquals(node[0], { + type: "DebuggerStatement", + range: [1, 10], + }); +}); + +Deno.test("Plugin - DoWhileStatement", () => { + const node = testLintNode("do {} while (foo);", "DoWhileStatement"); + assertEquals(node[0], { + type: "DoWhileStatement", + range: [1, 19], + test: { + type: "Identifier", + range: [14, 17], + name: "foo", + }, + body: { + type: "BlockStatement", + range: [4, 6], + body: [], + }, + }); +}); + +Deno.test("Plugin - ExpressionStatement", () => { + const node = testLintNode("foo;", "ExpressionStatement"); + assertEquals(node[0], { + type: "ExpressionStatement", + range: [1, 5], + expression: { + type: "Identifier", + range: [1, 4], + name: "foo", + }, + }); +}); + +Deno.test("Plugin - ForInStatement", () => { + const node = testLintNode("for (a in b) {}", "ForInStatement"); + assertEquals(node[0], { + type: "ForInStatement", + range: [1, 16], + left: { + type: "Identifier", + range: [6, 7], + name: "a", + }, + right: { + type: "Identifier", + range: [11, 12], + name: "b", + }, + body: { + type: "BlockStatement", + range: [14, 16], + body: [], + }, + }); +}); + +Deno.test("Plugin - ForOfStatement", () => { + let node = testLintNode("for (a of b) {}", "ForOfStatement"); + assertEquals(node[0], { + type: "ForOfStatement", + range: [1, 16], + await: false, + left: { + type: "Identifier", + range: [6, 7], + name: "a", + }, + right: { + type: "Identifier", + range: [11, 12], + name: "b", + }, + body: { + type: "BlockStatement", + range: [14, 16], + body: [], + }, + }); + + node = testLintNode("for await (a of b) {}", "ForOfStatement"); + assertEquals(node[0], { + type: "ForOfStatement", + range: [1, 22], + await: true, + left: { + type: "Identifier", + range: [12, 13], + name: "a", + }, + right: { + type: "Identifier", + range: [17, 18], + name: "b", + }, + body: { + type: "BlockStatement", + range: [20, 22], + body: [], + }, + }); +}); + +Deno.test("Plugin - ForStatement", () => { + let node = testLintNode("for (;;) {}", "ForStatement"); + assertEquals(node[0], { + type: "ForStatement", + range: [1, 12], + init: null, + test: null, + update: null, + body: { + type: "BlockStatement", + range: [10, 12], + body: [], + }, + }); + + node = testLintNode("for (a; b; c) {}", "ForStatement"); + assertEquals(node[0], { + type: "ForStatement", + range: [1, 17], + init: { + type: "Identifier", + range: [6, 7], + name: "a", + }, + test: { + type: "Identifier", + range: [9, 10], + name: "b", + }, + update: { + type: "Identifier", + range: [12, 13], + name: "c", + }, + body: { + type: "BlockStatement", + range: [15, 17], + body: [], + }, + }); +}); + +Deno.test("Plugin - IfStatement", () => { + let node = testLintNode("if (foo) {}", "IfStatement"); + assertEquals(node[0], { + type: "IfStatement", + range: [1, 12], + test: { + type: "Identifier", + name: "foo", + range: [5, 8], + }, + consequent: { + type: "BlockStatement", + range: [10, 12], + body: [], + }, + alternate: null, + }); + + node = testLintNode("if (foo) {} else {}", "IfStatement"); + assertEquals(node[0], { + type: "IfStatement", + range: [1, 20], + test: { + type: "Identifier", + name: "foo", + range: [5, 8], + }, + consequent: { + type: "BlockStatement", + range: [10, 12], + body: [], + }, + alternate: { + type: "BlockStatement", + range: [18, 20], + body: [], + }, + }); +}); + +Deno.test("Plugin - LabeledStatement", () => { + const node = testLintNode("foo: {};", "LabeledStatement"); + 
assertEquals(node[0], { + type: "LabeledStatement", + range: [1, 8], + label: { + type: "Identifier", + name: "foo", + range: [1, 4], + }, + body: { + type: "BlockStatement", + range: [6, 8], + body: [], + }, + }); +}); + +Deno.test("Plugin - ReturnStatement", () => { + let node = testLintNode("return", "ReturnStatement"); + assertEquals(node[0], { + type: "ReturnStatement", + range: [1, 7], + argument: null, + }); + + node = testLintNode("return foo;", "ReturnStatement"); + assertEquals(node[0], { + type: "ReturnStatement", + range: [1, 12], + argument: { + type: "Identifier", + name: "foo", + range: [8, 11], + }, + }); +}); + +Deno.test("Plugin - SwitchStatement", () => { + const node = testLintNode( + `switch (foo) { + case foo: + case bar: + break; + default: + {} + }`, + "SwitchStatement", + ); + assertEquals(node[0], { + type: "SwitchStatement", + range: [1, 94], + discriminant: { + type: "Identifier", + range: [9, 12], + name: "foo", + }, + cases: [ + { + type: "SwitchCase", + range: [22, 31], + test: { + type: "Identifier", + range: [27, 30], + name: "foo", + }, + consequent: [], + }, + { + type: "SwitchCase", + range: [38, 62], + test: { + type: "Identifier", + range: [43, 46], + name: "bar", + }, + consequent: [ + { + type: "BreakStatement", + label: null, + range: [56, 62], + }, + ], + }, + { + type: "SwitchCase", + range: [69, 88], + test: null, + consequent: [ + { + type: "BlockStatement", + range: [86, 88], + body: [], + }, + ], + }, + ], + }); +}); + +Deno.test("Plugin - ThrowStatement", () => { + const node = testLintNode("throw foo;", "ThrowStatement"); + assertEquals(node[0], { + type: "ThrowStatement", + range: [1, 11], + argument: { + type: "Identifier", + range: [7, 10], + name: "foo", + }, + }); +}); + +Deno.test("Plugin - TryStatement", () => { + let node = testLintNode("try {} catch {};", "TryStatement"); + assertEquals(node[0], { + type: "TryStatement", + range: [1, 16], + block: { + type: "BlockStatement", + range: [5, 7], + body: [], + }, + handler: { + type: "CatchClause", + range: [8, 16], + param: null, + body: { + type: "BlockStatement", + range: [14, 16], + body: [], + }, + }, + finalizer: null, + }); + + node = testLintNode("try {} catch (e) {};", "TryStatement"); + assertEquals(node[0], { + type: "TryStatement", + range: [1, 20], + block: { + type: "BlockStatement", + range: [5, 7], + body: [], + }, + handler: { + type: "CatchClause", + range: [8, 20], + param: { + type: "Identifier", + range: [15, 16], + name: "e", + }, + body: { + type: "BlockStatement", + range: [18, 20], + body: [], + }, + }, + finalizer: null, + }); + + node = testLintNode("try {} finally {};", "TryStatement"); + assertEquals(node[0], { + type: "TryStatement", + range: [1, 18], + block: { + type: "BlockStatement", + range: [5, 7], + body: [], + }, + handler: null, + finalizer: { + type: "BlockStatement", + range: [16, 18], + body: [], + }, + }); +}); + +Deno.test("Plugin - WhileStatement", () => { + const node = testLintNode("while (foo) {}", "WhileStatement"); + assertEquals(node[0], { + type: "WhileStatement", + range: [1, 15], + test: { + type: "Identifier", + range: [8, 11], + name: "foo", + }, + body: { + type: "BlockStatement", + range: [13, 15], + body: [], + }, + }); +}); diff --git a/tests/unit/lint_selectors_test.ts b/tests/unit/lint_selectors_test.ts new file mode 100644 index 0000000000..0909a4907a --- /dev/null +++ b/tests/unit/lint_selectors_test.ts @@ -0,0 +1,610 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+ +import { assertEquals } from "@std/assert/equals"; +import { + ATTR_BIN_NODE, + ATTR_EXISTS_NODE, + BinOp, + ELEM_NODE, + Lexer, + parseSelector, + PSEUDO_FIRST_CHILD, + PSEUDO_HAS, + PSEUDO_LAST_CHILD, + PSEUDO_NOT, + PSEUDO_NTH_CHILD, + RELATION_NODE, + splitSelectors, + Token, +} from "../../cli/js/40_lint_selector.js"; +import { assertThrows } from "@std/assert"; + +Deno.test("splitSelectors", () => { + assertEquals(splitSelectors("foo"), ["foo"]); + assertEquals(splitSelectors("foo, bar"), ["foo", "bar"]); + assertEquals(splitSelectors("foo:f(bar, baz)"), ["foo:f(bar, baz)"]); + assertEquals(splitSelectors("foo:f(bar, baz), foobar"), [ + "foo:f(bar, baz)", + "foobar", + ]); +}); + +interface LexState { + token: number; + value: string; +} + +function testLexer(input: string): LexState[] { + const out: LexState[] = []; + const l = new Lexer(input); + + while (l.token !== Token.EOF) { + out.push({ token: l.token, value: l.value }); + l.next(); + } + + return out; +} + +const Tags: Record<string, number> = { Foo: 1, Bar: 2, FooBar: 3 }; +const Attrs: Record<string, number> = { foo: 1, bar: 2, foobar: 3, attr: 4 }; +const toTag = (name: string): number => Tags[name]; +const toAttr = (name: string): number => Attrs[name]; + +const testParse = (input: string) => parseSelector(input, toTag, toAttr); + +Deno.test("Lexer - Elem", () => { + assertEquals(testLexer("Foo"), [ + { token: Token.Word, value: "Foo" }, + ]); + assertEquals(testLexer("foo-bar"), [ + { token: Token.Word, value: "foo-bar" }, + ]); + assertEquals(testLexer("foo_bar"), [ + { token: Token.Word, value: "foo_bar" }, + ]); + assertEquals(testLexer("Foo Bar Baz"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Bar" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Baz" }, + ]); + assertEquals(testLexer("Foo Bar Baz"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Bar" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Baz" }, + ]); +}); + +Deno.test("Lexer - Relation >", () => { + assertEquals(testLexer("Foo > Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: ">" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("Foo>Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: ">" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer(">Bar"), [ + { token: Token.Op, value: ">" }, + { token: Token.Word, value: "Bar" }, + ]); +}); + +Deno.test("Lexer - Relation +", () => { + assertEquals(testLexer("Foo + Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: "+" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("Foo+Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: "+" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("+Bar"), [ + { token: Token.Op, value: "+" }, + { token: Token.Word, value: "Bar" }, + ]); +}); + +Deno.test("Lexer - Relation ~", () => { + assertEquals(testLexer("Foo ~ Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: "~" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("Foo~Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: "~" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("~Bar"), [ + { token: Token.Op, value: "~" }, + { token: Token.Word, value: "Bar" }, + ]); + + assertEquals(testLexer("Foo Bar ~ Bar"), 
[ + { token: Token.Word, value: "Foo" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Bar" }, + { token: Token.Op, value: "~" }, + { token: Token.Word, value: "Bar" }, + ]); +}); + +Deno.test("Lexer - Attr", () => { + assertEquals(testLexer("[attr]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr=1]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "=" }, + { token: Token.Word, value: "1" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr='foo']"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "=" }, + { token: Token.String, value: "foo" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr>=2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: ">=" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr<=2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "<=" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr>2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: ">" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr<2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "<" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr!=2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "!=" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr.foo=1]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Dot, value: "" }, + { token: Token.Word, value: "foo" }, + { token: Token.Op, value: "=" }, + { token: Token.Word, value: "1" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr] [attr]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.BracketClose, value: "" }, + { token: Token.Space, value: "" }, + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("Foo[attr][attr2=1]"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.BracketClose, value: "" }, + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr2" }, + { token: Token.Op, value: "=" }, + { token: Token.Word, value: "1" }, + { token: Token.BracketClose, value: "" }, + ]); +}); + +Deno.test("Lexer - Pseudo", () => { + assertEquals(testLexer(":foo-bar"), [ + { token: Token.Colon, value: "" }, + { token: Token.Word, value: "foo-bar" }, + ]); + assertEquals(testLexer("Foo:foo-bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Colon, value: "" }, + { token: Token.Word, value: "foo-bar" 
}, + ]); + assertEquals(testLexer(":foo-bar(baz)"), [ + { token: Token.Colon, value: "" }, + { token: Token.Word, value: "foo-bar" }, + { token: Token.BraceOpen, value: "" }, + { token: Token.Word, value: "baz" }, + { token: Token.BraceClose, value: "" }, + ]); + assertEquals(testLexer(":foo-bar(2n + 1)"), [ + { token: Token.Colon, value: "" }, + { token: Token.Word, value: "foo-bar" }, + { token: Token.BraceOpen, value: "" }, + { token: Token.Word, value: "2n" }, + { token: Token.Op, value: "+" }, + { token: Token.Word, value: "1" }, + { token: Token.BraceClose, value: "" }, + ]); +}); + +Deno.test("Parser - Elem", () => { + assertEquals(testParse("Foo"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + ]]); +}); + +Deno.test("Parser - Relation (descendant)", () => { + assertEquals(testParse("Foo Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { + type: RELATION_NODE, + op: BinOp.Space, + }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); +}); + +Deno.test("Parser - Relation", () => { + assertEquals(testParse("Foo > Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { + type: RELATION_NODE, + op: BinOp.Greater, + }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); + + assertEquals(testParse("Foo ~ Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { + type: RELATION_NODE, + op: BinOp.Tilde, + }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); + + assertEquals(testParse("Foo + Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { + type: RELATION_NODE, + op: BinOp.Plus, + }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); +}); + +Deno.test("Parser - Attr", () => { + assertEquals(testParse("[foo]"), [[ + { + type: ATTR_EXISTS_NODE, + prop: [1], + }, + ]]); + + assertEquals(testParse("[foo][bar]"), [[ + { + type: ATTR_EXISTS_NODE, + prop: [1], + }, + { + type: ATTR_EXISTS_NODE, + prop: [2], + }, + ]]); + + assertEquals(testParse("[foo=1]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: 1, + }, + ]]); + assertEquals(testParse("[foo=true]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: true, + }, + ]]); + assertEquals(testParse("[foo=false]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: false, + }, + ]]); + assertEquals(testParse("[foo=null]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: null, + }, + ]]); + assertEquals(testParse("[foo='str']"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: "str", + }, + ]]); + assertEquals(testParse('[foo="str"]'), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: "str", + }, + ]]); + assertEquals(testParse("[foo=/str/]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: /str/, + }, + ]]); + assertEquals(testParse("[foo=/str/g]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: /str/g, + }, + ]]); +}); + +Deno.test("Parser - Attr nested", () => { + assertEquals(testParse("[foo.bar]"), [[ + { + type: ATTR_EXISTS_NODE, + prop: [1, 2], + }, + ]]); + + assertEquals(testParse("[foo.bar = 2]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1, 2], + value: 2, + }, + ]]); +}); + +Deno.test("Parser - Pseudo no value", () => { + assertEquals(testParse(":first-child"), [[ + { + type: PSEUDO_FIRST_CHILD, + }, + ]]); + assertEquals(testParse(":last-child"), [[ + { + type: PSEUDO_LAST_CHILD, + }, + ]]); +}); + 
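+// Shape note: per the fixtures below, an An+B argument is parsed into { step: A, stepOffset: B, repeat: A !== 0 }, while a bare index such as ":nth-child(2)" appears to be stored zero-based as { step: 0, stepOffset: 1, repeat: false }.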
+Deno.test("Parser - Pseudo nth-child", () => { + assertEquals(testParse(":nth-child(2)"), [[ + { + type: PSEUDO_NTH_CHILD, + of: null, + op: null, + step: 0, + stepOffset: 1, + repeat: false, + }, + ]]); + assertEquals(testParse(":nth-child(2n)"), [[ + { + type: PSEUDO_NTH_CHILD, + of: null, + op: null, + step: 2, + stepOffset: 0, + repeat: true, + }, + ]]); + assertEquals(testParse(":nth-child(-2n)"), [[ + { + type: PSEUDO_NTH_CHILD, + of: null, + op: null, + step: -2, + stepOffset: 0, + repeat: true, + }, + ]]); + assertEquals(testParse(":nth-child(2n + 1)"), [[ + { + type: PSEUDO_NTH_CHILD, + of: null, + op: "+", + step: 2, + stepOffset: 1, + repeat: true, + }, + ]]); + assertEquals(testParse(":nth-child(2n + 1 of Foo[attr])"), [[ + { + type: PSEUDO_NTH_CHILD, + of: [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { type: ATTR_EXISTS_NODE, prop: [4] }, + ], + op: "+", + step: 2, + stepOffset: 1, + repeat: true, + }, + ]]); + + // Invalid selectors + assertThrows(() => testParse(":nth-child(2n + 1 of Foo[attr], Bar)")); + assertThrows(() => testParse(":nth-child(2n - 1 foo)")); +}); + +Deno.test("Parser - Pseudo has/is/where", () => { + assertEquals(testParse(":has(Foo:has(Foo), Bar)"), [[ + { + type: PSEUDO_HAS, + selectors: [ + [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { + type: PSEUDO_HAS, + selectors: [ + [{ type: ELEM_NODE, elem: 1, wildcard: false }], + ], + }, + ], + [ + { type: ELEM_NODE, elem: 2, wildcard: false }, + ], + ], + }, + ]]); + assertEquals(testParse(":where(Foo:where(Foo), Bar)"), [[ + { + type: PSEUDO_HAS, + selectors: [ + [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { + type: PSEUDO_HAS, + selectors: [ + [{ type: ELEM_NODE, elem: 1, wildcard: false }], + ], + }, + ], + [ + { type: ELEM_NODE, elem: 2, wildcard: false }, + ], + ], + }, + ]]); + assertEquals(testParse(":is(Foo:is(Foo), Bar)"), [[ + { + type: PSEUDO_HAS, + selectors: [ + [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { + type: PSEUDO_HAS, + selectors: [ + [{ type: ELEM_NODE, elem: 1, wildcard: false }], + ], + }, + ], + [ + { type: ELEM_NODE, elem: 2, wildcard: false }, + ], + ], + }, + ]]); +}); + +Deno.test("Parser - Pseudo not", () => { + assertEquals(testParse(":not(Foo:not(Foo), Bar)"), [[ + { + type: PSEUDO_NOT, + selectors: [ + [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { + type: PSEUDO_NOT, + selectors: [ + [{ type: ELEM_NODE, elem: 1, wildcard: false }], + ], + }, + ], + [ + { type: ELEM_NODE, elem: 2, wildcard: false }, + ], + ], + }, + ]]); +}); + +Deno.test("Parser - mixed", () => { + assertEquals(testParse("Foo[foo=true] Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { type: ATTR_BIN_NODE, op: BinOp.Equal, prop: [1], value: true }, + { type: RELATION_NODE, op: BinOp.Space }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); +}); diff --git a/tests/unit/ops_test.ts b/tests/unit/ops_test.ts index 6de55f8b66..631e5c5736 100644 --- a/tests/unit/ops_test.ts +++ b/tests/unit/ops_test.ts @@ -1,6 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -const EXPECTED_OP_COUNT = 12; +const EXPECTED_OP_COUNT = 13; Deno.test(function checkExposedOps() { // @ts-ignore TS doesn't allow to index with symbol diff --git a/tests/unit/quic_test.ts b/tests/unit/quic_test.ts new file mode 100644 index 0000000000..f5423327de --- /dev/null +++ b/tests/unit/quic_test.ts @@ -0,0 +1,172 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+ +import { assertEquals } from "./test_util.ts"; + +const cert = Deno.readTextFileSync("tests/testdata/tls/localhost.crt"); +const key = Deno.readTextFileSync("tests/testdata/tls/localhost.key"); +const caCerts = [Deno.readTextFileSync("tests/testdata/tls/RootCA.pem")]; + +async function pair(opt?: Deno.QuicTransportOptions): Promise< + [Deno.QuicConn, Deno.QuicConn, Deno.QuicListener] +> { + const listener = await Deno.listenQuic({ + hostname: "localhost", + port: 0, + cert, + key, + alpnProtocols: ["deno-test"], + ...opt, + }); + + const [server, client] = await Promise.all([ + listener.accept(), + Deno.connectQuic({ + hostname: "localhost", + port: listener.addr.port, + caCerts, + alpnProtocols: ["deno-test"], + ...opt, + }), + ]); + + assertEquals(server.protocol, "deno-test"); + assertEquals(client.protocol, "deno-test"); + assertEquals(client.remoteAddr, listener.addr); + + return [server, client, listener]; +} + +Deno.test("bidirectional stream", async () => { + const [server, client, listener] = await pair(); + + const encoded = (new TextEncoder()).encode("hi!"); + + { + const bi = await server.createBidirectionalStream({ sendOrder: 42 }); + assertEquals(bi.writable.sendOrder, 42); + bi.writable.sendOrder = 0; + assertEquals(bi.writable.sendOrder, 0); + await bi.writable.getWriter().write(encoded); + } + + { + const { value: bi } = await client.incomingBidirectionalStreams + .getReader() + .read(); + const { value: data } = await bi!.readable.getReader().read(); + assertEquals(data, encoded); + } + + listener.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("unidirectional stream", async () => { + const [server, client, listener] = await pair(); + + const encoded = (new TextEncoder()).encode("hi!"); + + { + const uni = await server.createUnidirectionalStream({ sendOrder: 42 }); + assertEquals(uni.sendOrder, 42); + uni.sendOrder = 0; + assertEquals(uni.sendOrder, 0); + await uni.getWriter().write(encoded); + } + + { + const { value: uni } = await client.incomingUnidirectionalStreams + .getReader() + .read(); + const { value: data } = await uni!.getReader().read(); + assertEquals(data, encoded); + } + + listener.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("datagrams", async () => { + const [server, client, listener] = await pair(); + + const encoded = (new TextEncoder()).encode("hi!"); + + await server.sendDatagram(encoded); + + const data = await client.readDatagram(); + assertEquals(data, encoded); + + listener.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("closing", async () => { + const [server, client, listener] = await pair(); + + server.close({ closeCode: 42, reason: "hi!" }); + + assertEquals(await client.closed, { closeCode: 42, reason: "hi!" }); + + listener.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("max concurrent streams", async () => { + const [server, client, listener] = await pair({ + maxConcurrentBidirectionalStreams: 1, + maxConcurrentUnidirectionalStreams: 1, + }); + + { + await server.createBidirectionalStream(); + await server.createBidirectionalStream() + .then(() => { + throw new Error("expected failure"); + }, () => { + // success! + }); + } + + { + await server.createUnidirectionalStream(); + await server.createUnidirectionalStream() + .then(() => { + throw new Error("expected failure"); + }, () => { + // success! 
+ }); + } + + listener.close({ closeCode: 0, reason: "" }); + server.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("incoming", async () => { + const listener = await Deno.listenQuic({ + hostname: "localhost", + port: 0, + cert, + key, + alpnProtocols: ["deno-test"], + }); + + const connect = () => + Deno.connectQuic({ + hostname: "localhost", + port: listener.addr.port, + caCerts, + alpnProtocols: ["deno-test"], + }); + + const c1p = connect(); + const i1 = await listener.incoming(); + const server = await i1.accept(); + const client = await c1p; + + assertEquals(server.protocol, "deno-test"); + assertEquals(client.protocol, "deno-test"); + assertEquals(client.remoteAddr, listener.addr); + + listener.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); diff --git a/tests/unit_node/_fs/_fs_access_test.ts b/tests/unit_node/_fs/_fs_access_test.ts index f8010b0b8b..0881769f2c 100644 --- a/tests/unit_node/_fs/_fs_access_test.ts +++ b/tests/unit_node/_fs/_fs_access_test.ts @@ -28,6 +28,8 @@ Deno.test( try { await fs.promises.access(file, fs.constants.R_OK); await fs.promises.access(file, fs.constants.W_OK); + await fs.promises.access(file, fs.constants.X_OK); + await fs.promises.access(file, fs.constants.F_OK); } finally { await Deno.remove(file); } @@ -60,6 +62,8 @@ Deno.test( try { fs.accessSync(file, fs.constants.R_OK); fs.accessSync(file, fs.constants.W_OK); + fs.accessSync(file, fs.constants.X_OK); + fs.accessSync(file, fs.constants.F_OK); } finally { Deno.removeSync(file); } diff --git a/tests/unit_node/_fs/_fs_handle_test.ts b/tests/unit_node/_fs/_fs_handle_test.ts index e26b82aa06..84d72c0745 100644 --- a/tests/unit_node/_fs/_fs_handle_test.ts +++ b/tests/unit_node/_fs/_fs_handle_test.ts @@ -117,3 +117,85 @@ Deno.test("[node/fs filehandle.writeFile] Write to file", async function () { assertEquals(decoder.decode(data), "hello world"); }); + +Deno.test( + "[node/fs filehandle.truncate] Truncate file with length", + async function () { + const tempFile: string = await Deno.makeTempFile(); + const fileHandle = await fs.open(tempFile, "w+"); + + await fileHandle.writeFile("hello world"); + + await fileHandle.truncate(5); + + const data = Deno.readFileSync(tempFile); + await Deno.remove(tempFile); + await fileHandle.close(); + + assertEquals(decoder.decode(data), "hello"); + }, +); + +Deno.test( + "[node/fs filehandle.truncate] Truncate file without length", + async function () { + const tempFile: string = await Deno.makeTempFile(); + const fileHandle = await fs.open(tempFile, "w+"); + + await fileHandle.writeFile("hello world"); + + await fileHandle.truncate(); + + const data = Deno.readFileSync(tempFile); + await Deno.remove(tempFile); + await fileHandle.close(); + + assertEquals(decoder.decode(data), ""); + }, +); + +Deno.test( + "[node/fs filehandle.truncate] Truncate file with extension", + async function () { + const tempFile: string = await Deno.makeTempFile(); + const fileHandle = await fs.open(tempFile, "w+"); + + await fileHandle.writeFile("hi"); + + await fileHandle.truncate(5); + + const data = Deno.readFileSync(tempFile); + await Deno.remove(tempFile); + await fileHandle.close(); + + const expected = new Uint8Array(5); + expected.set(new TextEncoder().encode("hi")); + + assertEquals(data, expected); + assertEquals(data.length, 5); + assertEquals(decoder.decode(data.subarray(0, 2)), "hi"); + // Verify null bytes + assertEquals(data[2], 0); + assertEquals(data[3], 0); + assertEquals(data[4], 
0); + }, +); + +Deno.test( + "[node/fs filehandle.truncate] Truncate file with negative length", + async function () { + const tempFile: string = await Deno.makeTempFile(); + const fileHandle = await fs.open(tempFile, "w+"); + + await fileHandle.writeFile("hello world"); + + await fileHandle.truncate(-1); + + const data = Deno.readFileSync(tempFile); + await Deno.remove(tempFile); + await fileHandle.close(); + + assertEquals(decoder.decode(data), ""); + assertEquals(data.length, 0); + }, +); diff --git a/tests/unit_node/crypto/crypto_cipher_gcm_test.ts b/tests/unit_node/crypto/crypto_cipher_gcm_test.ts index b379a43696..16f6f56a9c 100644 --- a/tests/unit_node/crypto/crypto_cipher_gcm_test.ts +++ b/tests/unit_node/crypto/crypto_cipher_gcm_test.ts @@ -119,3 +119,23 @@ Deno.test({ ); }, }); + +// Issue #27441 +// https://github.com/denoland/deno/issues/27441 +Deno.test({ + name: "aes-256-gcm supports IV of non standard length", + fn() { + const decipher = crypto.createDecipheriv( + "aes-256-gcm", + Buffer.from("eYLEiLFQnpjYksWTiKpwv2sKhw+WJb5Fo/aY2YqXswc=", "base64"), + Buffer.from("k5oP3kb8tTbZaL3PxbFWN8ToOb8vfv2b1EuPz1LbmYU=", "base64"), // 256 bits IV + ); + const decrypted = decipher.update( + "s0/KBsFec29XLrGbAnLiNA==", + "base64", + "utf-8", + ); + assertEquals(decrypted, "this is a secret"); + decipher.final(); + }, +}); diff --git a/tests/unit_node/crypto/crypto_cipher_test.ts b/tests/unit_node/crypto/crypto_cipher_test.ts index 65a5b29eeb..e40625c5a4 100644 --- a/tests/unit_node/crypto/crypto_cipher_test.ts +++ b/tests/unit_node/crypto/crypto_cipher_test.ts @@ -361,6 +361,19 @@ Deno.test({ name: "getCiphers", fn() { assertEquals(crypto.getCiphers().includes("aes-128-cbc"), true); + + const getZeroKey = (cipher: string) => zeros(+cipher.match(/\d+/)![0] / 8); + const getZeroIv = (cipher: string) => { + if (cipher.includes("gcm") || cipher.includes("ecb")) { + return zeros(12); + } + return zeros(16); + }; + + for (const cipher of crypto.getCiphers()) { + crypto.createCipheriv(cipher, getZeroKey(cipher), getZeroIv(cipher)) + .final(); + } }, }); diff --git a/tests/unit_node/crypto/crypto_key_test.ts b/tests/unit_node/crypto/crypto_key_test.ts index 5d206acc72..82306d02fe 100644 --- a/tests/unit_node/crypto/crypto_key_test.ts +++ b/tests/unit_node/crypto/crypto_key_test.ts @@ -700,3 +700,19 @@ Deno.test("generateKeyPair promisify", async () => { assert(publicKey.startsWith("-----BEGIN PUBLIC KEY-----")); assert(privateKey.startsWith("-----BEGIN PRIVATE KEY-----")); }); + +Deno.test("RSA export private JWK", function () { + // @ts-ignore @types/node broken + const { privateKey, publicKey } = generateKeyPairSync("rsa", { + modulusLength: 4096, + publicKeyEncoding: { + format: "jwk", + }, + privateKeyEncoding: { + format: "jwk", + }, + }); + + assertEquals((privateKey as any).kty, "RSA"); + assertEquals((privateKey as any).n, (publicKey as any).n); +}); diff --git a/tests/unit_node/crypto/crypto_misc_test.ts b/tests/unit_node/crypto/crypto_misc_test.ts index 007009339d..9f72683398 100644 --- a/tests/unit_node/crypto/crypto_misc_test.ts +++ b/tests/unit_node/crypto/crypto_misc_test.ts @@ -1,7 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
import { randomFillSync, randomUUID, timingSafeEqual } from "node:crypto"; import { Buffer } from "node:buffer"; -import { assert, assertEquals } from "../../unit/test_util.ts"; +import { assert, assertEquals, assertThrows } from "../../unit/test_util.ts"; import { assertNotEquals } from "@std/assert"; Deno.test("[node/crypto.getRandomUUID] works the same way as Web Crypto API", () => { @@ -36,3 +36,10 @@ Deno.test("[node/crypto.timingSafeEqual] compares equal Buffer with different by assert(timingSafeEqual(a, b)); }); + +Deno.test("[node/crypto.timingSafeEqual] RangeError on Buffer with different byteLength", () => { + const a = Buffer.from([212, 213]); + const b = Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 212, 213, 0]); + + assertThrows(() => timingSafeEqual(a, b), RangeError); +}); diff --git a/tests/unit_node/crypto/gcmEncryptExtIV128.json b/tests/unit_node/crypto/gcmEncryptExtIV128.json index 64896642d4..f0b4bca1f1 100644 --- a/tests/unit_node/crypto/gcmEncryptExtIV128.json +++ b/tests/unit_node/crypto/gcmEncryptExtIV128.json @@ -51373,5 +51373,322 @@ 102, 238 ] + }, + { + "key": [ + 131, + 249, + 217, + 125, + 74, + 183, + 89, + 253, + 220, + 195, + 239, + 84, + 160, + 226, + 168, + 236 + ], + "nonce": [ + 207 + ], + "aad": [ + 109, + 212, + 158, + 174, + 180, + 16, + 61, + 172, + 143, + 151, + 227, + 35, + 73, + 70, + 221, + 45 + ], + "plaintext": [ + 119, + 230, + 50, + 156, + 249, + 66, + 79, + 113, + 200, + 8, + 223, + 145, + 112, + 191, + 210, + 152 + ], + "ciphertext": [ + 80, + 222, + 134, + 167, + 169, + 42, + 138, + 94, + 163, + 61, + 181, + 105, + 107, + 150, + 205, + 119 + ], + "tag": [ + 170, + 24, + 30, + 132, + 188, + 139, + 75, + 245, + 166, + 137, + 39, + 196, + 9, + 212, + 34, + 203 + ] + }, + { + "key": [ + 202, + 145, + 226, + 65, + 68, + 9, + 164, + 57, + 176, + 101, + 115, + 215, + 114, + 249, + 10, + 251 + ], + "nonce": [ + 23, + 112, + 8, + 249, + 32, + 160, + 97, + 105, + 204, + 223, + 117, + 58, + 51, + 133, + 83, + 254, + 253, + 70, + 132, + 88, + 105, + 201, + 36, + 77, + 164, + 73, + 151, + 248, + 61, + 76, + 232, + 5, + 161, + 135, + 7, + 200, + 77, + 17, + 79, + 156, + 104, + 66, + 123, + 34, + 132, + 21, + 145, + 230, + 202, + 236, + 245, + 195, + 231, + 42, + 37, + 22, + 122, + 168, + 96, + 197, + 27, + 220, + 26, + 165, + 109, + 205, + 105, + 242, + 154, + 47, + 53, + 231, + 10, + 50, + 43, + 158, + 186, + 9, + 42, + 152, + 214, + 106, + 149, + 107, + 77, + 41, + 67, + 131, + 160, + 235, + 171, + 38, + 247, + 196, + 223, + 26, + 93, + 64, + 96, + 223, + 196, + 90, + 20, + 21, + 81, + 0, + 234, + 125, + 158, + 50, + 222, + 187, + 101, + 55, + 64, + 107, + 117, + 114, + 145, + 113, + 5, + 5, + 20, + 46, + 118, + 89, + 252, + 119 + ], + "aad": [ + 191, + 235, + 21, + 252, + 247, + 177, + 95, + 14, + 20, + 192, + 68, + 57, + 182, + 121, + 80, + 189 + ], + "plaintext": [ + 40, + 0, + 62, + 48, + 196, + 164, + 202, + 158, + 65, + 170, + 254, + 250, + 193, + 225, + 195, + 222 + ], + "ciphertext": [ + 0, + 228, + 114, + 151, + 31, + 58, + 119, + 112, + 170, + 113, + 88, + 253, + 146, + 241, + 123, + 183 + ], + "tag": [ + 22, + 102, + 27, + 133, + 235, + 81, + 100, + 108, + 148, + 207, + 43, + 228, + 228, + 45, + 122, + 142 + ] } ] diff --git a/tests/unit_node/crypto/gcmEncryptExtIV256.json b/tests/unit_node/crypto/gcmEncryptExtIV256.json index cb8ba30869..808c47ec6e 100644 --- a/tests/unit_node/crypto/gcmEncryptExtIV256.json +++ b/tests/unit_node/crypto/gcmEncryptExtIV256.json @@ -57373,5 +57373,354 @@ 246, 57 ] + }, + { + "key": [ + 187, + 70, + 53, + 215, + 102, 
+ 221, + 14, + 74, + 112, + 25, + 209, + 114, + 76, + 115, + 110, + 31, + 44, + 1, + 106, + 249, + 226, + 158, + 125, + 58, + 162, + 192, + 222, + 35, + 231, + 128, + 175, + 38 + ], + "nonce": [ + 171 + ], + "aad": [ + 15, + 133, + 199, + 219, + 235, + 103, + 75, + 122, + 112, + 195, + 81, + 37, + 211, + 97, + 147, + 80 + ], + "plaintext": [ + 208, + 92, + 232, + 120, + 217, + 70, + 98, + 209, + 82, + 11, + 24, + 75, + 75, + 239, + 60, + 69 + ], + "ciphertext": [ + 81, + 186, + 162, + 106, + 106, + 113, + 156, + 22, + 0, + 100, + 95, + 243, + 191, + 223, + 165, + 59 + ], + "tag": [ + 107, + 213, + 78, + 81, + 132, + 235, + 48, + 9, + 52, + 179, + 146, + 195, + 43, + 124, + 26, + 110 + ] + }, + { + "key": [ + 252, + 188, + 126, + 182, + 39, + 22, + 220, + 127, + 121, + 43, + 97, + 148, + 210, + 109, + 109, + 86, + 158, + 174, + 224, + 122, + 157, + 60, + 55, + 202, + 66, + 133, + 64, + 144, + 102, + 30, + 24, + 69 + ], + "nonce": [ + 76, + 140, + 70, + 36, + 39, + 155, + 35, + 180, + 149, + 199, + 136, + 132, + 76, + 118, + 210, + 37, + 235, + 242, + 56, + 38, + 89, + 156, + 62, + 28, + 244, + 219, + 29, + 162, + 214, + 90, + 127, + 117, + 68, + 216, + 232, + 111, + 204, + 51, + 251, + 17, + 61, + 49, + 116, + 184, + 199, + 144, + 49, + 34, + 203, + 89, + 103, + 246, + 16, + 115, + 130, + 204, + 90, + 198, + 231, + 160, + 228, + 202, + 79, + 8, + 222, + 62, + 145, + 29, + 72, + 62, + 104, + 37, + 61, + 63, + 136, + 108, + 254, + 52, + 155, + 249, + 50, + 153, + 162, + 142, + 102, + 91, + 192, + 150, + 165, + 28, + 232, + 76, + 230, + 148, + 11, + 52, + 160, + 55, + 114, + 36, + 131, + 185, + 106, + 123, + 37, + 80, + 127, + 90, + 4, + 100, + 60, + 103, + 48, + 250, + 170, + 182, + 24, + 230, + 35, + 26, + 114, + 119, + 20, + 214, + 243, + 102, + 250, + 155 + ], + "aad": [ + 60, + 24, + 42, + 241, + 156, + 70, + 255, + 74, + 203, + 218, + 206, + 207, + 112, + 180, + 47, + 181 + ], + "plaintext": [ + 34, + 20, + 79, + 193, + 47, + 123, + 197, + 82, + 43, + 136, + 183, + 108, + 141, + 237, + 28, + 118 + ], + "ciphertext": [ + 200, + 217, + 129, + 7, + 192, + 203, + 60, + 15, + 210, + 24, + 154, + 233, + 114, + 128, + 213, + 98 + ], + "tag": [ + 41, + 6, + 119, + 35, + 48, + 236, + 217, + 163, + 184, + 168, + 40, + 118, + 164, + 235, + 222, + 234 + ] } ] diff --git a/tests/unit_node/http_test.ts b/tests/unit_node/http_test.ts index e6c36eea19..f30a4a20a3 100644 --- a/tests/unit_node/http_test.ts +++ b/tests/unit_node/http_test.ts @@ -10,6 +10,7 @@ import http, { } from "node:http"; import url from "node:url"; import https from "node:https"; +import zlib from "node:zlib"; import net, { Socket } from "node:net"; import fs from "node:fs"; import { text } from "node:stream/consumers"; @@ -1823,3 +1824,60 @@ Deno.test("[node/http] ServerResponse socket", async () => { await promise; }); + +Deno.test("[node/http] decompress brotli response", { + permissions: { net: true }, +}, async () => { + let received = false; + const ac = new AbortController(); + const server = Deno.serve({ port: 5928, signal: ac.signal }, (_req) => { + received = true; + return Response.json([ + ["accept-language", "*"], + ["host", "localhost:3000"], + ["user-agent", "Deno/2.1.1"], + ], {}); + }); + const { promise, resolve, reject } = Promise.withResolvers(); + let body = ""; + + const request = http.get( + "http://localhost:5928/", + { + headers: { + "accept-encoding": "gzip, deflate, br, zstd", + }, + }, + (resp) => { + const decompress = zlib.createBrotliDecompress(); + resp.on("data", (chunk) => { + 
decompress.write(chunk); + }); + + resp.on("end", () => { + decompress.end(); + }); + + decompress.on("data", (chunk) => { + body += chunk; + }); + + decompress.on("end", () => { + resolve(); + }); + }, + ); + request.on("error", reject); + request.end(() => { + assert(received); + }); + + await promise; + ac.abort(); + await server.finished; + + assertEquals(JSON.parse(body), [["accept-language", "*"], [ + "host", + "localhost:3000", + ], ["user-agent", "Deno/2.1.1"]]); +}); diff --git a/tests/unit_node/inspector_test.ts b/tests/unit_node/inspector_test.ts index a53e977bb6..0eb3f5a07b 100644 --- a/tests/unit_node/inspector_test.ts +++ b/tests/unit_node/inspector_test.ts @@ -1,5 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. import inspector, { Session } from "node:inspector"; +import inspectorPromises, { + Session as SessionPromise, +} from "node:inspector/promises"; import { assertEquals } from "@std/assert/equals"; Deno.test("[node/inspector] - importing inspector works", () => { @@ -9,3 +12,11 @@ Deno.test("[node/inspector] - importing inspector works", () => { Deno.test("[node/inspector] - Session constructor should not throw", () => { new Session(); }); + +Deno.test("[node/inspector/promises] - importing inspector works", () => { + assertEquals(typeof inspectorPromises.open, "function"); +}); + +Deno.test("[node/inspector/promises] - Session constructor should not throw", () => { + new SessionPromise(); +}); diff --git a/tests/unit_node/worker_threads_test.ts b/tests/unit_node/worker_threads_test.ts index 808fd6116e..5f38d51d4d 100644 --- a/tests/unit_node/worker_threads_test.ts +++ b/tests/unit_node/worker_threads_test.ts @@ -841,3 +841,26 @@ Deno.test({ assertEquals(result, true); }, }); + +Deno.test("[node/worker_threads] Worker runs async ops correctly", async () => { + const recvMessage = Promise.withResolvers(); + const timer = setTimeout(() => recvMessage.reject(), 1000); + const worker = new workerThreads.Worker( + ` + import { parentPort } from "node:worker_threads"; + setTimeout(() => { + parentPort.postMessage("Hello from worker"); + }, 10); + `, + { eval: true }, + ); + + worker.on("message", (msg) => { + assertEquals(msg, "Hello from worker"); + worker.terminate(); + recvMessage.resolve(); + clearTimeout(timer); + }); + + await recvMessage.promise; +}); diff --git a/tests/util/server/src/npm.rs b/tests/util/server/src/npm.rs index 081989ddb5..0261b2532c 100644 --- a/tests/util/server/src/npm.rs +++ b/tests/util/server/src/npm.rs @@ -267,6 +267,7 @@ fn get_npm_package( let mut tarballs = HashMap::new(); let mut versions = serde_json::Map::new(); let mut latest_version = semver::Version::parse("0.0.0").unwrap(); + let mut dist_tags = serde_json::Map::new(); for entry in fs::read_dir(&package_folder)? 
{ let entry = entry?; let file_type = entry.file_type()?; @@ -345,6 +346,14 @@ fn get_npm_package( } } + if let Some(publish_config) = version_info.get("publishConfig") { + if let Some(tag) = publish_config.get("tag") { + if let Some(tag) = tag.as_str() { + dist_tags.insert(tag.to_string(), version.clone().into()); + } + } + } + versions.insert(version.clone(), version_info.into()); let version = semver::Version::parse(&version)?; if version.cmp(&latest_version).is_gt() { @@ -352,8 +361,9 @@ fn get_npm_package( } } - let mut dist_tags = serde_json::Map::new(); - dist_tags.insert("latest".to_string(), latest_version.to_string().into()); + if !dist_tags.contains_key("latest") { + dist_tags.insert("latest".to_string(), latest_version.to_string().into()); + } // create the registry file for this package let mut registry_file = serde_json::Map::new(); diff --git a/tests/util/server/src/servers/mod.rs b/tests/util/server/src/servers/mod.rs index 0b1d99aeb9..4345c27cde 100644 --- a/tests/util/server/src/servers/mod.rs +++ b/tests/util/server/src/servers/mod.rs @@ -577,11 +577,6 @@ async fn main_server( ); Ok(res) } - (_, "/bad_redirect") => { - let mut res = Response::new(empty_body()); - *res.status_mut() = StatusCode::FOUND; - Ok(res) - } (_, "/server_error") => { let mut res = Response::new(empty_body()); *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; diff --git a/tools/core_import_map.json b/tools/core_import_map.json index bc0674277e..d38221eb4c 100644 --- a/tools/core_import_map.json +++ b/tools/core_import_map.json @@ -250,6 +250,7 @@ "ext:deno_node/_util/std_fmt_colors.ts": "../ext/node/polyfills/_util/std_fmt_colors.ts", "ext:deno_telemetry/telemetry.ts": "../ext/deno_telemetry/telemetry.ts", "ext:deno_telemetry/util.ts": "../ext/deno_telemetry/util.ts", + "ext:cli/40_lint_selector.js": "../cli/js/40_lint_selector.js", "@std/archive": "../tests/util/std/archive/mod.ts", "@std/archive/tar": "../tests/util/std/archive/tar.ts", "@std/archive/untar": "../tests/util/std/archive/untar.ts",
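The bulk of the new test data above follows the NIST CAVP `gcmEncryptExtIV` shape: each entry in `gcmEncryptExtIV128.json` / `gcmEncryptExtIV256.json` carries `key`, `nonce`, `aad`, `plaintext`, `ciphertext`, and `tag` as plain byte arrays, and the added cases deliberately exercise unusual IV lengths (a 1-byte nonce and a 128-byte nonce). A minimal sketch of how a harness might replay one such vector through `node:crypto` — the `GcmVector` interface and `checkVector` helper are illustrative, not part of the patch:

```ts
import { createCipheriv } from "node:crypto";
import { Buffer } from "node:buffer";

// Shape of one entry in gcmEncryptExtIV128.json / gcmEncryptExtIV256.json.
interface GcmVector {
  key: number[];
  nonce: number[];
  aad: number[];
  plaintext: number[];
  ciphertext: number[];
  tag: number[];
}

// Hypothetical helper: encrypts the vector's plaintext and compares the
// result against the expected ciphertext and authentication tag.
function checkVector(v: GcmVector): void {
  // 16-byte keys are AES-128, 32-byte keys are AES-256; GCM itself accepts
  // arbitrary-length IVs, which is exactly what these vectors exercise.
  const algorithm = v.key.length === 16 ? "aes-128-gcm" : "aes-256-gcm";
  const cipher = createCipheriv(
    algorithm,
    Buffer.from(v.key),
    Buffer.from(v.nonce),
    { authTagLength: v.tag.length },
  );
  cipher.setAAD(Buffer.from(v.aad), { plaintextLength: v.plaintext.length });
  const ciphertext = Buffer.concat([
    cipher.update(Buffer.from(v.plaintext)),
    cipher.final(),
  ]);
  if (!ciphertext.equals(Buffer.from(v.ciphertext))) {
    throw new Error("ciphertext mismatch");
  }
  if (!cipher.getAuthTag().equals(Buffer.from(v.tag))) {
    throw new Error("auth tag mismatch");
  }
}
```

The decrypt direction would go through `createDecipheriv` with `setAuthTag` before `final()`, which is what makes these encrypt-direction vectors usable for both halves of the API.

The new brotli case in `http_test.ts` wires the response into `zlib.createBrotliDecompress()` by hand — `write()` on each `data` event, `end()` on `end`. The same flow can be expressed with `pipe()` and the `text()` consumer the file already imports; a condensed, hypothetical variant (port and URL mirror the test, the rest is illustrative):

```ts
import http from "node:http";
import zlib from "node:zlib";
import { text } from "node:stream/consumers";

// Request brotli-encoded content and decompress it as it streams in.
http.get(
  "http://localhost:5928/",
  { headers: { "accept-encoding": "br" } },
  (resp) => {
    // pipe() returns the destination stream; text() drains it to a string.
    text(resp.pipe(zlib.createBrotliDecompress()))
      .then((body) => console.log(JSON.parse(body)));
  },
);
```

Like the test itself, this assumes the server actually answers with `content-encoding: br`; neither version sniffs the response encoding before decompressing.

Finally, the `npm.rs` hunk changes how the test registry derives dist-tags: a version's `publishConfig.tag` (read from its `package.json`) now wins, and the highest semver is only synthesized as `latest` when no version claimed that tag explicitly. In TypeScript terms the logic is roughly the following sketch (the names and the simplified semver comparison are illustrative):

```ts
interface VersionInfo {
  version: string;
  publishConfig?: { tag?: string };
}

// Naive numeric semver comparison; prerelease/build metadata ignored.
function semverGt(a: string, b: string): boolean {
  const pa = a.split(".").map(Number);
  const pb = b.split(".").map(Number);
  for (let i = 0; i < 3; i++) {
    if ((pa[i] ?? 0) !== (pb[i] ?? 0)) return (pa[i] ?? 0) > (pb[i] ?? 0);
  }
  return false;
}

function buildDistTags(versions: VersionInfo[]): Record<string, string> {
  const distTags: Record<string, string> = {};
  let latest = "0.0.0";
  for (const info of versions) {
    // publishConfig.tag lets a fixture pin itself to e.g. "beta", or even
    // claim "latest" outright.
    const tag = info.publishConfig?.tag;
    if (tag !== undefined) distTags[tag] = info.version;
    if (semverGt(info.version, latest)) latest = info.version;
  }
  // Fall back to the computed highest version only when nothing published
  // an explicit "latest" tag — the same guard the Rust hunk adds.
  if (!("latest" in distTags)) distTags.latest = latest;
  return distTags;
}
```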