diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts
index 6fbcd8f242..bc3f15380b 100755
--- a/.github/workflows/ci.generate.ts
+++ b/.github/workflows/ci.generate.ts
@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 31;
+const cacheVersion = 32;
 
 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2f5d8f5b6d..cc1aa89669 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -184,8 +184,8 @@ jobs:
             ~/.cargo/registry/index
             ~/.cargo/registry/cache
             ~/.cargo/git/db
-          key: '31-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '31-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
+          key: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
         if: '!(matrix.skip)'
       - uses: dsherret/rust-toolchain-file@v1
         if: '!(matrix.skip)'
@@ -379,7 +379,7 @@ jobs:
             !./target/*/*.zip
             !./target/*/*.tar.gz
           key: never_saved
-          restore-keys: '31-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
       - name: Apply and update mtime cache
        if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
        uses: ./.github/mtime_cache
@@ -689,7 +689,7 @@ jobs:
             !./target/*/gn_root
             !./target/*/*.zip
             !./target/*/*.tar.gz
-          key: '31-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
   publish-canary:
     name: publish canary
     runs-on: ubuntu-24.04
diff --git a/Cargo.lock b/Cargo.lock
index b0b486bf46..26b84f9ce5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -39,9 +39,9 @@ dependencies = [
 
 [[package]]
 name = "aead-gcm-stream"
-version = "0.3.0"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4947a169074c7e038fa43051d1c4e073f4488b0e4b0a30658f1e1a1b06449ce8"
+checksum = "e70c8dec860340effb00f6945c49c0daaa6dac963602750db862eabb74bf7886"
 dependencies = [
  "aead",
  "aes",
@@ -677,6 +677,28 @@ dependencies = [
  "itoa",
 ]
 
+[[package]]
+name = "capacity_builder"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f2d24a6dcf0cd402a21b65d35340f3a49ff3475dc5fdac91d22d2733e6641c6"
+dependencies = [
+ "capacity_builder_macros",
+ "ecow",
+ "hipstr",
+ "itoa",
+]
+
+[[package]]
+name = "capacity_builder_macros"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b4a6cae9efc04cc6cbb8faf338d2c497c165c83e74509cf4dbedea948bbf6e5"
+dependencies = [
+ "quote",
+ "syn 2.0.87",
+]
+
 [[package]]
 name = "caseless"
 version = "0.2.1"
@@ -728,6 +750,12 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
 
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+
 [[package]]
 name = "chrono"
 version = "0.4.37"
@@ -839,6 +867,7 @@ dependencies = [
  "regex",
  "reqwest",
  "serde",
+ "sys_traits",
  "test_server",
  "tokio",
  "url",
@@ -1224,7 +1253,7 @@ dependencies = [
  "boxed_error",
  "bytes",
  "cache_control",
- "capacity_builder",
+ "capacity_builder 0.5.0",
  "chrono",
  "clap",
  "clap_complete",
@@ -1246,7 +1275,7 @@ dependencies = [
  "deno_npm",
  "deno_npm_cache",
  "deno_package_json",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_resolver",
  "deno_runtime",
  "deno_semver",
@@ -1312,6 +1341,7 @@ dependencies = [
  "spki",
  "sqlformat",
  "strsim",
+ "sys_traits",
  "tar",
  "tempfile",
  "test_server",
@@ -1391,7 +1421,7 @@ dependencies = [
 
 [[package]]
 name = "deno_bench_util"
-version = "0.177.0"
+version = "0.178.0"
 dependencies = [
  "bencher",
  "deno_core",
@@ -1400,7 +1430,7 @@ dependencies = [
 
 [[package]]
 name = "deno_broadcast_channel"
-version = "0.177.0"
+version = "0.178.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1411,7 +1441,7 @@ dependencies = [
 
 [[package]]
 name = "deno_cache"
-version = "0.115.0"
+version = "0.116.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1424,9 +1454,9 @@ dependencies = [
 
 [[package]]
 name = "deno_cache_dir"
-version = "0.15.0"
+version = "0.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54df1c5177ace01d92b872584ab9af8290681bb150fd9b423c37a494ad5ddbdc"
+checksum = "e73ed17f285731a23df9779ca1e0e721de866db6776ed919ebd9235e0a107c4c"
 dependencies = [
  "async-trait",
  "base32",
@@ -1437,7 +1467,7 @@ dependencies = [
  "data-url",
  "deno_error",
  "deno_media_type",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "http 1.1.0",
  "indexmap 2.3.0",
  "log",
@@ -1446,13 +1476,14 @@ dependencies = [
  "serde",
  "serde_json",
  "sha2",
+ "sys_traits",
  "thiserror 1.0.64",
  "url",
 ]
 
 [[package]]
 name = "deno_canvas"
-version = "0.52.0"
+version = "0.53.0"
 dependencies = [
  "deno_core",
  "deno_webgpu",
@@ -1463,12 +1494,12 @@ dependencies = [
 
 [[package]]
 name = "deno_config"
-version = "0.39.3"
-source = "git+https://github.com/denoland/deno_config.git?rev=0d588fb1831bf4d33d3279a1a75db6138d81a75b#0d588fb1831bf4d33d3279a1a75db6138d81a75b"
+version = "0.42.0"
+source = "git+https://github.com/denoland/deno_config.git?rev=4cbb63704442a7834dc6bed2e7e310a0d46ade09#4cbb63704442a7834dc6bed2e7e310a0d46ade09"
 dependencies = [
  "anyhow",
  "deno_package_json",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_semver",
  "glob",
  "ignore",
@@ -1480,22 +1511,23 @@ dependencies = [
  "phf",
  "serde",
  "serde_json",
+ "sys_traits",
  "thiserror 1.0.64",
  "url",
 ]
 
 [[package]]
 name = "deno_console"
-version = "0.183.0"
+version = "0.184.0"
 dependencies = [
  "deno_core",
 ]
 
 [[package]]
 name = "deno_core"
-version = "0.326.0"
+version = "0.327.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed157162dc5320a2b46ffeeaec24788339df0f2437cfaea78a8d82696715ad7f"
+checksum = "eaf8dff204b9c2415deb47b9f30d4d38b0925d0d88f1f9074e8e76f59e6d7ded"
 dependencies = [
  "anyhow",
  "az",
@@ -1503,7 +1535,7 @@ dependencies = [
  "bit-set",
  "bit-vec",
  "bytes",
- "capacity_builder",
+ "capacity_builder 0.1.3",
  "cooked-waker",
  "deno_core_icudata",
  "deno_ops",
@@ -1535,7 +1567,7 @@ checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695"
 
 [[package]]
 name = "deno_cron"
-version = "0.63.0"
+version = "0.64.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1548,7 +1580,7 @@ dependencies = [
 
 [[package]]
 name = "deno_crypto"
-version = "0.197.0"
+version = "0.198.0"
 dependencies = [
  "aes",
  "aes-gcm",
@@ -1594,7 +1626,7 @@ dependencies = [
  "comrak",
  "deno_ast",
  "deno_graph",
- "deno_path_util",
+ "deno_path_util 0.2.2",
  "handlebars",
  "html-escape",
  "import_map",
@@ -1638,13 +1670,13 @@ dependencies = [
 
 [[package]]
 name = "deno_fetch"
-version = "0.207.0"
+version = "0.208.0"
 dependencies = [
  "base64 0.21.7",
  "bytes",
  "data-url",
  "deno_core",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_permissions",
  "deno_tls",
  "dyn-clone",
@@ -1674,7 +1706,7 @@ dependencies = [
 
 [[package]]
 name = "deno_ffi"
-version = "0.170.0"
+version = "0.171.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
@@ -1694,22 +1726,24 @@ dependencies = [
 
 [[package]]
 name = "deno_fs"
-version = "0.93.0"
+version = "0.94.0"
 dependencies = [
  "async-trait",
  "base32",
  "boxed_error",
  "deno_core",
  "deno_io",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_permissions",
  "filetime",
+ "getrandom",
  "junction",
  "libc",
  "nix",
  "rand",
  "rayon",
  "serde",
+ "sys_traits",
  "thiserror 2.0.3",
  "winapi",
  "windows-sys 0.59.0",
@@ -1717,14 +1751,16 @@ dependencies = [
 
 [[package]]
 name = "deno_graph"
-version = "0.86.3"
+version = "0.86.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc78ed0b4bbcb4197300f0d6e7d1edc2d2c5019cdb9dedba7ff229158441885b"
+checksum = "83af194ca492ea7b624d21055f933676d3f3d27586de93be31c8f1babcc73510"
 dependencies = [
  "anyhow",
  "async-trait",
+ "capacity_builder 0.5.0",
  "data-url",
  "deno_ast",
+ "deno_path_util 0.3.0",
  "deno_semver",
  "deno_unsync",
  "encoding_rs",
@@ -1739,6 +1775,7 @@ dependencies = [
  "serde",
  "serde_json",
  "sha2",
+ "sys_traits",
  "thiserror 2.0.3",
  "twox-hash",
  "url",
@@ -1747,7 +1784,7 @@ dependencies = [
 
 [[package]]
 name = "deno_http"
-version = "0.181.0"
+version = "0.182.0"
 dependencies = [
  "async-compression",
  "async-trait",
@@ -1786,7 +1823,7 @@ dependencies = [
 
 [[package]]
 name = "deno_io"
-version = "0.93.0"
+version = "0.94.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1807,7 +1844,7 @@ dependencies = [
 
 [[package]]
 name = "deno_kv"
-version = "0.91.0"
+version = "0.92.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1817,7 +1854,7 @@ dependencies = [
  "chrono",
  "deno_core",
  "deno_fetch",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_permissions",
  "deno_tls",
  "denokv_proto",
@@ -1857,9 +1894,9 @@ dependencies = [
 
 [[package]]
 name = "deno_lockfile"
-version = "0.23.2"
+version = "0.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "559c19feb00af0c34f0bd4a20e56e12463fafd5c5069d6005f3ce33008027eea"
+checksum = "632e835a53ed667d62fdd766c5780fe8361c831d3e3fbf1a760a0b7896657587"
 dependencies = [
  "deno_semver",
  "serde",
@@ -1880,7 +1917,7 @@ dependencies = [
 
 [[package]]
 name = "deno_napi"
-version = "0.114.0"
+version = "0.115.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
@@ -1908,7 +1945,7 @@ dependencies = [
 
 [[package]]
 name = "deno_net"
-version = "0.175.0"
+version = "0.176.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
@@ -1916,6 +1953,7 @@ dependencies = [
  "hickory-proto",
  "hickory-resolver",
  "pin-project",
+ "quinn",
  "rustls-tokio-stream",
  "serde",
  "socket2",
@@ -1925,7 +1963,7 @@ dependencies = [
 
 [[package]]
 name = "deno_node"
-version = "0.121.0"
+version = "0.122.0"
 dependencies = [
  "aead-gcm-stream",
  "aes",
@@ -1945,7 +1983,7 @@ dependencies = [
  "deno_media_type",
  "deno_net",
  "deno_package_json",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_permissions",
  "deno_whoami",
  "der",
@@ -2017,12 +2055,13 @@ dependencies = [
 
 [[package]]
 name = "deno_npm"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2f125a5dba7839c46394a0a9c835da9fe60f5f412587ab4956a76492a1cc6a8"
+checksum = "5f818ad5dc4c206b50b5cfa6f10b4b94b127e15c8342c152768eba40c225ca23"
 dependencies = [
- "anyhow",
  "async-trait",
+ "capacity_builder 0.5.0",
+ "deno_error",
  "deno_lockfile",
  "deno_semver",
  "futures",
@@ -2044,7 +2083,9 @@ dependencies = [
  "boxed_error",
  "deno_cache_dir",
  "deno_core",
+ "deno_error",
  "deno_npm",
+ "deno_path_util 0.3.0",
  "deno_semver",
  "deno_unsync",
  "faster-hex",
@@ -2057,6 +2098,7 @@ dependencies = [
  "rand",
  "ring",
  "serde_json",
+ "sys_traits",
  "tar",
  "tempfile",
  "thiserror 2.0.3",
@@ -2065,9 +2107,9 @@ dependencies = [
 
 [[package]]
 name = "deno_ops"
-version = "0.202.0"
+version = "0.203.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dd8ac1af251e292388e516dd339b9a3b982a6d1e7f8644c08e34671ca39003c"
+checksum = "b146ca74cac431843486ade58e2accc16c11315fb2c6934590a52a73c56b7ec3"
 dependencies = [
  "proc-macro-rules",
  "proc-macro2",
@@ -2081,16 +2123,18 @@ dependencies = [
 
 [[package]]
 name = "deno_package_json"
-version = "0.2.1"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "80b0a3d81c592624a1ae15332a04b4dc2b7c163ef1dfc7c60171f736d1babdf5"
+checksum = "e1d3c0f699ba2040669204ce24ab73720499fc290af843e4ce0fc8a9b3d67735"
 dependencies = [
+ "boxed_error",
  "deno_error",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_semver",
  "indexmap 2.3.0",
  "serde",
  "serde_json",
+ "sys_traits",
  "thiserror 2.0.3",
  "url",
 ]
@@ -2107,13 +2151,26 @@ dependencies = [
  "url",
 ]
 
+[[package]]
+name = "deno_path_util"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "420e8211aaba7fde83ccaa9a5dad855c3b940ed988d70c95159acd600a70dc87"
+dependencies = [
+ "deno_error",
+ "percent-encoding",
+ "sys_traits",
+ "thiserror 2.0.3",
+ "url",
+]
+
 [[package]]
 name = "deno_permissions"
 version = "0.43.0"
 dependencies = [
- "capacity_builder",
+ "capacity_builder 0.5.0",
  "deno_core",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_terminal 0.2.0",
  "fqdn",
  "libc",
@@ -2137,9 +2194,10 @@ dependencies = [
  "deno_config",
  "deno_media_type",
  "deno_package_json",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_semver",
  "node_resolver",
+ "sys_traits",
  "test_server",
  "thiserror 2.0.3",
  "url",
@@ -2167,7 +2225,7 @@ dependencies = [
  "deno_napi",
  "deno_net",
  "deno_node",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "deno_permissions",
  "deno_telemetry",
  "deno_terminal 0.2.0",
@@ -2216,11 +2274,14 @@ dependencies = [
 
 [[package]]
 name = "deno_semver"
-version = "0.6.1"
+version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d1259270d66a5e6d29bb75c9289656541874f79ae9ff6c9f1c790846d5c07ba"
+checksum = "4775271f9b5602482698f76d24ea9ed8ba27af7f587a7e9a876916300c542435"
 dependencies = [
+ "capacity_builder 0.5.0",
  "deno_error",
+ "ecow",
+ "hipstr",
  "monch",
  "once_cell",
  "serde",
@@ -2248,7 +2309,7 @@ dependencies = [
 
 [[package]]
 name = "deno_telemetry"
-version = "0.5.0"
+version = "0.6.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -2289,7 +2350,7 @@ dependencies = [
 
 [[package]]
 name = "deno_tls"
-version = "0.170.0"
+version = "0.171.0"
 dependencies = [
  "deno_core",
  "deno_native_certs",
@@ -2339,7 +2400,7 @@ dependencies = [
 
 [[package]]
 name = "deno_url"
-version = "0.183.0"
+version = "0.184.0"
 dependencies = [
  "deno_bench_util",
  "deno_console",
@@ -2351,7 +2412,7 @@ dependencies = [
 
 [[package]]
 name = "deno_web"
-version = "0.214.0"
+version = "0.215.0"
 dependencies = [
  "async-trait",
  "base64-simd 0.8.0",
@@ -2373,7 +2434,7 @@ dependencies = [
 
 [[package]]
 name = "deno_webgpu"
-version = "0.150.0"
+version = "0.151.0"
 dependencies = [
  "deno_core",
  "raw-window-handle",
@@ -2386,7 +2447,7 @@ dependencies = [
 
 [[package]]
 name = "deno_webidl"
-version = "0.183.0"
+version = "0.184.0"
 dependencies = [
  "deno_bench_util",
  "deno_core",
@@ -2394,7 +2455,7 @@ dependencies = [
 
 [[package]]
 name = "deno_websocket"
-version = "0.188.0"
+version = "0.189.0"
 dependencies = [
  "bytes",
  "deno_core",
@@ -2416,7 +2477,7 @@ dependencies = [
 
 [[package]]
 name = "deno_webstorage"
-version = "0.178.0"
+version = "0.179.0"
 dependencies = [
  "deno_core",
  "deno_web",
@@ -2887,6 +2948,15 @@ dependencies = [
  "spki",
 ]
 
+[[package]]
+name = "ecow"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e42fc0a93992b20c58b99e59d61eaf1635a25bfbe49e4275c34ba0aee98119ba"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "ed25519"
 version = "2.2.3"
@@ -3823,6 +3893,17 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "hipstr"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97971ffc85d4c98de12e2608e992a43f5294ebb625fdb045b27c731b64c4c6d6"
+dependencies = [
+ "serde",
+ "serde_bytes",
+ "sptr",
+]
+
 [[package]]
 name = "hkdf"
 version = "0.12.4"
@@ -4023,9 +4104,9 @@ dependencies = [
 
 [[package]]
 name = "hyper-timeout"
-version = "0.5.1"
+version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793"
+checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
 dependencies = [
  "hyper 1.4.1",
  "hyper-util",
@@ -4930,7 +5011,7 @@ dependencies = [
 
 [[package]]
 name = "napi_sym"
-version = "0.113.0"
+version = "0.114.0"
 dependencies = [
  "quote",
  "serde",
@@ -4992,13 +5073,14 @@ dependencies = [
  "boxed_error",
  "deno_media_type",
  "deno_package_json",
- "deno_path_util",
+ "deno_path_util 0.3.0",
  "futures",
  "lazy-regex",
  "once_cell",
  "path-clean",
  "regex",
  "serde_json",
+ "sys_traits",
  "thiserror 2.0.3",
  "tokio",
  "url",
@@ -5906,49 +5988,54 @@ dependencies = [
 
 [[package]]
 name = "quinn"
-version = "0.11.2"
+version = "0.11.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad"
+checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef"
 dependencies = [
  "bytes",
  "pin-project-lite",
  "quinn-proto",
  "quinn-udp",
- "rustc-hash 1.1.0",
+ "rustc-hash 2.0.0",
  "rustls",
- "thiserror 1.0.64",
+ "socket2",
+ "thiserror 2.0.3",
  "tokio",
  "tracing",
 ]
 
 [[package]]
 name = "quinn-proto"
-version = "0.11.8"
+version = "0.11.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6"
+checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d"
 dependencies = [
  "bytes",
+ "getrandom",
  "rand",
  "ring",
  "rustc-hash 2.0.0",
  "rustls",
+ "rustls-pki-types",
  "slab",
- "thiserror 1.0.64",
+ "thiserror 2.0.3",
  "tinyvec",
  "tracing",
+ "web-time",
 ]
 
 [[package]]
 name = "quinn-udp"
-version = "0.5.2"
+version = "0.5.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46"
"52cd4b1eff68bf27940dd39811292c49e007f4d0b4c357358dc9b0197be6b527" dependencies = [ + "cfg_aliases 0.2.1", "libc", "once_cell", "socket2", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -6425,6 +6512,9 @@ name = "rustls-pki-types" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" +dependencies = [ + "web-time", +] [[package]] name = "rustls-tokio-stream" @@ -6708,9 +6798,9 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.235.0" +version = "0.236.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07afd8b67b4a442ecc2823038473ac0e9e5682de93c213323b60661afdd7eb4" +checksum = "e23b3abce64010612f88f4ff689a959736f99eb3dc0dbf1c7903434b8bd8cda5" dependencies = [ "num-bigint", "serde", @@ -6986,6 +7076,12 @@ dependencies = [ "der", ] +[[package]] +name = "sptr" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" + [[package]] name = "sqlformat" version = "0.3.2" @@ -7581,6 +7677,17 @@ dependencies = [ "syn 2.0.87", ] +[[package]] +name = "sys_traits" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5a12729b699487bb50163466e87be7197871d83d04cc6815d430cf7c893bbd7" +dependencies = [ + "getrandom", + "libc", + "windows-sys 0.59.0", +] + [[package]] name = "tagptr" version = "0.2.0" @@ -8521,6 +8628,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webpki-root-certs" version = "0.26.6" @@ -8548,7 +8665,7 @@ dependencies = [ "arrayvec", "bit-vec", "bitflags 2.6.0", - "cfg_aliases", + "cfg_aliases 0.1.1", "codespan-reporting", "document-features", "indexmap 2.3.0", @@ -8580,7 +8697,7 @@ dependencies = [ "bit-set", "bitflags 2.6.0", "block", - "cfg_aliases", + "cfg_aliases 0.1.1", "core-graphics-types", "d3d12", "glow", diff --git a/Cargo.toml b/Cargo.toml index a2d0685dd0..a41ff4c775 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,20 +48,20 @@ repository = "https://github.com/denoland/deno" [workspace.dependencies] deno_ast = { version = "=0.44.0", features = ["transpiling"] } -deno_core = { version = "0.326.0" } +deno_core = { version = "0.327.0" } -deno_bench_util = { version = "0.177.0", path = "./bench_util" } +deno_bench_util = { version = "0.178.0", path = "./bench_util" } # TODO(nayeemrmn): Use proper version when https://github.com/denoland/deno_config/pull/143 lands! 
-deno_config = { git = "https://github.com/denoland/deno_config.git", rev = "0d588fb1831bf4d33d3279a1a75db6138d81a75b", features = ["workspace", "sync"] }
-deno_lockfile = "=0.23.2"
+deno_config = { git = "https://github.com/denoland/deno_config.git", rev = "4cbb63704442a7834dc6bed2e7e310a0d46ade09", features = ["workspace", "sync"] }
+deno_lockfile = "=0.24.0"
 deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
-deno_npm = "=0.26.0"
-deno_path_util = "=0.2.2"
+deno_npm = "=0.27.0"
+deno_path_util = "=0.3.0"
 deno_permissions = { version = "0.43.0", path = "./runtime/permissions" }
 deno_runtime = { version = "0.192.0", path = "./runtime" }
-deno_semver = "=0.6.1"
+deno_semver = "=0.7.1"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.113.0", path = "./ext/napi/sym" }
+napi_sym = { version = "0.114.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
 
 denokv_proto = "0.8.4"
@@ -70,29 +70,29 @@ denokv_remote = "0.8.4"
 denokv_sqlite = { default-features = false, version = "0.8.4" }
 
 # exts
-deno_broadcast_channel = { version = "0.177.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.115.0", path = "./ext/cache" }
-deno_canvas = { version = "0.52.0", path = "./ext/canvas" }
-deno_console = { version = "0.183.0", path = "./ext/console" }
-deno_cron = { version = "0.63.0", path = "./ext/cron" }
-deno_crypto = { version = "0.197.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.207.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.170.0", path = "./ext/ffi" }
-deno_fs = { version = "0.93.0", path = "./ext/fs" }
-deno_http = { version = "0.181.0", path = "./ext/http" }
-deno_io = { version = "0.93.0", path = "./ext/io" }
-deno_kv = { version = "0.91.0", path = "./ext/kv" }
-deno_napi = { version = "0.114.0", path = "./ext/napi" }
-deno_net = { version = "0.175.0", path = "./ext/net" }
-deno_node = { version = "0.121.0", path = "./ext/node" }
-deno_telemetry = { version = "0.5.0", path = "./ext/telemetry" }
-deno_tls = { version = "0.170.0", path = "./ext/tls" }
-deno_url = { version = "0.183.0", path = "./ext/url" }
-deno_web = { version = "0.214.0", path = "./ext/web" }
-deno_webgpu = { version = "0.150.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.183.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.188.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.178.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.178.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.116.0", path = "./ext/cache" }
+deno_canvas = { version = "0.53.0", path = "./ext/canvas" }
+deno_console = { version = "0.184.0", path = "./ext/console" }
+deno_cron = { version = "0.64.0", path = "./ext/cron" }
+deno_crypto = { version = "0.198.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.208.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.171.0", path = "./ext/ffi" }
+deno_fs = { version = "0.94.0", path = "./ext/fs" }
+deno_http = { version = "0.182.0", path = "./ext/http" }
+deno_io = { version = "0.94.0", path = "./ext/io" }
+deno_kv = { version = "0.92.0", path = "./ext/kv" }
+deno_napi = { version = "0.115.0", path = "./ext/napi" }
+deno_net = { version = "0.176.0", path = "./ext/net" }
+deno_node = { version = "0.122.0", path = "./ext/node" }
+deno_telemetry = { version = "0.6.0", path = "./ext/telemetry" }
+deno_tls = { version = "0.171.0", path = "./ext/tls" }
+deno_url = { version = "0.184.0", path = "./ext/url" }
+deno_web = { version = "0.215.0", path = "./ext/web" }
+deno_webgpu = { version = "0.151.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.184.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.189.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.179.0", path = "./ext/webstorage" }
 
 # resolvers
 deno_npm_cache = { version = "0.3.0", path = "./resolvers/npm_cache" }
@@ -109,7 +109,7 @@ boxed_error = "0.2.3"
 brotli = "6.0.0"
 bytes = "1.4.0"
 cache_control = "=0.2.0"
-capacity_builder = "0.1.3"
+capacity_builder = "0.5.0"
 cbc = { version = "=0.1.2", features = ["alloc"] }
 # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
 # Instead use util::time::utc_now()
@@ -119,9 +119,9 @@ console_static_text = "=0.8.1"
 dashmap = "5.5.3"
 data-encoding = "2.3.3"
 data-url = "=0.3.1"
-deno_cache_dir = "=0.15.0"
+deno_cache_dir = "=0.16.0"
 deno_error = "=0.5.2"
-deno_package_json = { version = "0.2.1", default-features = false }
+deno_package_json = { version = "0.4.0", default-features = false }
 deno_unsync = "0.4.2"
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
@@ -194,6 +194,7 @@ slab = "0.4"
 smallvec = "1.8"
 socket2 = { version = "0.5.3", features = ["all"] }
 spki = "0.7.2"
+sys_traits = "=0.1.1"
 tar = "=0.4.40"
 tempfile = "3.4.0"
 termcolor = "1.1.3"
diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml
index 8a20f07638..014b74f264 100644
--- a/bench_util/Cargo.toml
+++ b/bench_util/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_bench_util"
-version = "0.177.0"
+version = "0.178.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index 70708002c7..2cf12f14d4 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -74,7 +74,7 @@ deno_config.workspace = true
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
 deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] }
 deno_error.workspace = true
-deno_graph = { version = "=0.86.3" }
+deno_graph = { version = "=0.86.6" }
 deno_lint = { version = "=0.68.2", features = ["docs"] }
 deno_lockfile.workspace = true
 deno_npm.workspace = true
@@ -158,6 +158,7 @@ shell-escape = "=0.1.5"
 spki = { version = "0.7", features = ["pem"] }
 sqlformat = "=0.3.2"
 strsim = "0.11.1"
+sys_traits = { workspace = true, features = ["libc", "real", "winapi"] }
 tar.workspace = true
 tempfile.workspace = true
 text-size = "=1.1.0"
diff --git a/cli/args/deno_json.rs b/cli/args/deno_json.rs
index 8853107eef..47dcbb91ea 100644
--- a/cli/args/deno_json.rs
+++ b/cli/args/deno_json.rs
@@ -8,62 +8,6 @@ use deno_semver::jsr::JsrDepPackageReq;
 use deno_semver::jsr::JsrPackageReqReference;
 use deno_semver::npm::NpmPackageReqReference;
 
-#[cfg(test)] // happens to only be used by the tests at the moment
-pub struct DenoConfigFsAdapter<'a>(
-  pub &'a dyn deno_runtime::deno_fs::FileSystem,
-);
-
-#[cfg(test)]
-impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
-  fn read_to_string_lossy(
-    &self,
-    path: &std::path::Path,
-  ) -> Result<std::borrow::Cow<'static, str>, std::io::Error> {
-    self
-      .0
-      .read_text_file_lossy_sync(path, None)
-      .map_err(|err| err.into_io_error())
-  }
-
-  fn stat_sync(
-    &self,
-    path: &std::path::Path,
-  ) -> Result<deno_config::fs::FsMetadata, std::io::Error> {
-    self
-      .0
-      .stat_sync(path)
-      .map(|stat| deno_config::fs::FsMetadata {
-        is_file: stat.is_file,
-        is_directory: stat.is_directory,
-        is_symlink: stat.is_symlink,
-      })
-      .map_err(|err| err.into_io_error())
-  }
-
-  fn read_dir(
-    &self,
-    path: &std::path::Path,
-  ) -> Result<Vec<deno_config::fs::FsDirEntry>, std::io::Error> {
-    self
-      .0
-      .read_dir_sync(path)
-      .map_err(|err| err.into_io_error())
-      .map(|entries| {
-        entries
-          .into_iter()
-          .map(|e| deno_config::fs::FsDirEntry {
-            path: path.join(e.name),
-            metadata: deno_config::fs::FsMetadata {
-              is_file: e.is_file,
-              is_directory: e.is_directory,
-              is_symlink: e.is_symlink,
-            },
-          })
-          .collect()
-      })
-  }
-}
-
 pub fn import_map_deps(
   import_map: &serde_json::Value,
 ) -> HashSet<JsrDepPackageReq> {
diff --git a/cli/args/flags.rs b/cli/args/flags.rs
index 8256da376f..1058dcea94 100644
--- a/cli/args/flags.rs
+++ b/cli/args/flags.rs
@@ -1006,6 +1006,8 @@ impl Flags {
     OtelConfig {
       tracing_enabled: !disabled
         && otel_var("OTEL_DENO_TRACING").unwrap_or(default),
+      metrics_enabled: !disabled
+        && otel_var("OTEL_DENO_METRICS").unwrap_or(default),
       console: match std::env::var("OTEL_DENO_CONSOLE").as_deref() {
         Ok(_) if disabled => OtelConsoleConfig::Ignore,
         Ok("ignore") => OtelConsoleConfig::Ignore,
diff --git a/cli/args/lockfile.rs b/cli/args/lockfile.rs
index 1075f93a6f..0648b6e5ed 100644
--- a/cli/args/lockfile.rs
+++ b/cli/args/lockfile.rs
@@ -12,12 +12,13 @@ use deno_core::parking_lot::MutexGuard;
 use deno_core::serde_json;
 use deno_lockfile::WorkspaceMemberConfig;
 use deno_package_json::PackageJsonDepValue;
+use deno_path_util::fs::atomic_write_file_with_retries;
+use deno_runtime::deno_fs::FsSysTraitsAdapter;
 use deno_runtime::deno_node::PackageJson;
 use deno_semver::jsr::JsrDepPackageReq;
 
 use crate::args::deno_json::import_map_deps;
 use crate::cache;
-use crate::util::fs::atomic_write_file_with_retries;
 use crate::Flags;
 
 use crate::args::DenoSubcommand;
@@ -91,8 +92,9 @@ impl CliLockfile {
     // do an atomic write to reduce the chance of multiple deno
     // processes corrupting the file
     atomic_write_file_with_retries(
+      &FsSysTraitsAdapter::new_real(),
       &lockfile.filename,
-      bytes,
+      &bytes,
       cache::CACHE_PERM,
     )
     .context("Failed writing lockfile.")?;
diff --git a/cli/args/mod.rs b/cli/args/mod.rs
index 3842afc44d..1354068ebc 100644
--- a/cli/args/mod.rs
+++ b/cli/args/mod.rs
@@ -30,7 +30,9 @@ use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
 use deno_npm::NpmSystemInfo;
 use deno_path_util::normalize_path;
+use deno_runtime::deno_fs::FsSysTraitsAdapter;
 use deno_semver::npm::NpmPackageReqReference;
+use deno_semver::StackString;
 use deno_telemetry::OtelConfig;
 use deno_telemetry::OtelRuntimeConfig;
 use import_map::resolve_import_map_value_from_specifier;
@@ -82,9 +84,9 @@ use std::net::SocketAddr;
 use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;
+use sys_traits::EnvHomeDir;
 use thiserror::Error;
 
-use crate::cache;
 use crate::cache::DenoDirProvider;
 use crate::file_fetcher::CliFileFetcher;
 use crate::util::fs::canonicalize_path_maybe_not_exists;
@@ -520,7 +522,7 @@ fn discover_npmrc(
   // TODO(bartlomieju): update to read both files - one in the project root and one and
   // home dir and then merge them.
   // 3. Try `.npmrc` in the user's home directory
-  if let Some(home_dir) = cache::home_dir() {
+  if let Some(home_dir) = sys_traits::impls::RealSys.env_home_dir() {
     match try_to_read_npmrc(&home_dir) {
       Ok(Some((source, path))) => {
         return try_to_parse_npmrc(source, &path).map(|r| (r, Some(path)));
@@ -821,7 +823,6 @@ impl CliOptions {
       log::debug!("package.json auto-discovery is disabled");
     }
     WorkspaceDiscoverOptions {
-      fs: Default::default(), // use real fs
       deno_json_cache: None,
       pkg_json_cache: Some(&node_resolver::PackageJsonThreadLocalCache),
       workspace_cache: None,
@@ -843,6 +844,7 @@ impl CliOptions {
       ConfigFlag::Discover => {
         if let Some(start_paths) = flags.config_path_args(&initial_cwd) {
           WorkspaceDirectory::discover(
+            &FsSysTraitsAdapter::new_real(),
             WorkspaceDiscoverStart::Paths(&start_paths),
             &resolve_workspace_discover_options(),
           )?
@@ -853,6 +855,7 @@ impl CliOptions {
       ConfigFlag::Path(path) => {
         let config_path = normalize_path(initial_cwd.join(path));
         WorkspaceDirectory::discover(
+          &FsSysTraitsAdapter::new_real(),
           WorkspaceDiscoverStart::ConfigFile(&config_path),
           &resolve_workspace_discover_options(),
         )?
@@ -984,24 +987,24 @@ impl CliOptions {
     // https://nodejs.org/api/process.html
     match target.as_str() {
       "aarch64-apple-darwin" => NpmSystemInfo {
-        os: "darwin".to_string(),
-        cpu: "arm64".to_string(),
+        os: "darwin".into(),
+        cpu: "arm64".into(),
       },
       "aarch64-unknown-linux-gnu" => NpmSystemInfo {
-        os: "linux".to_string(),
-        cpu: "arm64".to_string(),
+        os: "linux".into(),
+        cpu: "arm64".into(),
       },
       "x86_64-apple-darwin" => NpmSystemInfo {
-        os: "darwin".to_string(),
-        cpu: "x64".to_string(),
+        os: "darwin".into(),
+        cpu: "x64".into(),
       },
       "x86_64-unknown-linux-gnu" => NpmSystemInfo {
-        os: "linux".to_string(),
-        cpu: "x64".to_string(),
+        os: "linux".into(),
+        cpu: "x64".into(),
       },
       "x86_64-pc-windows-msvc" => NpmSystemInfo {
-        os: "win32".to_string(),
-        cpu: "x64".to_string(),
+        os: "win32".into(),
+        cpu: "x64".into(),
       },
       value => {
         log::warn!(
@@ -1396,9 +1399,9 @@ impl CliOptions {
 
     Ok(DenoLintConfig {
       default_jsx_factory: (!transpile_options.jsx_automatic)
-        .then(|| transpile_options.jsx_factory.clone()),
+        .then_some(transpile_options.jsx_factory),
       default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
-        .then(|| transpile_options.jsx_fragment_factory.clone()),
+        .then_some(transpile_options.jsx_fragment_factory),
     })
   }
 
@@ -1981,15 +1984,17 @@ pub fn has_flag_env_var(name: &str) -> bool {
 pub fn npm_pkg_req_ref_to_binary_command(
   req_ref: &NpmPackageReqReference,
 ) -> String {
-  let binary_name = req_ref.sub_path().unwrap_or(req_ref.req().name.as_str());
-  binary_name.to_string()
+  req_ref
+    .sub_path()
+    .map(|s| s.to_string())
+    .unwrap_or_else(|| req_ref.req().name.to_string())
 }
 
 pub fn config_to_deno_graph_workspace_member(
   config: &ConfigFile,
 ) -> Result<deno_graph::WorkspaceMember, AnyError> {
-  let name = match &config.json.name {
-    Some(name) => name.clone(),
+  let name: StackString = match &config.json.name {
+    Some(name) => name.as_str().into(),
     None => bail!("Missing 'name' field in config file."),
   };
   let version = match &config.json.version {
diff --git a/cli/args/package_json.rs b/cli/args/package_json.rs
index b0f0a2f9ba..50d1c04799 100644
--- a/cli/args/package_json.rs
+++ b/cli/args/package_json.rs
@@ -11,19 +11,20 @@ use deno_package_json::PackageJsonDepValueParseError;
 use deno_package_json::PackageJsonDepWorkspaceReq;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageReq;
+use deno_semver::StackString;
 use deno_semver::VersionReq;
 use thiserror::Error;
 
 #[derive(Debug)]
 pub struct InstallNpmRemotePkg {
-  pub alias: Option<String>,
+  pub alias: Option<StackString>,
   pub base_dir: PathBuf,
   pub req: PackageReq,
 }
 
 #[derive(Debug)]
 pub struct InstallNpmWorkspacePkg {
-  pub alias: Option<String>,
+  pub alias: Option<StackString>,
   pub target_dir: PathBuf,
 }
@@ -31,7 +32,7 @@ pub struct InstallNpmWorkspacePkg {
 #[error("Failed to install '{}'\n    at {}", alias, location)]
 pub struct PackageJsonDepValueParseWithLocationError {
   pub location: Url,
-  pub alias: String,
+  pub alias: StackString,
   #[source]
   pub source: PackageJsonDepValueParseError,
 }
@@ -100,10 +101,8 @@ impl NpmInstallDepsProvider {
         let mut pkg_pkgs = Vec::with_capacity(
           deps.dependencies.len() + deps.dev_dependencies.len(),
         );
-        for (alias, dep) in deps
-          .dependencies
-          .into_iter()
-          .chain(deps.dev_dependencies.into_iter())
+        for (alias, dep) in
+          deps.dependencies.iter().chain(deps.dev_dependencies.iter())
         {
           let dep = match dep {
             Ok(dep) => dep,
@@ -111,8 +110,8 @@ impl NpmInstallDepsProvider {
               pkg_json_dep_errors.push(
                 PackageJsonDepValueParseWithLocationError {
                   location: pkg_json.specifier(),
-                  alias,
-                  source: err,
+                  alias: alias.clone(),
+                  source: err.clone(),
                 },
               );
               continue;
@@ -121,28 +120,28 @@ impl NpmInstallDepsProvider {
           match dep {
             PackageJsonDepValue::Req(pkg_req) => {
               let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
-                pkg.matches_req(&pkg_req)
+                pkg.matches_req(pkg_req)
                 // do not resolve to the current package
                 && pkg.pkg_json.path != pkg_json.path
               });
 
              if let Some(pkg) = workspace_pkg {
                 workspace_pkgs.push(InstallNpmWorkspacePkg {
-                  alias: Some(alias),
+                  alias: Some(alias.clone()),
                   target_dir: pkg.pkg_json.dir_path().to_path_buf(),
                 });
               } else {
                 pkg_pkgs.push(InstallNpmRemotePkg {
-                  alias: Some(alias),
+                  alias: Some(alias.clone()),
                   base_dir: pkg_json.dir_path().to_path_buf(),
-                  req: pkg_req,
+                  req: pkg_req.clone(),
                 });
               }
             }
             PackageJsonDepValue::Workspace(workspace_version_req) => {
               let version_req = match workspace_version_req {
                 PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
-                  version_req
+                  version_req.clone()
                 }
                 PackageJsonDepWorkspaceReq::Tilde
                 | PackageJsonDepWorkspaceReq::Caret => {
@@ -150,10 +149,10 @@ impl NpmInstallDepsProvider {
               };
 
               if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
-                pkg.matches_name_and_version_req(&alias, &version_req)
+                pkg.matches_name_and_version_req(alias, &version_req)
               }) {
                 workspace_pkgs.push(InstallNpmWorkspacePkg {
-                  alias: Some(alias),
+                  alias: Some(alias.clone()),
                   target_dir: pkg.pkg_json.dir_path().to_path_buf(),
                 });
               }
diff --git a/cli/cache/deno_dir.rs b/cli/cache/deno_dir.rs
index 7b7059c224..d83ea8ebd5 100644
--- a/cli/cache/deno_dir.rs
+++ b/cli/cache/deno_dir.rs
@@ -1,5 +1,6 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+use deno_cache_dir::DenoDirResolutionError;
 use once_cell::sync::OnceCell;
 
 use super::DiskCache;
@@ -11,7 +12,7 @@ use std::path::PathBuf;
 /// where functionality wants to continue if the DENO_DIR can't be created.
 pub struct DenoDirProvider {
   maybe_custom_root: Option<PathBuf>,
-  deno_dir: OnceCell<Result<DenoDir, std::io::Error>>,
+  deno_dir: OnceCell<Result<DenoDir, DenoDirResolutionError>>,
 }
 
 impl DenoDirProvider {
@@ -22,12 +23,21 @@ impl DenoDirProvider {
     }
   }
 
-  pub fn get_or_create(&self) -> Result<&DenoDir, std::io::Error> {
+  pub fn get_or_create(&self) -> Result<&DenoDir, DenoDirResolutionError> {
     self
      .deno_dir
      .get_or_init(|| DenoDir::new(self.maybe_custom_root.clone()))
      .as_ref()
-      .map_err(|err| std::io::Error::new(err.kind(), err.to_string()))
+      .map_err(|err| match err {
+        DenoDirResolutionError::NoCacheOrHomeDir => {
+          DenoDirResolutionError::NoCacheOrHomeDir
+        }
+        DenoDirResolutionError::FailedCwd { source } => {
+          DenoDirResolutionError::FailedCwd {
+            source: std::io::Error::new(source.kind(), source.to_string()),
+          }
+        }
+      })
   }
 }
 
@@ -42,27 +52,13 @@ pub struct DenoDir {
 }
 
 impl DenoDir {
-  pub fn new(maybe_custom_root: Option<PathBuf>) -> std::io::Result<Self> {
-    let maybe_custom_root =
-      maybe_custom_root.or_else(|| env::var("DENO_DIR").map(String::into).ok());
-    let root: PathBuf = if let Some(root) = maybe_custom_root {
-      root
-    } else if let Some(cache_dir) = dirs::cache_dir() {
-      // We use the OS cache dir because all files deno writes are cache files
-      // Once that changes we need to start using different roots if DENO_DIR
-      // is not set, and keep a single one if it is.
-      cache_dir.join("deno")
-    } else if let Some(home_dir) = dirs::home_dir() {
-      // fallback path
-      home_dir.join(".deno")
-    } else {
-      panic!("Could not set the Deno root directory")
-    };
-    let root = if root.is_absolute() {
-      root
-    } else {
-      std::env::current_dir()?.join(root)
-    };
+  pub fn new(
+    maybe_custom_root: Option<PathBuf>,
+  ) -> Result<Self, DenoDirResolutionError> {
+    let root = deno_cache_dir::resolve_deno_dir(
+      &sys_traits::impls::RealSys,
+      maybe_custom_root,
+    )?;
     assert!(root.is_absolute());
     let gen_path = root.join("gen");
 
@@ -166,112 +162,3 @@ impl DenoDir {
     self.root.join("dl")
   }
 }
-
-/// To avoid the poorly managed dirs crate
-#[cfg(not(windows))]
-pub mod dirs {
-  use std::path::PathBuf;
-
-  pub fn cache_dir() -> Option<PathBuf> {
-    if cfg!(target_os = "macos") {
-      home_dir().map(|h| h.join("Library/Caches"))
-    } else {
-      std::env::var_os("XDG_CACHE_HOME")
-        .map(PathBuf::from)
-        .or_else(|| home_dir().map(|h| h.join(".cache")))
-    }
-  }
-
-  pub fn home_dir() -> Option<PathBuf> {
-    std::env::var_os("HOME")
-      .and_then(|h| if h.is_empty() { None } else { Some(h) })
-      .or_else(|| {
-        // TODO(bartlomieju):
-        #[allow(clippy::undocumented_unsafe_blocks)]
-        unsafe {
-          fallback()
-        }
-      })
-      .map(PathBuf::from)
-  }
-
-  // This piece of code is taken from the deprecated home_dir() function in Rust's standard library: https://github.com/rust-lang/rust/blob/master/src/libstd/sys/unix/os.rs#L579
-  // The same code is used by the dirs crate
-  unsafe fn fallback() -> Option<std::ffi::OsString> {
-    let amt = match libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) {
-      n if n < 0 => 512_usize,
-      n => n as usize,
-    };
-    let mut buf = Vec::with_capacity(amt);
-    let mut passwd: libc::passwd = std::mem::zeroed();
-    let mut result = std::ptr::null_mut();
-    match libc::getpwuid_r(
-      libc::getuid(),
-      &mut passwd,
-      buf.as_mut_ptr(),
-      buf.capacity(),
-      &mut result,
-    ) {
-      0 if !result.is_null() => {
-        let ptr = passwd.pw_dir as *const _;
-        let bytes = std::ffi::CStr::from_ptr(ptr).to_bytes().to_vec();
-        Some(std::os::unix::ffi::OsStringExt::from_vec(bytes))
-      }
-      _ => None,
-    }
-  }
-}
-
-/// To avoid the poorly managed dirs crate
-// Copied from
-// https://github.com/dirs-dev/dirs-sys-rs/blob/ec7cee0b3e8685573d847f0a0f60aae3d9e07fa2/src/lib.rs#L140-L164
-// MIT license. Copyright (c) 2018-2019 dirs-rs contributors
-#[cfg(windows)]
-pub mod dirs {
-  use std::ffi::OsString;
-  use std::os::windows::ffi::OsStringExt;
-  use std::path::PathBuf;
-  use winapi::shared::winerror;
-  use winapi::um::combaseapi;
-  use winapi::um::knownfolders;
-  use winapi::um::shlobj;
-  use winapi::um::shtypes;
-  use winapi::um::winbase;
-  use winapi::um::winnt;
-
-  fn known_folder(folder_id: shtypes::REFKNOWNFOLDERID) -> Option<PathBuf> {
-    // SAFETY: winapi calls
-    unsafe {
-      let mut path_ptr: winnt::PWSTR = std::ptr::null_mut();
-      let result = shlobj::SHGetKnownFolderPath(
-        folder_id,
-        0,
-        std::ptr::null_mut(),
-        &mut path_ptr,
-      );
-      if result == winerror::S_OK {
-        let len = winbase::lstrlenW(path_ptr) as usize;
-        let path = std::slice::from_raw_parts(path_ptr, len);
-        let ostr: OsString = OsStringExt::from_wide(path);
-        combaseapi::CoTaskMemFree(path_ptr as *mut winapi::ctypes::c_void);
-        Some(PathBuf::from(ostr))
-      } else {
-        None
-      }
-    }
-  }
-
-  pub fn cache_dir() -> Option<PathBuf> {
-    known_folder(&knownfolders::FOLDERID_LocalAppData)
-  }
-
-  pub fn home_dir() -> Option<PathBuf> {
-    if let Some(userprofile) = std::env::var_os("USERPROFILE") {
-      if !userprofile.is_empty() {
-        return Some(PathBuf::from(userprofile));
-      }
-    }
-
-    known_folder(&knownfolders::FOLDERID_Profile)
-  }
-}
diff --git a/cli/cache/disk_cache.rs b/cli/cache/disk_cache.rs
index 2fee1efe09..b22b2e3cc7 100644
--- a/cli/cache/disk_cache.rs
+++ b/cli/cache/disk_cache.rs
@@ -1,11 +1,12 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 use super::CACHE_PERM;
-use crate::util::fs::atomic_write_file_with_retries;
 
 use deno_cache_dir::url_to_filename;
 use deno_core::url::Host;
 use deno_core::url::Url;
+use deno_path_util::fs::atomic_write_file_with_retries;
+use deno_runtime::deno_fs::FsSysTraitsAdapter;
 use std::ffi::OsStr;
 use std::fs;
 use std::path::Component;
@@ -120,7 +121,12 @@ impl DiskCache {
 
   pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
     let path = self.location.join(filename);
-    atomic_write_file_with_retries(&path, data, CACHE_PERM)
+    atomic_write_file_with_retries(
+      &FsSysTraitsAdapter::new_real(),
+      &path,
+      data,
+      CACHE_PERM,
+    )
   }
 }
diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs
index 31968be0c2..c8b0eacaa4 100644
--- a/cli/cache/mod.rs
+++ b/cli/cache/mod.rs
@@ -5,9 +5,6 @@ use crate::file_fetcher::CliFetchNoFollowErrorKind;
 use crate::file_fetcher::CliFileFetcher;
 use crate::file_fetcher::FetchNoFollowOptions;
 use crate::file_fetcher::FetchPermissionsOptionRef;
-use crate::util::fs::atomic_write_file_with_retries;
-use crate::util::fs::atomic_write_file_with_retries_and_fs;
-use crate::util::fs::AtomicWriteFileFsAdapter;
 
 use deno_ast::MediaType;
 use deno_cache_dir::file_fetcher::CacheSetting;
@@ -21,15 +18,12 @@ use deno_graph::source::CacheInfo;
 use deno_graph::source::LoadFuture;
 use deno_graph::source::LoadResponse;
 use deno_graph::source::Loader;
-use deno_runtime::deno_fs;
+use deno_runtime::deno_fs::FsSysTraitsAdapter;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use node_resolver::InNpmPackageChecker;
-use std::borrow::Cow;
 use std::collections::HashMap;
-use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;
-use std::time::SystemTime;
 
 mod cache_db;
 mod caches;
@@ -50,7 +44,6 @@ pub use caches::Caches;
 pub use check::TypeCheckCache;
 pub use code_cache::CodeCache;
 pub use common::FastInsecureHasher;
-pub use deno_dir::dirs::home_dir;
 pub use deno_dir::DenoDir;
 pub use deno_dir::DenoDirProvider;
 pub use disk_cache::DiskCache;
@@ -63,121 +56,12 @@ pub use parsed_source::LazyGraphSourceParser;
 pub use parsed_source::ParsedSourceCache;
 
 /// Permissions used to save a file in the disk caches.
-pub const CACHE_PERM: u32 = 0o644;
+pub use deno_cache_dir::CACHE_PERM;
 
-#[derive(Debug, Clone)]
-pub struct RealDenoCacheEnv;
-
-impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
-  fn read_file_bytes(
-    &self,
-    path: &Path,
-  ) -> std::io::Result<Cow<'static, [u8]>> {
-    std::fs::read(path).map(Cow::Owned)
-  }
-
-  fn atomic_write_file(
-    &self,
-    path: &Path,
-    bytes: &[u8],
-  ) -> std::io::Result<()> {
-    atomic_write_file_with_retries(path, bytes, CACHE_PERM)
-  }
-
-  fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
-    crate::util::fs::canonicalize_path(path)
-  }
-
-  fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
-    std::fs::create_dir_all(path)
-  }
-
-  fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
-    match std::fs::metadata(path) {
-      Ok(metadata) => Ok(Some(
-        metadata.modified().unwrap_or_else(|_| SystemTime::now()),
-      )),
-      Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
-      Err(err) => Err(err),
-    }
-  }
-
-  fn is_file(&self, path: &Path) -> bool {
-    path.is_file()
-  }
-
-  fn time_now(&self) -> SystemTime {
-    SystemTime::now()
-  }
-}
-
-#[derive(Debug, Clone)]
-pub struct DenoCacheEnvFsAdapter<'a>(
-  pub &'a dyn deno_runtime::deno_fs::FileSystem,
-);
-
-impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
-  fn read_file_bytes(
-    &self,
-    path: &Path,
-  ) -> std::io::Result<Cow<'static, [u8]>> {
-    self
-      .0
-      .read_file_sync(path, None)
-      .map_err(|err| err.into_io_error())
-  }
-
-  fn atomic_write_file(
-    &self,
-    path: &Path,
-    bytes: &[u8],
-  ) -> std::io::Result<()> {
-    atomic_write_file_with_retries_and_fs(
-      &AtomicWriteFileFsAdapter {
-        fs: self.0,
-        write_mode: CACHE_PERM,
-      },
-      path,
-      bytes,
-    )
-  }
-
-  fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
-    self.0.realpath_sync(path).map_err(|e| e.into_io_error())
-  }
-
-  fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
-    self
-      .0
-      .mkdir_sync(path, true, None)
-      .map_err(|e| e.into_io_error())
-  }
-
-  fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
-    self
-      .0
-      .stat_sync(path)
-      .map(|stat| {
-        stat
-          .mtime
-          .map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
-      })
-      .map_err(|e| e.into_io_error())
-  }
-
-  fn is_file(&self, path: &Path) -> bool {
-    self.0.is_file_sync(path)
-  }
-
-  fn time_now(&self) -> SystemTime {
-    SystemTime::now()
-  }
-}
-
-pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
-pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
+pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<FsSysTraitsAdapter>;
+pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<FsSysTraitsAdapter>;
 pub type LocalLspHttpCache =
-  deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
+  deno_cache_dir::LocalLspHttpCache<FsSysTraitsAdapter>;
 
 pub use deno_cache_dir::HttpCache;
@@ -192,11 +76,11 @@ pub struct FetchCacherOptions {
   pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
   file_fetcher: Arc<CliFileFetcher>,
-  fs: Arc<dyn deno_fs::FileSystem>,
   global_http_cache: Arc<GlobalHttpCache>,
   in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
   module_info_cache: Arc<ModuleInfoCache>,
   permissions: PermissionsContainer,
+  sys: FsSysTraitsAdapter,
   is_deno_publish: bool,
   cache_info_enabled: bool,
 }
@@ -204,18 +88,18 @@ impl FetchCacher {
   pub fn new(
     file_fetcher: Arc<CliFileFetcher>,
-    fs: Arc<dyn deno_fs::FileSystem>,
     global_http_cache: Arc<GlobalHttpCache>,
     in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
     module_info_cache: Arc<ModuleInfoCache>,
+    sys: FsSysTraitsAdapter,
     options: FetchCacherOptions,
   ) -> Self {
     Self {
       file_fetcher,
-      fs,
      global_http_cache,
       in_npm_pkg_checker,
       module_info_cache,
+      sys,
       file_header_overrides: options.file_header_overrides,
       permissions: options.permissions,
       is_deno_publish: options.is_deno_publish,
@@ -277,9 +161,8 @@ impl Loader for FetchCacher {
       // symlinked to `/my-project-2/node_modules`), so first we checked if the path
       // is in a node_modules dir to avoid needlessly canonicalizing, then now compare
      // against the canonicalized specifier.
-      let specifier = crate::node::resolve_specifier_into_node_modules(
-        specifier,
-        self.fs.as_ref(),
+      let specifier = node_resolver::resolve_specifier_into_node_modules(
+        &self.sys, specifier,
       );
       if self.in_npm_pkg_checker.in_npm_package(&specifier) {
         return Box::pin(futures::future::ready(Ok(Some(
diff --git a/cli/emit.rs b/cli/emit.rs
index 3cd23b7abb..733a89d832 100644
--- a/cli/emit.rs
+++ b/cli/emit.rs
@@ -5,6 +5,7 @@ use crate::cache::FastInsecureHasher;
 use crate::cache::ParsedSourceCache;
 use crate::resolver::CjsTracker;
 
+use deno_ast::EmittedSourceText;
 use deno_ast::ModuleKind;
 use deno_ast::SourceMapOption;
 use deno_ast::SourceRange;
@@ -132,6 +133,7 @@ impl Emitter {
             &transpile_and_emit_options.0,
             &transpile_and_emit_options.1,
           )
+          .map(|r| r.text)
         }
       })
       .await
@@ -166,7 +168,8 @@ impl Emitter {
           source.clone(),
           &self.transpile_and_emit_options.0,
           &self.transpile_and_emit_options.1,
-        )?;
+        )?
+        .text;
         helper.post_emit_parsed_source(
           specifier,
           &transpiled_source,
@@ -177,6 +180,31 @@ impl Emitter {
     }
   }
 
+  pub fn emit_parsed_source_for_deno_compile(
+    &self,
+    specifier: &ModuleSpecifier,
+    media_type: MediaType,
+    module_kind: deno_ast::ModuleKind,
+    source: &Arc<str>,
+  ) -> Result<(String, String), AnyError> {
+    let mut emit_options = self.transpile_and_emit_options.1.clone();
+    emit_options.inline_sources = false;
+    emit_options.source_map = SourceMapOption::Separate;
+    // strip off the path to have more deterministic builds as we don't care
+    // about the source name because we manually provide the source map to v8
+    emit_options.source_map_base = Some(deno_path_util::url_parent(specifier));
+    let source = EmitParsedSourceHelper::transpile(
+      &self.parsed_source_cache,
+      specifier,
+      media_type,
+      module_kind,
+      source.clone(),
+      &self.transpile_and_emit_options.0,
+      &emit_options,
+    )?;
+    Ok((source.text, source.source_map.unwrap()))
+  }
+
   /// Expects a file URL, panics otherwise.
   pub async fn load_and_emit_for_hmr(
     &self,
@@ -282,7 +310,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
     source: Arc<str>,
     transpile_options: &deno_ast::TranspileOptions,
     emit_options: &deno_ast::EmitOptions,
-  ) -> Result<String, AnyError> {
+  ) -> Result<EmittedSourceText, AnyError> {
     // nothing else needs the parsed source at this point, so remove from
     // the cache in order to not transpile owned
     let parsed_source = parsed_source_cache
@@ -302,8 +330,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
         source
       }
     };
-    debug_assert!(transpiled_source.source_map.is_none());
-    Ok(transpiled_source.text)
+    Ok(transpiled_source)
   }
 
   pub fn post_emit_parsed_source(
diff --git a/cli/factory.rs b/cli/factory.rs
index c92f3112a5..5d2aeaa582 100644
--- a/cli/factory.rs
+++ b/cli/factory.rs
@@ -12,7 +12,6 @@ use crate::args::StorageKeyResolver;
 use crate::args::TsConfigType;
 use crate::cache::Caches;
 use crate::cache::CodeCache;
-use crate::cache::DenoCacheEnvFsAdapter;
 use crate::cache::DenoDir;
 use crate::cache::DenoDirProvider;
 use crate::cache::EmitCache;
@@ -44,7 +43,6 @@ use crate::npm::CliNpmResolverManagedSnapshotOption;
 use crate::npm::CreateInNpmPkgCheckerOptions;
 use crate::resolver::CjsTracker;
 use crate::resolver::CliDenoResolver;
-use crate::resolver::CliDenoResolverFs;
 use crate::resolver::CliNpmReqResolver;
 use crate::resolver::CliResolver;
 use crate::resolver::CliResolverOptions;
@@ -88,9 +86,10 @@ use deno_resolver::npm::NpmReqResolverOptions;
 use deno_resolver::DenoResolverOptions;
 use deno_resolver::NodeAndNpmReqResolver;
 use deno_runtime::deno_fs;
-use deno_runtime::deno_node::DenoFsNodeResolverEnv;
+use deno_runtime::deno_fs::FsSysTraitsAdapter;
 use deno_runtime::deno_node::NodeResolver;
 use deno_runtime::deno_node::PackageJsonResolver;
+use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
 use deno_runtime::deno_permissions::Permissions;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_runtime::deno_permissions::PermissionsOptions;
@@ -331,8 +330,8 @@ impl CliFactory {
   pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> {
     self.services.global_http_cache.get_or_try_init(|| {
       Ok(Arc::new(GlobalHttpCache::new(
+        FsSysTraitsAdapter(self.fs().clone()),
         self.deno_dir()?.remote_folder_path(),
-        crate::cache::RealDenoCacheEnv,
       )))
     })
   }
@@ -409,7 +408,7 @@ impl CliFactory {
       let global_path = self.deno_dir()?.npm_folder_path();
       let cli_options = self.cli_options()?;
       Ok(Arc::new(NpmCacheDir::new(
-        &DenoCacheEnvFsAdapter(fs.as_ref()),
+        &FsSysTraitsAdapter(fs.clone()),
         global_path,
         cli_options.npmrc().get_all_known_registries_urls(),
       )))
@@ -429,7 +428,7 @@ impl CliFactory {
       create_cli_npm_resolver(if cli_options.use_byonm() {
         CliNpmResolverCreateOptions::Byonm(
           CliByonmNpmResolverCreateOptions {
-            fs: CliDenoResolverFs(fs.clone()),
+            sys: FsSysTraitsAdapter(fs.clone()),
             pkg_json_resolver: self.pkg_json_resolver().clone(),
             root_node_modules_dir: Some(
               match cli_options.node_modules_dir_path() {
@@ -447,6 +446,13 @@ impl CliFactory {
       } else {
         CliNpmResolverCreateOptions::Managed(
           CliManagedNpmResolverCreateOptions {
+            http_client_provider: self.http_client_provider().clone(),
+            npm_install_deps_provider: Arc::new(
+              NpmInstallDepsProvider::from_workspace(
+                cli_options.workspace(),
+              ),
+            ),
+            sys: FsSysTraitsAdapter(self.fs().clone()),
            snapshot: match cli_options.resolve_npm_resolution_snapshot()? {
               Some(snapshot) => {
                 CliNpmResolverManagedSnapshotOption::Specified(Some(
@@ -465,19 +471,12 @@ impl CliFactory {
               },
             },
             maybe_lockfile: cli_options.maybe_lockfile().cloned(),
-            fs: fs.clone(),
-            http_client_provider: self.http_client_provider().clone(),
             npm_cache_dir: self.npm_cache_dir()?.clone(),
             cache_setting: cli_options.cache_setting(),
             text_only_progress_bar: self.text_only_progress_bar().clone(),
             maybe_node_modules_path: cli_options
               .node_modules_dir_path()
               .cloned(),
-            npm_install_deps_provider: Arc::new(
-              NpmInstallDepsProvider::from_workspace(
-                cli_options.workspace(),
-              ),
-            ),
             npm_system_info: cli_options.npm_system_info(),
             npmrc: cli_options.npmrc().clone(),
             lifecycle_scripts: cli_options.lifecycle_scripts_config(),
@@ -500,7 +499,7 @@ impl CliFactory {
       .get_or_try_init(|| {
         Ok(self.cli_options()?.unstable_sloppy_imports().then(|| {
           Arc::new(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
-            self.fs().clone(),
+            FsSysTraitsAdapter(self.fs().clone()),
           )))
         }))
       })
@@ -668,14 +667,15 @@ impl CliFactory {
      .get_or_try_init_async(
        async {
          Ok(Arc::new(NodeResolver::new(
-            DenoFsNodeResolverEnv::new(self.fs().clone()),
            self.in_npm_pkg_checker()?.clone(),
+            RealIsBuiltInNodeModuleChecker,
            self
              .npm_resolver()
              .await?
              .clone()
              .into_npm_pkg_folder_resolver(),
            self.pkg_json_resolver().clone(),
+            FsSysTraitsAdapter(self.fs().clone()),
          )))
        }
        .boxed_local(),
@@ -703,7 +703,6 @@ impl CliFactory {
 
         Ok(Arc::new(NodeCodeTranslator::new(
           cjs_esm_analyzer,
-          DenoFsNodeResolverEnv::new(self.fs().clone()),
           self.in_npm_pkg_checker()?.clone(),
           node_resolver,
           self
@@ -712,6 +711,7 @@ impl CliFactory {
             .clone()
             .into_npm_pkg_folder_resolver(),
           self.pkg_json_resolver().clone(),
+          FsSysTraitsAdapter(self.fs().clone()),
         )))
       })
       .await
@@ -727,7 +727,7 @@ impl CliFactory {
     let npm_resolver = self.npm_resolver().await?;
     Ok(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
       byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
-      fs: CliDenoResolverFs(self.fs().clone()),
+      sys: FsSysTraitsAdapter(self.fs().clone()),
       in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
       node_resolver: self.node_resolver().await?.clone(),
       npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
@@ -738,7 +738,7 @@ impl CliFactory {
 
   pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> {
     self.services.pkg_json_resolver.get_or_init(|| {
-      Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new(
+      Arc::new(PackageJsonResolver::new(FsSysTraitsAdapter(
        self.fs().clone(),
      )))
    })
@@ -778,7 +778,6 @@ impl CliFactory {
       self.cjs_tracker()?.clone(),
       cli_options.clone(),
       self.file_fetcher()?.clone(),
-      self.fs().clone(),
       self.global_http_cache()?.clone(),
       self.in_npm_pkg_checker()?.clone(),
       cli_options.maybe_lockfile().cloned(),
@@ -788,6 +787,7 @@ impl CliFactory {
       self.parsed_source_cache().clone(),
       self.resolver().await?.clone(),
       self.root_permissions_container()?.clone(),
+      FsSysTraitsAdapter(self.fs().clone()),
     )))
   })
   .await
@@ -973,7 +973,6 @@ impl CliFactory {
         None
       },
       self.emitter()?.clone(),
-      fs.clone(),
       in_npm_pkg_checker.clone(),
       self.main_module_graph_container().await?.clone(),
       self.module_load_preparer().await?.clone(),
@@ -988,6 +987,7 @@ impl CliFactory {
         ),
         self.parsed_source_cache().clone(),
         self.resolver().await?.clone(),
+        FsSysTraitsAdapter(self.fs().clone()),
       )),
       node_resolver.clone(),
       npm_resolver.clone(),
diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
index 1b286c76b7..38f3dd1847 100644
--- a/cli/file_fetcher.rs
+++ b/cli/file_fetcher.rs
@@ -1,11 +1,8 @@
authors. All rights reserved. MIT license. -use crate::cache::HttpCache; -use crate::cache::RealDenoCacheEnv; -use crate::colors; -use crate::http_util::get_response_body_with_progress; -use crate::http_util::HttpClientProvider; -use crate::util::progress_bar::ProgressBar; +use std::borrow::Cow; +use std::collections::HashMap; +use std::sync::Arc; use boxed_error::Boxed; use deno_ast::MediaType; @@ -27,7 +24,7 @@ use deno_core::url::Url; use deno_core::ModuleSpecifier; use deno_error::JsError; use deno_graph::source::LoaderChecksum; - +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_permissions::CheckSpecifierKind; use deno_runtime::deno_permissions::PermissionCheckError; use deno_runtime::deno_permissions::PermissionsContainer; @@ -35,12 +32,14 @@ use deno_runtime::deno_web::BlobStore; use http::header; use http::HeaderMap; use http::StatusCode; -use std::borrow::Cow; -use std::collections::HashMap; -use std::env; -use std::sync::Arc; use thiserror::Error; +use crate::cache::HttpCache; +use crate::colors; +use crate::http_util::get_response_body_with_progress; +use crate::http_util::HttpClientProvider; +use crate::util::progress_bar::ProgressBar; + #[derive(Debug, Clone, Eq, PartialEq)] pub struct TextDecodedFile { pub media_type: MediaType, @@ -268,7 +267,7 @@ pub struct FetchNoFollowOptions<'a> { type DenoCacheDirFileFetcher = deno_cache_dir::file_fetcher::FileFetcher< BlobStoreAdapter, - RealDenoCacheEnv, + FsSysTraitsAdapter, HttpClientAdapter, >; @@ -290,9 +289,11 @@ impl CliFileFetcher { download_log_level: log::Level, ) -> Self { let memory_files = Arc::new(MemoryFiles::default()); + let sys = FsSysTraitsAdapter::new_real(); + let auth_tokens = AuthTokens::new_from_sys(&sys); let file_fetcher = DenoCacheDirFileFetcher::new( BlobStoreAdapter(blob_store), - RealDenoCacheEnv, + sys, http_cache, HttpClientAdapter { http_client_provider: http_client_provider.clone(), @@ -303,7 +304,7 @@ impl CliFileFetcher { FileFetcherOptions { allow_remote, cache_setting, - auth_tokens: AuthTokens::new(env::var("DENO_AUTH_TOKENS").ok()), + auth_tokens, }, ); Self { @@ -497,7 +498,6 @@ fn validate_scheme(specifier: &Url) -> Result<(), UnsupportedSchemeError> { #[cfg(test)] mod tests { use crate::cache::GlobalHttpCache; - use crate::cache::RealDenoCacheEnv; use crate::http_util::HttpClientProvider; use super::*; @@ -538,7 +538,10 @@ mod tests { let temp_dir = maybe_temp_dir.unwrap_or_default(); let location = temp_dir.path().join("remote").to_path_buf(); let blob_store: Arc = Default::default(); - let cache = Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)); + let cache = Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), + location, + )); let file_fetcher = CliFileFetcher::new( cache.clone(), Arc::new(HttpClientProvider::new(None, None)), @@ -752,8 +755,8 @@ mod tests { let location = temp_dir.path().join("remote").to_path_buf(); let file_fetcher = CliFileFetcher::new( Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), location, - crate::cache::RealDenoCacheEnv, )), Arc::new(HttpClientProvider::new(None, None)), Default::default(), @@ -781,8 +784,8 @@ mod tests { resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap(); let http_cache = Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), location.clone(), - crate::cache::RealDenoCacheEnv, )); let file_modified_01 = { let file_fetcher = CliFileFetcher::new( @@ -808,8 +811,8 @@ mod tests { let file_modified_02 = { let file_fetcher = CliFileFetcher::new( 
Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), location, - crate::cache::RealDenoCacheEnv, )), Arc::new(HttpClientProvider::new(None, None)), Default::default(), @@ -938,8 +941,8 @@ mod tests { let redirected_specifier = resolve_url("http://localhost:4546/subdir/mismatch_ext.ts").unwrap(); let http_cache = Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), location.clone(), - crate::cache::RealDenoCacheEnv, )); let metadata_file_modified_01 = { @@ -1073,8 +1076,8 @@ mod tests { let location = temp_dir.path().join("remote").to_path_buf(); let file_fetcher = CliFileFetcher::new( Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), location, - crate::cache::RealDenoCacheEnv, )), Arc::new(HttpClientProvider::new(None, None)), Default::default(), @@ -1110,7 +1113,10 @@ mod tests { let temp_dir = TempDir::new(); let location = temp_dir.path().join("remote").to_path_buf(); let file_fetcher_01 = CliFileFetcher::new( - Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)), + Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), + location.clone(), + )), Arc::new(HttpClientProvider::new(None, None)), Default::default(), None, @@ -1119,7 +1125,10 @@ mod tests { log::Level::Info, ); let file_fetcher_02 = CliFileFetcher::new( - Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)), + Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), + location, + )), Arc::new(HttpClientProvider::new(None, None)), Default::default(), None, diff --git a/cli/graph_util.rs b/cli/graph_util.rs index cbf0508ce4..b64fce9e5f 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -1,5 +1,41 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use std::error::Error; +use std::ops::Deref; +use std::path::PathBuf; +use std::sync::Arc; + +use deno_config::deno_json::JsxImportSourceConfig; +use deno_config::workspace::JsrPackageConfig; +use deno_core::anyhow::bail; +use deno_core::error::custom_error; +use deno_core::error::AnyError; +use deno_core::parking_lot::Mutex; +use deno_core::ModuleSpecifier; +use deno_graph::source::Loader; +use deno_graph::source::LoaderChecksum; +use deno_graph::source::ResolutionKind; +use deno_graph::source::ResolveError; +use deno_graph::FillFromLockfileOptions; +use deno_graph::GraphKind; +use deno_graph::JsrLoadError; +use deno_graph::ModuleError; +use deno_graph::ModuleGraph; +use deno_graph::ModuleGraphError; +use deno_graph::ModuleLoadError; +use deno_graph::ResolutionError; +use deno_graph::SpecifierError; +use deno_graph::WorkspaceFastCheckOption; +use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; +use deno_runtime::deno_fs::FsSysTraitsAdapter; +use deno_runtime::deno_node; +use deno_runtime::deno_permissions::PermissionsContainer; +use deno_semver::jsr::JsrDepPackageReq; +use deno_semver::package::PackageNv; +use deno_semver::SmallStackString; +use import_map::ImportMapError; +use node_resolver::InNpmPackageChecker; + use crate::args::config_to_deno_graph_workspace_member; use crate::args::jsr_url; use crate::args::CliLockfile; @@ -22,40 +58,6 @@ use crate::resolver::SloppyImportsCachedFs; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::file_watcher::WatcherCommunicator; -use deno_config::deno_json::JsxImportSourceConfig; -use deno_config::workspace::JsrPackageConfig; -use deno_core::anyhow::bail; -use deno_graph::source::LoaderChecksum; -use deno_graph::source::ResolutionKind; -use deno_graph::FillFromLockfileOptions; -use 
deno_graph::JsrLoadError; -use deno_graph::ModuleLoadError; -use deno_graph::WorkspaceFastCheckOption; - -use deno_core::error::custom_error; -use deno_core::error::AnyError; -use deno_core::parking_lot::Mutex; -use deno_core::ModuleSpecifier; -use deno_graph::source::Loader; -use deno_graph::source::ResolveError; -use deno_graph::GraphKind; -use deno_graph::ModuleError; -use deno_graph::ModuleGraph; -use deno_graph::ModuleGraphError; -use deno_graph::ResolutionError; -use deno_graph::SpecifierError; -use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; -use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node; -use deno_runtime::deno_permissions::PermissionsContainer; -use deno_semver::jsr::JsrDepPackageReq; -use deno_semver::package::PackageNv; -use import_map::ImportMapError; -use node_resolver::InNpmPackageChecker; -use std::error::Error; -use std::ops::Deref; -use std::path::PathBuf; -use std::sync::Arc; #[derive(Clone)] pub struct GraphValidOptions { @@ -76,7 +78,7 @@ pub struct GraphValidOptions { /// for the CLI. pub fn graph_valid( graph: &ModuleGraph, - fs: &Arc, + sys: &FsSysTraitsAdapter, roots: &[ModuleSpecifier], options: GraphValidOptions, ) -> Result<(), AnyError> { @@ -86,7 +88,7 @@ pub fn graph_valid( let mut errors = graph_walk_errors( graph, - fs, + sys, roots, GraphWalkErrorsOptions { check_js: options.check_js, @@ -136,7 +138,7 @@ pub struct GraphWalkErrorsOptions { /// and enhances them with CLI information. pub fn graph_walk_errors<'a>( graph: &'a ModuleGraph, - fs: &'a Arc, + sys: &'a FsSysTraitsAdapter, roots: &'a [ModuleSpecifier], options: GraphWalkErrorsOptions, ) -> impl Iterator + 'a { @@ -171,7 +173,7 @@ pub fn graph_walk_errors<'a>( } ModuleGraphError::ModuleError(error) => { enhanced_integrity_error_message(error) - .or_else(|| enhanced_sloppy_imports_error_message(fs, error)) + .or_else(|| enhanced_sloppy_imports_error_message(sys, error)) .unwrap_or_else(|| format_deno_graph_error(error)) } }; @@ -430,7 +432,6 @@ pub struct ModuleGraphBuilder { cjs_tracker: Arc, cli_options: Arc, file_fetcher: Arc, - fs: Arc, global_http_cache: Arc, in_npm_pkg_checker: Arc, lockfile: Option>, @@ -440,6 +441,7 @@ pub struct ModuleGraphBuilder { parsed_source_cache: Arc, resolver: Arc, root_permissions_container: PermissionsContainer, + sys: FsSysTraitsAdapter, } impl ModuleGraphBuilder { @@ -449,7 +451,6 @@ impl ModuleGraphBuilder { cjs_tracker: Arc, cli_options: Arc, file_fetcher: Arc, - fs: Arc, global_http_cache: Arc, in_npm_pkg_checker: Arc, lockfile: Option>, @@ -459,13 +460,13 @@ impl ModuleGraphBuilder { parsed_source_cache: Arc, resolver: Arc, root_permissions_container: PermissionsContainer, + sys: FsSysTraitsAdapter, ) -> Self { Self { caches, cjs_tracker, cli_options, file_fetcher, - fs, global_http_cache, in_npm_pkg_checker, lockfile, @@ -475,6 +476,7 @@ impl ModuleGraphBuilder { parsed_source_cache, resolver, root_permissions_container, + sys, } } @@ -590,7 +592,7 @@ impl ModuleGraphBuilder { is_dynamic: options.is_dynamic, passthrough_jsr_specifiers: false, executor: Default::default(), - file_system: &DenoGraphFsAdapter(self.fs.as_ref()), + file_system: &self.sys, jsr_url_provider: &CliJsrUrlProvider, npm_resolver: Some(&graph_npm_resolver), module_analyzer: &analyzer, @@ -678,7 +680,7 @@ impl ModuleGraphBuilder { for (from, to) in graph.packages.mappings() { lockfile.insert_package_specifier( JsrDepPackageReq::jsr(from.clone()), - to.version.to_string(), + to.version.to_custom_string::<SmallStackString>(), ); } } @@ -744,10 +746,10 @@ impl
ModuleGraphBuilder { ) -> cache::FetchCacher { cache::FetchCacher::new( self.file_fetcher.clone(), - self.fs.clone(), self.global_http_cache.clone(), self.in_npm_pkg_checker.clone(), self.module_info_cache.clone(), + self.sys.clone(), cache::FetchCacherOptions { file_header_overrides: self.cli_options.resolve_file_header_overrides(), permissions, @@ -776,7 +778,7 @@ impl ModuleGraphBuilder { ) -> Result<(), AnyError> { graph_valid( graph, - &self.fs, + &self.sys, roots, GraphValidOptions { kind: if self.cli_options.type_check_mode().is_true() { @@ -832,13 +834,13 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String { } fn enhanced_sloppy_imports_error_message( - fs: &Arc, + sys: &FsSysTraitsAdapter, error: &ModuleError, ) -> Option { match error { ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error | ModuleError::Missing(specifier, _) => { - let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone())) + let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(sys.clone())) .resolve(specifier, SloppyImportsResolutionKind::Execution)? .as_suggestion_message(); Some(format!( @@ -1048,71 +1050,6 @@ impl deno_graph::source::Reporter for FileWatcherReporter { } } -pub struct DenoGraphFsAdapter<'a>( - pub &'a dyn deno_runtime::deno_fs::FileSystem, -); - -impl<'a> deno_graph::source::FileSystem for DenoGraphFsAdapter<'a> { - fn read_dir( - &self, - dir_url: &deno_graph::ModuleSpecifier, - ) -> Vec { - use deno_core::anyhow; - use deno_graph::source::DirEntry; - use deno_graph::source::DirEntryKind; - - let dir_path = match dir_url.to_file_path() { - Ok(path) => path, - // ignore, treat as non-analyzable - Err(()) => return vec![], - }; - let entries = match self.0.read_dir_sync(&dir_path) { - Ok(dir) => dir, - Err(err) - if matches!( - err.kind(), - std::io::ErrorKind::PermissionDenied | std::io::ErrorKind::NotFound - ) => - { - return vec![]; - } - Err(err) => { - return vec![DirEntry { - kind: DirEntryKind::Error( - anyhow::Error::from(err) - .context("Failed to read directory.".to_string()), - ), - url: dir_url.clone(), - }]; - } - }; - let mut dir_entries = Vec::with_capacity(entries.len()); - for entry in entries { - let entry_path = dir_path.join(&entry.name); - dir_entries.push(if entry.is_directory { - DirEntry { - kind: DirEntryKind::Dir, - url: ModuleSpecifier::from_directory_path(&entry_path).unwrap(), - } - } else if entry.is_file { - DirEntry { - kind: DirEntryKind::File, - url: ModuleSpecifier::from_file_path(&entry_path).unwrap(), - } - } else if entry.is_symlink { - DirEntry { - kind: DirEntryKind::Symlink, - url: ModuleSpecifier::from_file_path(&entry_path).unwrap(), - } - } else { - continue; - }); - } - - dir_entries - } -} - pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String { format!( "{}:{}:{}", diff --git a/cli/js/40_lint.js b/cli/js/40_lint.js new file mode 100644 index 0000000000..d29dc3e850 --- /dev/null +++ b/cli/js/40_lint.js @@ -0,0 +1,1122 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +// @ts-check + +import { + compileSelector, + parseSelector, + splitSelectors, +} from "ext:cli/40_lint_selector.js"; +import { core, internals } from "ext:core/mod.js"; +const { + op_lint_create_serialized_ast, +} = core.ops; + +// Keep in sync with Rust +// These types are expected to be present on every node. Note that this +// isn't set in stone. We could revise this at a future point. 
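// A sketch of the per-node binary layout implied by the reads in
// findPropOffset/toJsValue/_dump below (the authoritative encoding lives on
// the Rust side; the field names here are descriptive only):
//
//   u8  type       - index into the type string table
//   u32 parentId   - byte offset of the parent node (0 = none)
//   u32 spanLo, u32 spanHi
//   u8  propCount, then propCount entries of [u8 propId, u8 PropFlags kind]
//   each followed by its payload: u32 for Ref/String, u32 len + len * u32
//   for RefArr, u8 for Bool, and nothing for Null/Undefined. All u32 values
//   are big-endian (see readU32 below).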
+const AST_PROP_TYPE = 1; +const AST_PROP_PARENT = 2; +const AST_PROP_RANGE = 3; +const AST_PROP_LENGTH = 4; + +// Keep in sync with Rust +// Each node property is tagged with this enum to denote +// what kind of value it holds. +/** @enum {number} */ +const PropFlags = { + /** This is an offset to another node */ + Ref: 0, + /** This is an array of offsets to other nodes (like children of a BlockStatement) */ + RefArr: 1, + /** + * This is a string id. The actual string needs to be looked up in + * the string table that was included in the message. + */ + String: 2, + /** This value is either 0 = false, or 1 = true */ + Bool: 3, + /** No value, it's null */ + Null: 4, + /** No value, it's undefined */ + Undefined: 5, +}; + +/** @typedef {import("./40_lint_types.d.ts").AstContext} AstContext */ +/** @typedef {import("./40_lint_types.d.ts").VisitorFn} VisitorFn */ +/** @typedef {import("./40_lint_types.d.ts").CompiledVisitor} CompiledVisitor */ +/** @typedef {import("./40_lint_types.d.ts").LintState} LintState */ +/** @typedef {import("./40_lint_types.d.ts").RuleContext} RuleContext */ +/** @typedef {import("./40_lint_types.d.ts").NodeFacade} NodeFacade */ +/** @typedef {import("./40_lint_types.d.ts").LintPlugin} LintPlugin */ +/** @typedef {import("./40_lint_types.d.ts").TransformFn} TransformFn */ +/** @typedef {import("./40_lint_types.d.ts").MatchContext} MatchContext */ + +/** @type {LintState} */ +const state = { + plugins: [], + installedPlugins: new Set(), +}; + +/** + * Every rule gets its own instance of this class. This is the main + * API lint rules interact with. + * @implements {RuleContext} + */ +export class Context { + id; + + fileName; + + /** + * @param {string} id + * @param {string} fileName + */ + constructor(id, fileName) { + this.id = id; + this.fileName = fileName; + } +} + +/** + * @param {LintPlugin} plugin + */ +export function installPlugin(plugin) { + if (typeof plugin !== "object") { + throw new Error("Linter plugin must be an object"); + } + if (typeof plugin.name !== "string") { + throw new Error("Linter plugin name must be a string"); + } + if (typeof plugin.rules !== "object") { + throw new Error("Linter plugin rules must be an object"); + } + if (state.installedPlugins.has(plugin.name)) { + throw new Error(`Linter plugin ${plugin.name} has already been registered`); + } + state.plugins.push(plugin); + state.installedPlugins.add(plugin.name); +} + +/** + * @param {AstContext} ctx + * @param {number} offset + * @returns {Node | null} + */ +function getNode(ctx, offset) { + if (offset === 0) return null; + const cached = ctx.nodes.get(offset); + if (cached !== undefined) return cached; + + const node = new Node(ctx, offset); + ctx.nodes.set(offset, /** @type {*} */ (node)); + return node; +} + +/** + * Find the offset of a specific property of a specific node. This will + * be used a lot more later for selectors.
+ * @param {Uint8Array} buf + * @param {number} offset + * @param {number} search + * @returns {number} + */ +function findPropOffset(buf, offset, search) { + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const propCount = buf[offset]; + offset += 1; + + for (let i = 0; i < propCount; i++) { + const maybe = offset; + const prop = buf[offset++]; + const kind = buf[offset++]; + if (prop === search) return maybe; + + if (kind === PropFlags.Ref) { + offset += 4; + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4 + (len * 4); + } else if (kind === PropFlags.String) { + offset += 4; + } else if (kind === PropFlags.Bool) { + offset++; + } else if (kind === PropFlags.Null || kind === PropFlags.Undefined) { + // No value + } else { + offset++; + } + } + + return -1; +} + +const INTERNAL_CTX = Symbol("ctx"); +const INTERNAL_OFFSET = Symbol("offset"); + +// This class is a facade for all materialized nodes. Instead of creating a +// unique class per AST node, we have one class with getters for every +// possible node property. This allows us to lazily materialize child nodes +// only when they are needed. +class Node { + [INTERNAL_CTX]; + [INTERNAL_OFFSET]; + + /** + * @param {AstContext} ctx + * @param {number} offset + */ + constructor(ctx, offset) { + this[INTERNAL_CTX] = ctx; + this[INTERNAL_OFFSET] = offset; + } + + /** + * Logging a class with only getters prints just the class name. This + * makes debugging difficult because you don't see any of the properties. + * For that reason we'll intercept inspection and serialize the node to + * a plain JSON structure which can be logged and allows users to see all + * properties and their values. + * + * This is only expected to be used during development of a rule. + * @param {*} _ + * @param {Deno.InspectOptions} options + * @returns {string} + */ + [Symbol.for("Deno.customInspect")](_, options) { + const json = toJsValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET]); + return Deno.inspect(json, options); + } + + [Symbol.for("Deno.lint.toJsValue")]() { + return toJsValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET]); + } +} + +/** @type {Set<number>} */ +const appliedGetters = new Set(); + +/** + * Add getters for all potential properties found in the message. + * @param {AstContext} ctx + */ +function setNodeGetters(ctx) { + if (appliedGetters.size === ctx.strByProp.length) return; + + for (let i = 0; i < ctx.strByProp.length; i++) { + const id = ctx.strByProp[i]; + if (id === 0 || appliedGetters.has(i)) continue; + appliedGetters.add(i); + + const name = getString(ctx.strTable, id); + + Object.defineProperty(Node.prototype, name, { + get() { + return readValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET], i); + }, + }); + } +} + +/** + * Serialize a node recursively to plain JSON + * @param {AstContext} ctx + * @param {number} offset + * @returns {*} + */ +function toJsValue(ctx, offset) { + const { buf } = ctx; + + /** @type {Record<string, any>} */ + const node = { + type: readValue(ctx, offset, AST_PROP_TYPE), + range: readValue(ctx, offset, AST_PROP_RANGE), + }; + + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const count = buf[offset++]; + for (let i = 0; i < count; i++) { + const prop = buf[offset++]; + const kind = buf[offset++]; + const name = getString(ctx.strTable, ctx.strByProp[prop]); + + if (kind === PropFlags.Ref) { + const v = readU32(buf, offset); + offset += 4; + node[name] = v === 0 ?
null : toJsValue(ctx, v); + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4; + const nodes = new Array(len); + for (let i = 0; i < len; i++) { + const v = readU32(buf, offset); + // Advance past the u32 before skipping empty slots so the offset + // bookkeeping stays correct for the remaining entries. + offset += 4; + if (v === 0) continue; + nodes[i] = toJsValue(ctx, v); + } + node[name] = nodes; + } else if (kind === PropFlags.Bool) { + const v = buf[offset++]; + node[name] = v === 1; + } else if (kind === PropFlags.String) { + const v = readU32(buf, offset); + offset += 4; + node[name] = getString(ctx.strTable, v); + } else if (kind === PropFlags.Null) { + node[name] = null; + } else if (kind === PropFlags.Undefined) { + node[name] = undefined; + } + } + + return node; +} + +/** + * Read a specific property from a node + * @param {AstContext} ctx + * @param {number} offset + * @param {number} search + * @returns {*} + */ +function readValue(ctx, offset, search) { + const { buf } = ctx; + const type = buf[offset]; + + if (search === AST_PROP_TYPE) { + return getString(ctx.strTable, ctx.strByType[type]); + } else if (search === AST_PROP_RANGE) { + const start = readU32(buf, offset + 1 + 4); + const end = readU32(buf, offset + 1 + 4 + 4); + return [start, end]; + } else if (search === AST_PROP_PARENT) { + const pos = readU32(buf, offset + 1); + return getNode(ctx, pos); + } + + offset = findPropOffset(ctx.buf, offset, search); + if (offset === -1) return undefined; + + const kind = buf[offset + 1]; + offset += 2; + + if (kind === PropFlags.Ref) { + const value = readU32(buf, offset); + return getNode(ctx, value); + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4; + + const nodes = new Array(len); + for (let i = 0; i < len; i++) { + nodes[i] = getNode(ctx, readU32(buf, offset)); + offset += 4; + } + return nodes; + } else if (kind === PropFlags.Bool) { + return buf[offset] === 1; + } else if (kind === PropFlags.String) { + const v = readU32(buf, offset); + return getString(ctx.strTable, v); + } else if (kind === PropFlags.Null) { + return null; + } else if (kind === PropFlags.Undefined) { + return undefined; + } + + throw new Error(`Unknown prop kind: ${kind}`); +} + +const DECODER = new TextDecoder(); + +/** + * TODO: Check if it's faster to use the `DataView` API instead.
+ * @param {Uint8Array} buf + * @param {number} i + * @returns {number} + */ +function readU32(buf, i) { + return (buf[i] << 24) + (buf[i + 1] << 16) + (buf[i + 2] << 8) + + buf[i + 3]; +} + +/** + * Get a string by id and error if it wasn't found + * @param {AstContext["strTable"]} strTable + * @param {number} id + * @returns {string} + */ +function getString(strTable, id) { + const name = strTable.get(id); + if (name === undefined) { + throw new Error(`Missing string id: ${id}`); + } + + return name; +} + +/** + * @param {AstContext["buf"]} buf + * @param {number} child + * @returns {null | [number, number]} + */ +function findChildOffset(buf, child) { + let offset = readU32(buf, child + 1); + + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const propCount = buf[offset++]; + for (let i = 0; i < propCount; i++) { + const _prop = buf[offset++]; + const kind = buf[offset++]; + + switch (kind) { + case PropFlags.Ref: { + const start = offset; + const value = readU32(buf, offset); + offset += 4; + if (value === child) { + return [start, -1]; + } + break; + } + case PropFlags.RefArr: { + const start = offset; + + const len = readU32(buf, offset); + offset += 4; + + for (let j = 0; j < len; j++) { + const value = readU32(buf, offset); + offset += 4; + if (value === child) { + return [start, j]; + } + } + + break; + } + case PropFlags.String: + offset += 4; + break; + case PropFlags.Bool: + offset++; + break; + case PropFlags.Null: + case PropFlags.Undefined: + break; + } + } + + return null; +} + +/** @implements {MatchContext} */ +class MatchCtx { + /** + * @param {AstContext["buf"]} buf + * @param {AstContext["strTable"]} strTable + * @param {AstContext["strByType"]} strByType + */ + constructor(buf, strTable, strByType) { + this.buf = buf; + this.strTable = strTable; + this.strByType = strByType; + } + + /** + * @param {number} offset + * @returns {number} + */ + getParent(offset) { + return readU32(this.buf, offset + 1); + } + + /** + * @param {number} offset + * @returns {number} + */ + getType(offset) { + return this.buf[offset]; + } + + /** + * @param {number} offset + * @param {number[]} propIds + * @param {number} idx + * @returns {unknown} + */ + getAttrPathValue(offset, propIds, idx) { + const { buf } = this; + + const propId = propIds[idx]; + + switch (propId) { + case AST_PROP_TYPE: { + const type = this.getType(offset); + return getString(this.strTable, this.strByType[type]); + } + case AST_PROP_PARENT: + case AST_PROP_RANGE: + throw new Error(`Not supported`); + } + + offset = findPropOffset(buf, offset, propId); + if (offset === -1) return undefined; + const _prop = buf[offset++]; + const kind = buf[offset++]; + + if (kind === PropFlags.Ref) { + const value = readU32(buf, offset); + // Checks need to end with a value, not a node + if (idx === propIds.length - 1) return undefined; + return this.getAttrPathValue(value, propIds, idx + 1); + } else if (kind === PropFlags.RefArr) { + const count = readU32(buf, offset); + offset += 4; + + if (idx < propIds.length - 1 && propIds[idx + 1] === AST_PROP_LENGTH) { + return count; + } + + // TODO(@marvinhagemeister): Allow traversing into array children? 
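// Worked example (hypothetical selector): `[callee.name="test"]` compiles to
// the propIds path [callee, name]; the Ref branch above recurses into the
// `callee` node, and the final path segment is then read as a plain value
// (String/Bool/Null/Undefined) by the checks below.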
+ } + + // Cannot traverse into primitives further + if (idx < propIds.length - 1) return undefined; + + if (kind === PropFlags.String) { + const s = readU32(buf, offset); + return getString(this.strTable, s); + } else if (kind === PropFlags.Bool) { + return buf[offset] === 1; + } else if (kind === PropFlags.Null) { + return null; + } else if (kind === PropFlags.Undefined) { + return undefined; + } + + return undefined; + } + + /** + * @param {number} offset + * @param {number[]} propIds + * @param {number} idx + * @returns {boolean} + */ + hasAttrPath(offset, propIds, idx) { + const { buf } = this; + + const propId = propIds[idx]; + // If propId is 0 then the property doesn't exist in the AST + if (propId === 0) return false; + + switch (propId) { + case AST_PROP_TYPE: + case AST_PROP_PARENT: + case AST_PROP_RANGE: + return true; + } + + offset = findPropOffset(buf, offset, propId); + if (offset === -1) return false; + if (idx === propIds.length - 1) return true; + + const _prop = buf[offset++]; + const kind = buf[offset++]; + if (kind === PropFlags.Ref) { + const value = readU32(buf, offset); + return this.hasAttrPath(value, propIds, idx + 1); + } else if (kind === PropFlags.RefArr) { + const _count = readU32(buf, offset); + offset += 4; + + if (idx < propIds.length - 1 && propIds[idx + 1] === AST_PROP_LENGTH) { + return true; + } + + // TODO(@marvinhagemeister): Allow traversing into array children? + } + + // Primitives cannot be traversed further. This means we + // didn't find the attribute. + if (idx < propIds.length - 1) return false; + + return true; + } + + /** + * @param {number} offset + * @returns {number} + */ + getFirstChild(offset) { + const { buf } = this; + + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const count = buf[offset++]; + for (let i = 0; i < count; i++) { + const _prop = buf[offset++]; + const kind = buf[offset++]; + + switch (kind) { + case PropFlags.Ref: { + const v = readU32(buf, offset); + offset += 4; + return v; + } + case PropFlags.RefArr: { + const len = readU32(buf, offset); + offset += 4; + for (let j = 0; j < len; j++) { + const v = readU32(buf, offset); + offset += 4; + return v; + } + + // Empty array: keep scanning the remaining props for a child. + break; + } + + case PropFlags.String: + offset += 4; + break; + case PropFlags.Bool: + offset++; + break; + case PropFlags.Null: + case PropFlags.Undefined: + break; + } + } + + return -1; + } + + /** + * @param {number} offset + * @returns {number} + */ + getLastChild(offset) { + const { buf } = this; + + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + let last = -1; + + const count = buf[offset++]; + for (let i = 0; i < count; i++) { + const _prop = buf[offset++]; + const kind = buf[offset++]; + + switch (kind) { + case PropFlags.Ref: { + const v = readU32(buf, offset); + offset += 4; + last = v; + break; + } + case PropFlags.RefArr: { + const len = readU32(buf, offset); + offset += 4; + for (let j = 0; j < len; j++) { + const v = readU32(buf, offset); + last = v; + offset += 4; + } + + break; + } + + case PropFlags.String: + offset += 4; + break; + case PropFlags.Bool: + offset++; + break; + case PropFlags.Null: + case PropFlags.Undefined: + break; + } + } + + return last; + } + + /** + * @param {number} id + * @returns {number[]} + */ + getSiblings(id) { + const { buf } = this; + + const result = findChildOffset(buf, id); + // Happens for program nodes + if (result === null) return []; + + if (result[1] === -1) { + return [id]; + } + + let offset = result[0]; + const count = readU32(buf, offset); + offset += 4; +
+ /** @type {number[]} */ + const out = []; + for (let i = 0; i < count; i++) { + const v = readU32(buf, offset); + offset += 4; + out.push(v); + } + + return out; + } +} + +/** + * @param {Uint8Array} buf + * @returns {AstContext} + */ +function createAstContext(buf) { + /** @type {Map<number, string>} */ + const strTable = new Map(); + + // The buffer has a few offsets at the end which allow us to easily + // jump to the relevant sections of the message. + const typeMapOffset = readU32(buf, buf.length - 16); + const propMapOffset = readU32(buf, buf.length - 12); + const strTableOffset = readU32(buf, buf.length - 8); + + // Offset of the topmost node in the AST Tree. + const rootOffset = readU32(buf, buf.length - 4); + + let offset = strTableOffset; + const stringCount = readU32(buf, offset); + offset += 4; + + // TODO(@marvinhagemeister): We could lazily decode the strings on an as-needed basis. + // Not sure if this matters much in practice though. + let id = 0; + for (let i = 0; i < stringCount; i++) { + const len = readU32(buf, offset); + offset += 4; + + const strBytes = buf.slice(offset, offset + len); + offset += len; + const s = DECODER.decode(strBytes); + strTable.set(id, s); + id++; + } + + if (strTable.size !== stringCount) { + throw new Error( + `Could not deserialize string table. Expected ${stringCount} items, but got ${strTable.size}`, + ); + } + + offset = typeMapOffset; + const typeCount = readU32(buf, offset); + offset += 4; + + const typeByStr = new Map(); + const strByType = new Array(typeCount).fill(0); + for (let i = 0; i < typeCount; i++) { + const v = readU32(buf, offset); + offset += 4; + + strByType[i] = v; + typeByStr.set(strTable.get(v), i); + } + + offset = propMapOffset; + const propCount = readU32(buf, offset); + offset += 4; + + const propByStr = new Map(); + const strByProp = new Array(propCount).fill(0); + for (let i = 0; i < propCount; i++) { + const v = readU32(buf, offset); + offset += 4; + + strByProp[i] = v; + propByStr.set(strTable.get(v), i); + } + + /** @type {AstContext} */ + const ctx = { + buf, + strTable, + rootOffset, + nodes: new Map(), + strTableOffset, + strByProp, + strByType, + typeByStr, + propByStr, + matcher: new MatchCtx(buf, strTable, strByType), + }; + + setNodeGetters(ctx); + + // DEV ONLY: Enable this to inspect the buffer message + // _dump(ctx); + + return ctx; +} + +/** + * @param {*} _node + */ +const NOOP = (_node) => {}; + +/** + * Kick off the actual linting process of JS plugins. + * @param {string} fileName + * @param {Uint8Array} serializedAst + */ +export function runPluginsForFile(fileName, serializedAst) { + const ctx = createAstContext(serializedAst); + + /** @type {Map<string, { enter: VisitorFn, exit: VisitorFn }>} */ + const bySelector = new Map(); + + const destroyFns = []; + + // Instantiate and merge visitors. This allows us to only traverse + // the AST once instead of per plugin. Whenever we enter or exit a + // node we'll call all visitors that match. + for (let i = 0; i < state.plugins.length; i++) { + const plugin = state.plugins[i]; + + for (const name of Object.keys(plugin.rules)) { + const rule = plugin.rules[name]; + const id = `${plugin.name}/${name}`; + const ctx = new Context(id, fileName); + const visitor = rule.create(ctx); + + // deno-lint-ignore guard-for-in + for (let key in visitor) { + const fn = visitor[key]; + if (fn === undefined) continue; + + // Support enter and exit callbacks on a visitor. + // Exit callbacks are marked by having `:exit` at the end.
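// For example, a rule visitor of the (hypothetical) shape
//   { "CallExpression"(node) { ... }, "CallExpression:exit"(node) { ... } }
// registers an enter and an exit callback under the same selector key.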
+ let isExit = false; + if (key.endsWith(":exit")) { + isExit = true; + key = key.slice(0, -":exit".length); + } + + const selectors = splitSelectors(key); + + for (let j = 0; j < selectors.length; j++) { + const key = selectors[j]; + + let info = bySelector.get(key); + if (info === undefined) { + info = { enter: NOOP, exit: NOOP }; + bySelector.set(key, info); + } + const prevFn = isExit ? info.exit : info.enter; + + /** + * @param {*} node + */ + const wrapped = (node) => { + prevFn(node); + + try { + fn(node); + } catch (err) { + throw new Error(`Visitor "${name}" of plugin "${id}" errored`, { + cause: err, + }); + } + }; + + if (isExit) { + info.exit = wrapped; + } else { + info.enter = wrapped; + } + } + } + + if (typeof rule.destroy === "function") { + const destroyFn = rule.destroy.bind(rule); + destroyFns.push(() => { + try { + destroyFn(ctx); + } catch (err) { + throw new Error(`Destroy hook of "${id}" errored`, { cause: err }); + } + }); + } + } + } + + // Create selectors + /** @type {TransformFn} */ + const toElem = (str) => { + const id = ctx.typeByStr.get(str); + return id === undefined ? 0 : id; + }; + /** @type {TransformFn} */ + const toAttr = (str) => { + const id = ctx.propByStr.get(str); + return id === undefined ? 0 : id; + }; + + /** @type {CompiledVisitor[]} */ + const visitors = []; + for (const [sel, info] of bySelector.entries()) { + // Selectors are already split here. + // TODO(@marvinhagemeister): Avoid array allocation (not sure if that matters) + const parsed = parseSelector(sel, toElem, toAttr)[0]; + const matcher = compileSelector(parsed); + + visitors.push({ info, matcher }); + } + + // Traverse ast with all visitors at the same time to avoid traversing + // multiple times. + try { + traverse(ctx, visitors, ctx.rootOffset); + } finally { + ctx.nodes.clear(); + + // Optional: Destroy rules + for (let i = 0; i < destroyFns.length; i++) { + destroyFns[i](); + } + } +} + +/** + * @param {AstContext} ctx + * @param {CompiledVisitor[]} visitors + * @param {number} offset + */ +function traverse(ctx, visitors, offset) { + // The 0 offset is used to denote an empty/placeholder node + if (offset === 0) return; + + const originalOffset = offset; + + const { buf } = ctx; + + /** @type {VisitorFn[] | null} */ + let exits = null; + + for (let i = 0; i < visitors.length; i++) { + const v = visitors[i]; + + if (v.matcher(ctx.matcher, offset)) { + if (v.info.exit !== NOOP) { + if (exits === null) { + exits = [v.info.exit]; + } else { + exits.push(v.info.exit); + } + } + + if (v.info.enter !== NOOP) { + const node = /** @type {*} */ (getNode(ctx, offset)); + v.info.enter(node); + } + } + } + + // Search for node references in the properties of the current node. All + // other properties can be ignored. 
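// Net effect: the enter callbacks invoked above fire in pre-order, the
// recursion below descends through Ref/RefArr child offsets, and the
// `finally` block fires the collected exit callbacks in post-order.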
+ try { + // type + parentId + SpanLo + SpanHi + offset += 1 + 4 + 4 + 4; + + const propCount = buf[offset]; + offset += 1; + + for (let i = 0; i < propCount; i++) { + const kind = buf[offset + 1]; + offset += 2; // propId + propFlags + + if (kind === PropFlags.Ref) { + const next = readU32(buf, offset); + offset += 4; + traverse(ctx, visitors, next); + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4; + + for (let j = 0; j < len; j++) { + const child = readU32(buf, offset); + offset += 4; + traverse(ctx, visitors, child); + } + } else if (kind === PropFlags.String) { + offset += 4; + } else if (kind === PropFlags.Bool) { + offset += 1; + } else if (kind === PropFlags.Null || kind === PropFlags.Undefined) { + // No value + } + } + } finally { + if (exits !== null) { + for (let i = 0; i < exits.length; i++) { + const node = /** @type {*} */ (getNode(ctx, originalOffset)); + exits[i](node); + } + } + } +} + +/** + * This is a useful debugging helper to display the buffer's contents. + * @param {AstContext} ctx + */ +function _dump(ctx) { + const { buf, strTableOffset, strTable, strByType, strByProp } = ctx; + + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(strTable); + + for (let i = 0; i < strByType.length; i++) { + const v = strByType[i]; + // @ts-ignore dump fn + // deno-lint-ignore no-console + if (v > 0) console.log(" > type:", i, getString(ctx.strTable, v), v); + } + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(); + for (let i = 0; i < strByProp.length; i++) { + const v = strByProp[i]; + // @ts-ignore dump fn + // deno-lint-ignore no-console + if (v > 0) console.log(" > prop:", i, getString(ctx.strTable, v), v); + } + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(); + + let offset = 0; + + while (offset < strTableOffset) { + const type = buf[offset]; + const name = getString(ctx.strTable, ctx.strByType[type]); + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(`${name}, offset: ${offset}, type: ${type}`); + offset += 1; + + const parent = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` parent: ${parent}`); + + const start = readU32(buf, offset); + offset += 4; + const end = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` range: ${start} -> ${end}`); + + const count = buf[offset++]; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` prop count: ${count}`); + + for (let i = 0; i < count; i++) { + const prop = buf[offset++]; + const kind = buf[offset++]; + const name = getString(ctx.strTable, ctx.strByProp[prop]); + + let kindName = "unknown"; + for (const k in PropFlags) { + // @ts-ignore dump fn + if (kind === PropFlags[k]) { + kindName = k; + } + } + + if (kind === PropFlags.Ref) { + const v = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: ${v} (${kindName}, ${prop})`); + } else if (kind === PropFlags.RefArr) { + const len = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: Array(${len}) (${kindName}, ${prop})`); + + for (let j = 0; j < len; j++) { + const v = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` - ${v} (${prop})`); + } + } else if (kind === PropFlags.Bool) { + const v = buf[offset]; + offset += 1; + //
@ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: ${v} (${kindName}, ${prop})`); + } else if (kind === PropFlags.String) { + const v = readU32(buf, offset); + offset += 4; + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log( + ` ${name}: ${getString(ctx.strTable, v)} (${kindName}, ${prop})`, + ); + } else if (kind === PropFlags.Null) { + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: null (${kindName}, ${prop})`); + } else if (kind === PropFlags.Undefined) { + // @ts-ignore dump fn + // deno-lint-ignore no-console + console.log(` ${name}: undefined (${kindName}, ${prop})`); + } + } + } +} + +// TODO(bartlomieju): this is temporary, until we get plugins plumbed through +// the CLI linter +/** + * @param {LintPlugin} plugin + * @param {string} fileName + * @param {string} sourceText + */ +function runLintPlugin(plugin, fileName, sourceText) { + installPlugin(plugin); + const serializedAst = op_lint_create_serialized_ast(fileName, sourceText); + + try { + runPluginsForFile(fileName, serializedAst); + } finally { + // During testing we don't want to keep plugins around + state.installedPlugins.clear(); + } +} + +// TODO(bartlomieju): this is temporary, until we get plugins plumbed through +// the CLI linter +internals.runLintPlugin = runLintPlugin; diff --git a/cli/js/40_lint_selector.js b/cli/js/40_lint_selector.js new file mode 100644 index 0000000000..b78f7a5d0e --- /dev/null +++ b/cli/js/40_lint_selector.js @@ -0,0 +1,1014 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +// @ts-check + +/** @typedef {import("./40_lint_types.d.ts").LintState} LintState */ +/** @typedef {import("./40_lint_types.d.ts").AstContext} AstContext */ +/** @typedef {import("./40_lint_types.d.ts").MatchContext} MatchCtx */ +/** @typedef {import("./40_lint_types.d.ts").AttrExists} AttrExists */ +/** @typedef {import("./40_lint_types.d.ts").AttrBin} AttrBin */ +/** @typedef {import("./40_lint_types.d.ts").AttrSelector} AttrSelector */ +/** @typedef {import("./40_lint_types.d.ts").ElemSelector} ElemSelector */ +/** @typedef {import("./40_lint_types.d.ts").PseudoNthChild} PseudoNthChild */ +/** @typedef {import("./40_lint_types.d.ts").PseudoHas} PseudoHas */ +/** @typedef {import("./40_lint_types.d.ts").PseudoNot} PseudoNot */ +/** @typedef {import("./40_lint_types.d.ts").Relation} SRelation */ +/** @typedef {import("./40_lint_types.d.ts").Selector} Selector */ +/** @typedef {import("./40_lint_types.d.ts").SelectorParseCtx} SelectorParseCtx */ +/** @typedef {import("./40_lint_types.d.ts").NextFn} NextFn */ +/** @typedef {import("./40_lint_types.d.ts").MatcherFn} MatcherFn */ +/** @typedef {import("./40_lint_types.d.ts").TransformFn} Transformer */ + +const Char = { + Tab: 9, + Space: 32, + Bang: 33, + DoubleQuote: 34, + Quote: 39, + BraceOpen: 40, + BraceClose: 41, + Plus: 43, + Comma: 44, + Minus: 45, + Dot: 46, + Slash: 47, + n0: 48, + n9: 57, + Colon: 58, + Less: 60, + Equal: 61, + Greater: 62, + A: 65, + Z: 90, + BracketOpen: 91, + BackSlash: 92, + BracketClose: 93, + Underscore: 95, + a: 97, + z: 122, + Tilde: 126, +}; + +export const Token = { + EOF: 0, + Word: 1, + Space: 2, + Op: 3, + Colon: 4, + Comma: 7, + BraceOpen: 8, + BraceClose: 9, + BracketOpen: 10, + BracketClose: 11, + String: 12, + Number: 13, + Bool: 14, + Null: 15, + Undefined: 16, + Dot: 17, + Minus: 18, +}; + +export const BinOp = { + /** [attr="value"] or [attr=value] */ + Equal: 1, + /** [attr!="value"] or [attr!=value] */ + NotEqual:
2, + /** [attr>1] */ + Greater: 3, + /** [attr>=1] */ + GreaterThan: 4, + /** [attr<1] */ + Less: 5, + /** [attr<=1] */ + LessThan: 6, + Tilde: 7, + Plus: 8, + Space: 9, +}; + +/** + * @param {string} s + * @returns {number} + */ +function getAttrOp(s) { + switch (s) { + case "=": + return BinOp.Equal; + case "!=": + return BinOp.NotEqual; + case ">": + return BinOp.Greater; + case ">=": + return BinOp.GreaterThan; + case "<": + return BinOp.Less; + case "<=": + return BinOp.LessThan; + case "~": + return BinOp.Tilde; + case "+": + return BinOp.Plus; + default: + throw new Error(`Unknown attribute operator: '${s}'`); + } +} + +export class Lexer { + token = Token.Word; + start = 0; + end = 0; + ch = 0; + i = -1; + + value = ""; + + /** + * @param {string} input + */ + constructor(input) { + this.input = input; + this.step(); + this.next(); + } + + /** + * @param {number} token + */ + expect(token) { + if (this.token !== token) { + throw new Error( + `Expected token '${token}', but got '${this.token}'.\n\n${this.input}\n${ + " ".repeat(this.i) + }^`, + ); + } + } + + /** + * @param {number} token + */ + readAsWordUntil(token) { + const s = this.i; + while (this.token !== Token.EOF && this.token !== token) { + this.next(); + } + + this.start = s; + this.end = this.i - 1; + this.value = this.getSlice(); + } + + getSlice() { + return this.input.slice(this.start, this.end); + } + + step() { + this.i++; + if (this.i >= this.input.length) { + this.ch = -1; + } else { + this.ch = this.input.charCodeAt(this.i); + } + } + + next() { + this.value = ""; + + if (this.i >= this.input.length) { + this.token = Token.EOF; + return; + } + + // console.log( + // "NEXT", + // this.input, + // this.i, + // JSON.stringify(String.fromCharCode(this.ch)), + // ); + + while (true) { + switch (this.ch) { + case Char.Space: + while (this.isWhiteSpace()) { + this.step(); + } + + // Check if space preceded operator + if (this.isOpContinue()) { + continue; + } + + this.token = Token.Space; + return; + case Char.BracketOpen: + this.token = Token.BracketOpen; + this.step(); + return; + case Char.BracketClose: + this.token = Token.BracketClose; + this.step(); + return; + case Char.BraceOpen: + this.token = Token.BraceOpen; + this.step(); + return; + case Char.BraceClose: + this.token = Token.BraceClose; + this.step(); + return; + case Char.Colon: + this.token = Token.Colon; + this.step(); + return; + case Char.Comma: + this.token = Token.Comma; + this.step(); + return; + case Char.Dot: + this.token = Token.Dot; + this.step(); + return; + case Char.Minus: + this.token = Token.Minus; + this.step(); + return; + + case Char.Plus: + case Char.Tilde: + case Char.Greater: + case Char.Equal: + case Char.Less: + case Char.Bang: { + this.token = Token.Op; + this.start = this.i; + this.step(); + + while (this.isOpContinue()) { + this.step(); + } + + this.end = this.i; + this.value = this.getSlice(); + + // Consume remaining space + while (this.isWhiteSpace()) { + this.step(); + } + + return; + } + + case Char.Quote: + case Char.DoubleQuote: { + this.token = Token.String; + const ch = this.ch; + + this.step(); + this.start = this.i; + + while (this.ch > 0 && this.ch !== ch) { + this.step(); + } + + this.end = this.i; + this.value = this.getSlice(); + this.step(); + + return; + } + + default: + this.start = this.i; + this.step(); + + while (this.isWordContinue()) { + this.step(); + } + + this.end = this.i; + this.value = this.getSlice(); + this.token = Token.Word; + return; + } + } + } + + isWordContinue() { + const ch = this.ch; +
switch (ch) { + case Char.Minus: + case Char.Underscore: + return true; + default: + return (ch >= Char.a && ch <= Char.z) || + (ch >= Char.A && ch <= Char.Z) || + (ch >= Char.n0 && ch <= Char.n9); + } + } + + isOpContinue() { + const ch = this.ch; + switch (ch) { + case Char.Equal: + case Char.Bang: + case Char.Greater: + case Char.Less: + case Char.Tilde: + case Char.Plus: + return true; + default: + return false; + } + } + + isWhiteSpace() { + return this.ch === Char.Space || this.ch === Char.Tab; + } +} + +const NUMBER_REG = /^(\d+\.)?\d+$/; +const BIGINT_REG = /^\d+n$/; + +/** + * @param {string} raw + * @returns {any} + */ +function getFromRawValue(raw) { + switch (raw) { + case "true": + return true; + case "false": + return false; + case "null": + return null; + case "undefined": + return undefined; + default: + if (raw.startsWith("'") && raw.endsWith("'")) { + if (raw.length === 2) return ""; + return raw.slice(1, -1); + } else if (raw.startsWith('"') && raw.endsWith('"')) { + if (raw.length === 2) return ""; + return raw.slice(1, -1); + } else if (raw.startsWith("/")) { + const end = raw.lastIndexOf("/"); + if (end === -1) throw new Error(`Invalid RegExp pattern: ${raw}`); + const pattern = raw.slice(1, end); + const flags = end < raw.length - 1 ? raw.slice(end + 1) : undefined; + return new RegExp(pattern, flags); + } else if (NUMBER_REG.test(raw)) { + return Number(raw); + } else if (BIGINT_REG.test(raw)) { + return BigInt(raw.slice(0, -1)); + } + + return raw; + } +} + +export const ELEM_NODE = 1; +export const RELATION_NODE = 2; +export const ATTR_EXISTS_NODE = 3; +export const ATTR_BIN_NODE = 4; +export const PSEUDO_NTH_CHILD = 5; +export const PSEUDO_HAS = 6; +export const PSEUDO_NOT = 7; +export const PSEUDO_FIRST_CHILD = 8; +export const PSEUDO_LAST_CHILD = 9; + +/** + * Parse out all unique selectors of a selector list. + * @param {string} input + * @returns {string[]} + */ +export function splitSelectors(input) { + /** @type {string[]} */ + const out = []; + + let last = 0; + let depth = 0; + for (let i = 0; i < input.length; i++) { + const ch = input.charCodeAt(i); + switch (ch) { + case Char.BraceOpen: + depth++; + break; + case Char.BraceClose: + depth--; + break; + case Char.Comma: + if (depth === 0) { + out.push(input.slice(last, i).trim()); + last = i + 1; + } + break; + } + } + + // Push the trailing selector (if any) after the last comma. + if (last < input.length) { + out.push(input.slice(last).trim()); + } + + return out; +} + +/** + * @param {string} input + * @param {Transformer} toElem + * @param {Transformer} toAttr + * @returns {Selector[]} + */ +export function parseSelector(input, toElem, toAttr) { + /** @type {Selector[]} */ + const result = []; + + /** @type {Selector[]} */ + const stack = [[]]; + + const lex = new Lexer(input); + + // Some subselectors like `:nth-child(.. of <selector>)` must have + // a single selector instead of a selector list. + let throwOnComma = false; + + while (lex.token !== Token.EOF) { + const current = /** @type {Selector} */ (stack.at(-1)); + + if (lex.token === Token.Word) { + const value = lex.value; + const wildcard = value === "*"; + + const elem = !wildcard ?
toElem(value) : 0; + current.push({ + type: ELEM_NODE, + elem, + wildcard, + }); + lex.next(); + + continue; + } else if (lex.token === Token.Space) { + lex.next(); + + if (lex.token === Token.Word) { + current.push({ + type: RELATION_NODE, + op: BinOp.Space, + }); + } + + continue; + } else if (lex.token === Token.BracketOpen) { + lex.next(); + lex.expect(Token.Word); + + // Check for value comparison + const prop = [toAttr(lex.value)]; + lex.next(); + + while (lex.token === Token.Dot) { + lex.next(); + lex.expect(Token.Word); + + prop.push(toAttr(lex.value)); + lex.next(); + } + + if (lex.token === Token.Op) { + const op = getAttrOp(lex.value); + lex.readAsWordUntil(Token.BracketClose); + + const value = getFromRawValue(lex.value); + current.push({ type: ATTR_BIN_NODE, prop, op, value }); + } else { + current.push({ + type: ATTR_EXISTS_NODE, + prop, + }); + } + + lex.expect(Token.BracketClose); + lex.next(); + continue; + } else if (lex.token === Token.Colon) { + lex.next(); + lex.expect(Token.Word); + + switch (lex.value) { + case "first-child": + current.push({ + type: PSEUDO_FIRST_CHILD, + }); + break; + case "last-child": + current.push({ + type: PSEUDO_LAST_CHILD, + }); + break; + case "nth-child": { + lex.next(); + lex.expect(Token.BraceOpen); + lex.next(); + + let mul = 1; + let repeat = false; + let step = 0; + if (lex.token === Token.Minus) { + mul = -1; + lex.next(); + } + + lex.expect(Token.Word); + const value = lex.getSlice(); + + if (value.endsWith("n")) { + repeat = true; + step = +value.slice(0, -1) * mul; + } else { + step = +value * mul; + } + + lex.next(); + + /** @type {PseudoNthChild} */ + const node = { + type: PSEUDO_NTH_CHILD, + of: null, + op: null, + step, + stepOffset: 0, + repeat, + }; + current.push(node); + + if (lex.token === Token.Space) lex.next(); + + if (lex.token !== Token.BraceClose) { + if (lex.token === Token.Op) { + node.op = lex.value; + lex.next(); + + if (lex.token === Token.Space) lex.next(); + } else if (lex.token === Token.Minus) { + node.op = "-"; + lex.next(); + + if (lex.token === Token.Space) { + lex.next(); + } + } + + lex.expect(Token.Word); + node.stepOffset = +lex.value; + lex.next(); + + if (lex.token !== Token.BraceClose) { + lex.next(); // Space + + if (lex.token === Token.Word) { + if (/** @type {string} */ (lex.value) !== "of") { + throw new Error( + `Expected 'of' keyword in ':nth-child' but got: ${lex.value}`, + ); + } + + lex.next(); + lex.expect(Token.Space); + lex.next(); + throwOnComma = true; + stack.push([]); + } + + continue; + } + + lex.expect(Token.BraceClose); + } else if (!node.repeat) { + // :nth-child(2) -> step is actually stepOffset + node.stepOffset = node.step - 1; + node.step = 0; + } + + lex.next(); + + continue; + } + + case "has": + case "where": + case "is": { + lex.next(); + lex.expect(Token.BraceOpen); + lex.next(); + + current.push({ + type: PSEUDO_HAS, + selectors: [], + }); + stack.push([]); + + continue; + } + case "not": { + lex.next(); + lex.expect(Token.BraceOpen); + lex.next(); + + current.push({ + type: PSEUDO_NOT, + selectors: [], + }); + stack.push([]); + + continue; + } + default: + throw new Error(`Unknown pseudo selector: '${lex.value}'`); + } + } else if (lex.token === Token.Comma) { + if (throwOnComma) { + throw new Error(`Multiple selector arguments not supported here`); + } + + lex.next(); + if (lex.token === Token.Space) { + lex.next(); + } + + popSelector(result, stack); + stack.push([]); + continue; + } else if (lex.token === Token.BraceClose) { + throwOnComma = false; + 
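// A closing ")" terminates the pending sub-selector list of
// :nth-child(... of ...), :has()/:is()/:where() or :not();
// popSelector() attaches the finished selector to that node.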
popSelector(result, stack); + } else if (lex.token === Token.Op) { + current.push({ + type: RELATION_NODE, + op: getAttrOp(lex.value), + }); + } + + lex.next(); + } + + if (stack.length > 0) { + result.push(stack[0]); + } + + return result; +} + +/** + * @param {Selector[]} result + * @param {Selector[]} stack + */ +function popSelector(result, stack) { + const sel = /** @type {Selector} */ (stack.pop()); + + if (stack.length === 0) { + result.push(sel); + stack.push([]); + } else { + const prev = /** @type {Selector} */ (stack.at(-1)); + if (prev.length === 0) { + throw new Error(`Empty selector`); + } + + const node = prev.at(-1); + if (node === undefined) { + throw new Error(`Empty node`); + } + + if (node.type === PSEUDO_NTH_CHILD) { + node.of = sel; + } else if (node.type === PSEUDO_HAS || node.type === PSEUDO_NOT) { + node.selectors.push(sel); + } else { + throw new Error(`Multiple selectors not allowed here`); + } + } +} + +const TRUE_FN = () => { + return true; +}; + +/** + * @param {Selector} selector + * @returns {MatcherFn} + */ +export function compileSelector(selector) { + /** @type {MatcherFn} */ + let fn = TRUE_FN; + + for (let i = 0; i < selector.length; i++) { + const node = selector[i]; + + switch (node.type) { + case ELEM_NODE: + fn = matchElem(node, fn); + break; + case RELATION_NODE: + switch (node.op) { + case BinOp.Space: + fn = matchDescendant(fn); + break; + case BinOp.Greater: + fn = matchChild(fn); + break; + case BinOp.Plus: + fn = matchAdjacent(fn); + break; + case BinOp.Tilde: + fn = matchFollowing(fn); + break; + default: + throw new Error(`Unknown relation op ${node.op}`); + } + break; + case ATTR_EXISTS_NODE: + fn = matchAttrExists(node, fn); + break; + case ATTR_BIN_NODE: + fn = matchAttrBin(node, fn); + break; + case PSEUDO_FIRST_CHILD: + fn = matchFirstChild(fn); + break; + case PSEUDO_LAST_CHILD: + fn = matchLastChild(fn); + break; + case PSEUDO_NTH_CHILD: + fn = matchNthChild(node, fn); + break; + case PSEUDO_HAS: + // FIXME + // fn = matchIs(part, fn); + throw new Error("TODO: :has"); + case PSEUDO_NOT: + fn = matchNot(node.selectors, fn); + break; + default: + // @ts-ignore error handling + // deno-lint-ignore no-console + console.log(node); + throw new Error(`Unknown selector node`); + } + } + + return fn; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchFirstChild(next) { + return (ctx, id) => { + const parent = ctx.getParent(id); + const first = ctx.getFirstChild(parent); + return first === id && next(ctx, first); + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchLastChild(next) { + return (ctx, id) => { + const parent = ctx.getParent(id); + const last = ctx.getLastChild(parent); + return last === id && next(ctx, id); + }; +} + +/** + * @param {PseudoNthChild} node + * @param {number} i + * @returns {number} + */ +function getNthAnB(node, i) { + const n = node.step * i; + + if (node.op === null) return n; + + switch (node.op) { + case "+": + return n + node.stepOffset; + case "-": + return n - node.stepOffset; + default: + throw new Error("Not supported nth-child operator: " + node.op); + } +} + +/** + * @param {PseudoNthChild} node + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchNthChild(node, next) { + const ofSelector = node.of !== null ? 
compileSelector(node.of) : TRUE_FN; + + // TODO(@marvinhagemeister): we should probably cache results here + + return (ctx, id) => { + const siblings = ctx.getSiblings(id); + const idx = siblings.indexOf(id); + + if (!node.repeat) { + return idx === node.stepOffset && next(ctx, id); + } + + for (let i = 0; i < siblings.length; i++) { + const n = getNthAnB(node, i); + + if (n > siblings.length - 1) return false; + + const search = siblings[n]; + if (id === search) { + if (node.of !== null && !ofSelector(ctx, id)) { + continue; + } else if (next(ctx, id)) { + return true; + } + } else if (n > idx) { + return false; + } + } + + return false; + }; +} + +/** + * @param {Selector[]} selectors + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchNot(selectors, next) { + /** @type {MatcherFn[]} */ + const compiled = []; + + for (let i = 0; i < selectors.length; i++) { + const sel = selectors[i]; + compiled.push(compileSelector(sel)); + } + + return (ctx, id) => { + for (let i = 0; i < compiled.length; i++) { + const fn = compiled[i]; + if (fn(ctx, id)) { + return false; + } + } + + return next(ctx, id); + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchDescendant(next) { + // TODO(@marvinhagemeister): we should probably cache results here + return (ctx, id) => { + let current = ctx.getParent(id); + while (current > 0) { + if (next(ctx, current)) { + return true; + } + + current = ctx.getParent(current); + } + + return false; + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchChild(next) { + return (ctx, id) => { + const parent = ctx.getParent(id); + if (parent < 0) return false; + + return next(ctx, parent); + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchAdjacent(next) { + return (ctx, id) => { + const siblings = ctx.getSiblings(id); + const idx = siblings.indexOf(id) - 1; + + if (idx < 0) return false; + + const prev = siblings[idx]; + return next(ctx, prev); + }; +} + +/** + * @param {NextFn} next + * @returns {MatcherFn} + */ +function matchFollowing(next) { + return (ctx, id) => { + const siblings = ctx.getSiblings(id); + const idx = siblings.indexOf(id) - 1; + + if (idx < 0) return false; + + for (let i = idx; i >= 0; i--) { + const sib = siblings[i]; + if (next(ctx, sib)) return true; + } + + return false; + }; +} + +/** + * @param {ElemSelector} part + * @param {MatcherFn} next + * @returns {MatcherFn} + */ +function matchElem(part, next) { + return (ctx, id) => { + // Placeholder node cannot be matched + if (id === 0) return false; + // Wildcard always matches + else if (part.wildcard) return next(ctx, id); + // 0 means it's the placeholder node which + // can never be matched. + else if (part.elem === 0) return false; + + const type = ctx.getType(id); + if (type > 0 && type === part.elem) return next(ctx, id); + + return false; + }; +} + +/** + * @param {AttrExists} attr + * @param {MatcherFn} next + * @returns {MatcherFn} + */ +function matchAttrExists(attr, next) { + return (ctx, id) => { + return ctx.hasAttrPath(id, attr.prop, 0) ? 
next(ctx, id) : false; +  }; +} + +/** + * @param {AttrBin} attr + * @param {MatcherFn} next + * @returns {MatcherFn} + */ +function matchAttrBin(attr, next) { + return (ctx, id) => { + if (!ctx.hasAttrPath(id, attr.prop, 0)) return false; + const value = ctx.getAttrPathValue(id, attr.prop, 0); + if (!matchAttrValue(attr, value)) return false; + return next(ctx, id); + }; +} + +/** + * @param {AttrBin} attr + * @param {*} value + * @returns {boolean} + */ +function matchAttrValue(attr, value) { + switch (attr.op) { + case BinOp.Equal: + return value === attr.value; + case BinOp.NotEqual: + return value !== attr.value; + case BinOp.Greater: + return typeof value === "number" && typeof attr.value === "number" && + value > attr.value; + case BinOp.GreaterThan: + return typeof value === "number" && typeof attr.value === "number" && + value >= attr.value; + case BinOp.Less: + return typeof value === "number" && typeof attr.value === "number" && + value < attr.value; + case BinOp.LessThan: + return typeof value === "number" && typeof attr.value === "number" && + value <= attr.value; + default: + return false; + } +} diff --git a/cli/js/40_lint_types.d.ts b/cli/js/40_lint_types.d.ts new file mode 100644 index 0000000000..7b06e36098 --- /dev/null +++ b/cli/js/40_lint_types.d.ts @@ -0,0 +1,132 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +export interface NodeFacade { + type: string; + range: [number, number]; + [key: string]: unknown; +} + +export interface AstContext { + buf: Uint8Array; + strTable: Map<number, string>; + strTableOffset: number; + rootOffset: number; + nodes: Map<number, NodeFacade>; + strByType: number[]; + strByProp: number[]; + typeByStr: Map<string, number>; + propByStr: Map<string, number>; + matcher: MatchContext; +} + +// TODO(@marvinhagemeister) Remove once we land "official" types +export interface RuleContext { + id: string; +} + +// TODO(@marvinhagemeister) Remove once we land "official" types +export interface LintRule { + create(ctx: RuleContext): Record<string, (node: unknown) => void>; + destroy?(ctx: RuleContext): void; +} + +// TODO(@marvinhagemeister) Remove once we land "official" types +export interface LintPlugin { + name: string; + rules: Record<string, LintRule>; +} + +export interface LintState { + plugins: LintPlugin[]; + installedPlugins: Set<string>; +} + +export type VisitorFn = (node: unknown) => void; + +export interface CompiledVisitor { + matcher: (ctx: MatchContext, offset: number) => boolean; + info: { enter: VisitorFn; exit: VisitorFn }; +} + +export interface AttrExists { + type: 3; + prop: number[]; +} + +export interface AttrBin { + type: 4; + prop: number[]; + op: number; + // deno-lint-ignore no-explicit-any + value: any; +} + +export type AttrSelector = AttrExists | AttrBin; + +export interface ElemSelector { + type: 1; + wildcard: boolean; + elem: number; +} + +export interface PseudoNthChild { + type: 5; + op: string | null; + step: number; + stepOffset: number; + of: Selector | null; + repeat: boolean; +} + +export interface PseudoHas { + type: 6; + selectors: Selector[]; +} +export interface PseudoNot { + type: 7; + selectors: Selector[]; +} +export interface PseudoFirstChild { + type: 8; +} +export interface PseudoLastChild { + type: 9; +} + +export interface Relation { + type: 2; + op: number; +} + +export type Selector = Array< + | ElemSelector + | Relation + | AttrExists + | AttrBin + | PseudoNthChild + | PseudoNot + | PseudoHas + | PseudoFirstChild + | PseudoLastChild >; + +export interface SelectorParseCtx { + root: Selector; + current: Selector; +} + +export interface MatchContext { + getFirstChild(id: number): number; + getLastChild(id: number): number; + getSiblings(id: number): number[]; + getParent(id: number): number; + getType(id: number): number; + hasAttrPath(id: number, propIds: number[], idx: number): boolean; + getAttrPathValue(id: number, propIds: number[], idx: number): unknown; +} + +export type NextFn = (ctx: MatchContext, id: number) => boolean; +export type MatcherFn = (ctx: MatchContext, id: number) => boolean; +export type TransformFn = (value: string) => number; + +export {}; diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 9f9cf14864..8fb3454bc8 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -36,6 +36,8 @@ use deno_semver::package::PackageNv; use deno_semver::package::PackageNvReference; use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; +use deno_semver::SmallStackString; +use deno_semver::StackString; use deno_semver::Version; use import_map::ImportMap; use node_resolver::NodeResolutionKind; @@ -278,9 +280,16 @@ impl<'a> TsResponseImportMapper<'a> { { let mut segments = jsr_path.split('/'); let name = if jsr_path.starts_with('@') { - format!("{}/{}", segments.next()?, segments.next()?) + let scope = segments.next()?; + let name = segments.next()?; + capacity_builder::StringBuilder::<StackString>::build(|builder| { + builder.append(scope); + builder.append("/"); + builder.append(name); + }) + .unwrap() } else { - segments.next()?.to_string() + StackString::from(segments.next()?) }; let version = Version::parse_standard(segments.next()?).ok()?; let nv = PackageNv { name, version }; @@ -290,7 +299,9 @@ impl<'a> TsResponseImportMapper<'a> { &path, Some(&self.file_referrer), )?; - let sub_path = (export != ".").then_some(export) + let sub_path = (export != ".") + .then_some(export) + .map(SmallStackString::from_string); let mut req = None; req = req.or_else(|| { let import_map = self.maybe_import_map?; @@ -603,18 +614,24 @@ fn try_reverse_map_package_json_exports( /// For a set of tsc changes, scan them for any that contain something that looks /// like an import and rewrite the import specifier to include the extension pub fn fix_ts_import_changes( - referrer: &ModuleSpecifier, - resolution_mode: ResolutionMode, changes: &[tsc::FileTextChanges], language_server: &language_server::Inner, ) -> Result<Vec<tsc::FileTextChanges>, AnyError> { - let import_mapper = language_server.get_ts_response_import_mapper(referrer); let mut r = Vec::new(); for change in changes { + let Ok(referrer) = ModuleSpecifier::parse(&change.file_name) else { + continue; + }; + let referrer_doc = language_server.get_asset_or_document(&referrer).ok(); + let resolution_mode = referrer_doc + .as_ref() + .map(|d| d.resolution_mode()) + .unwrap_or(ResolutionMode::Import); + let import_mapper = + language_server.get_ts_response_import_mapper(&referrer); let mut text_changes = Vec::new(); for text_change in &change.text_changes { let lines = text_change.new_text.split('\n'); - let new_lines: Vec<String> = lines .map(|line| { // This assumes that there's only one import per line.
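// Example: `StackString` and `SmallStackString` come from `deno_semver` and store
// short strings such as package names inline rather than on the heap. A minimal
// sketch of the builder pattern used in the hunk above, assuming the
// `capacity_builder` API behaves as shown there (`scoped_name` is an illustrative
// helper, not part of this change):
//
//   use capacity_builder::StringBuilder;
//   use deno_semver::StackString;
//
//   fn scoped_name(scope: &str, name: &str) -> StackString {
//     StringBuilder::<StackString>::build(|builder| {
//       builder.append(scope);
//       builder.append("/");
//       builder.append(name);
//     })
//     .unwrap() // build() returns a Result, matching the .unwrap() above
//   }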
@@ -622,7 +639,7 @@ pub fn fix_ts_import_changes( let specifier = captures.iter().skip(1).find_map(|s| s).unwrap().as_str(); if let Some(new_specifier) = import_mapper - .check_unresolved_specifier(specifier, referrer, resolution_mode) + .check_unresolved_specifier(specifier, &referrer, resolution_mode) { line.replace(specifier, &new_specifier) } else { diff --git a/cli/lsp/cache.rs b/cli/lsp/cache.rs index fbf9ea6f1b..c6d1b39ef4 100644 --- a/cli/lsp/cache.rs +++ b/cli/lsp/cache.rs @@ -11,6 +11,7 @@ use crate::lsp::logging::lsp_warn; use deno_core::url::Url; use deno_core::ModuleSpecifier; use deno_path_util::url_to_file_path; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use std::collections::BTreeMap; use std::fs; use std::path::Path; @@ -94,8 +95,8 @@ impl LspCache { let deno_dir = DenoDir::new(global_cache_path) .expect("should be infallible with absolute custom root"); let global = Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), deno_dir.remote_folder_path(), - crate::cache::RealDenoCacheEnv, )); Self { deno_dir, diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index e3148f22db..27ca6638a9 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -9,8 +9,6 @@ use deno_config::deno_json::LintConfig; use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::TestConfig; use deno_config::deno_json::TsConfig; -use deno_config::fs::DenoConfigFs; -use deno_config::fs::RealDenoConfigFs; use deno_config::glob::FilePatterns; use deno_config::glob::PathOrPatternSet; use deno_config::workspace::CreateResolverOptions; @@ -38,10 +36,10 @@ use deno_lint::linter::LintConfig as DenoLintConfig; use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonCache; use deno_path_util::url_to_file_path; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::PackageJson; use indexmap::IndexSet; use lsp_types::ClientCapabilities; -use std::borrow::Cow; use std::collections::BTreeMap; use std::collections::BTreeSet; use std::collections::HashMap; @@ -1220,7 +1218,6 @@ impl ConfigData { settings: &Settings, file_fetcher: &Arc, // sync requirement is because the lsp requires sync - cached_deno_config_fs: &(dyn DenoConfigFs + Sync), deno_json_cache: &(dyn DenoJsonCache + Sync), pkg_json_cache: &(dyn PackageJsonCache + Sync), workspace_cache: &(dyn WorkspaceCache + Sync), @@ -1230,6 +1227,7 @@ impl ConfigData { Ok(scope_dir_path) => { let paths = [scope_dir_path]; WorkspaceDirectory::discover( + &FsSysTraitsAdapter::new_real(), match specified_config { Some(config_path) => { deno_config::workspace::WorkspaceDiscoverStart::ConfigFile( @@ -1241,7 +1239,6 @@ impl ConfigData { } }, &WorkspaceDiscoverOptions { - fs: cached_deno_config_fs, additional_config_file_names: &[], deno_json_cache: Some(deno_json_cache), pkg_json_cache: Some(pkg_json_cache), @@ -1620,9 +1617,9 @@ impl ConfigData { || unstable.contains("sloppy-imports"); let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { Arc::new(CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new_without_stat_cache(Arc::new( - deno_runtime::deno_fs::RealFs, - )), + SloppyImportsCachedFs::new_without_stat_cache( + FsSysTraitsAdapter::new_real(), + ), )) }); let resolver = Arc::new(resolver); @@ -1841,7 +1838,6 @@ impl ConfigTree { // since we're resolving a workspace multiple times in different // folders, we want to cache all the lookups and config files across // ConfigData::load calls - let cached_fs = CachedDenoConfigFs::default(); let deno_json_cache = DenoJsonMemCache::default(); 
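// Example: the per-refresh mem caches above let one workspace discovery pass reuse
// deno.json/package.json lookups across folders. A minimal sketch of that
// memoization idea (illustrative type, not the actual cache implementations in
// this file):
//
//   use std::cell::RefCell;
//   use std::collections::HashMap;
//   use std::path::{Path, PathBuf};
//
//   #[derive(Default)]
//   struct MemoCache<V: Clone> {
//     items: RefCell<HashMap<PathBuf, V>>,
//   }
//
//   impl<V: Clone> MemoCache<V> {
//     fn get_or_compute(&self, path: &Path, compute: impl FnOnce() -> V) -> V {
//       if let Some(v) = self.items.borrow().get(path) {
//         return v.clone();
//       }
//       let v = compute();
//       self.items.borrow_mut().insert(path.to_path_buf(), v.clone());
//       v
//     }
//   }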
let pkg_json_cache = PackageJsonMemCache::default(); let workspace_cache = WorkspaceMemCache::default(); @@ -1866,7 +1862,6 @@ impl ConfigTree { folder_uri, settings, file_fetcher, - &cached_fs, &deno_json_cache, &pkg_json_cache, &workspace_cache, @@ -1897,7 +1892,6 @@ impl ConfigTree { &scope, settings, file_fetcher, - &cached_fs, &deno_json_cache, &pkg_json_cache, &workspace_cache, @@ -1914,7 +1908,6 @@ impl ConfigTree { member_scope, settings, file_fetcher, - &cached_fs, &deno_json_cache, &pkg_json_cache, &workspace_cache, @@ -1931,7 +1924,7 @@ impl ConfigTree { pub async fn inject_config_file(&mut self, config_file: ConfigFile) { let scope = config_file.specifier.join(".").unwrap(); let json_text = serde_json::to_string(&config_file.json).unwrap(); - let test_fs = deno_runtime::deno_fs::InMemoryFs::default(); + let test_fs = Arc::new(deno_runtime::deno_fs::InMemoryFs::default()); let config_path = url_to_file_path(&config_file.specifier).unwrap(); test_fs.setup_text_files(vec![( config_path.to_string_lossy().to_string(), @@ -1939,11 +1932,11 @@ impl ConfigTree { )]); let workspace_dir = Arc::new( WorkspaceDirectory::discover( + &FsSysTraitsAdapter(test_fs.clone()), deno_config::workspace::WorkspaceDiscoverStart::ConfigFile( &config_path, ), &deno_config::workspace::WorkspaceDiscoverOptions { - fs: &crate::args::deno_json::DenoConfigFsAdapter(&test_fs), ..Default::default() }, ) @@ -2077,78 +2070,6 @@ impl deno_config::workspace::WorkspaceCache for WorkspaceMemCache { } } -#[derive(Default)] -struct CachedFsItems { - items: HashMap>, -} - -impl CachedFsItems { - pub fn get( - &mut self, - path: &Path, - action: impl FnOnce(&Path) -> Result, - ) -> Result { - let value = if let Some(value) = self.items.get(path) { - value - } else { - let value = action(path); - // just in case this gets really large for some reason - if self.items.len() == 16_384 { - return value; - } - self.items.insert(path.to_owned(), value); - self.items.get(path).unwrap() - }; - value - .as_ref() - .map(|v| (*v).clone()) - .map_err(|e| std::io::Error::new(e.kind(), e.to_string())) - } -} - -#[derive(Default)] -struct InnerData { - stat_calls: CachedFsItems, - read_to_string_calls: CachedFsItems>, -} - -#[derive(Default)] -struct CachedDenoConfigFs(Mutex); - -impl DenoConfigFs for CachedDenoConfigFs { - fn stat_sync( - &self, - path: &Path, - ) -> Result { - self - .0 - .lock() - .stat_calls - .get(path, |path| RealDenoConfigFs.stat_sync(path)) - } - - fn read_to_string_lossy( - &self, - path: &Path, - ) -> Result, std::io::Error> { - self - .0 - .lock() - .read_to_string_calls - .get(path, |path| RealDenoConfigFs.read_to_string_lossy(path)) - } - - fn read_dir( - &self, - path: &Path, - ) -> Result, std::io::Error> { - // no need to cache these because the workspace cache will ensure - // we only do read_dir calls once (read_dirs are only used for - // npm workspace resolution) - RealDenoConfigFs.read_dir(path) - } -} - #[cfg(test)] mod tests { use deno_config::deno_json::ConfigParseOptions; diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 804cebfb9b..33fd4897c4 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -48,7 +48,7 @@ use deno_graph::SpecifierError; use deno_lint::linter::LintConfig as DenoLintConfig; use deno_resolver::sloppy_imports::SloppyImportsResolution; use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; -use deno_runtime::deno_fs; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node; use 
deno_runtime::tokio_util::create_basic_runtime; use deno_semver::jsr::JsrPackageReqReference; @@ -1281,7 +1281,7 @@ impl DenoDiagnostic { Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("npm package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))), Self::NoLocal(specifier) => { let maybe_sloppy_resolution = CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs)) + SloppyImportsCachedFs::new(FsSysTraitsAdapter::new_real()) ).resolve(specifier, SloppyImportsResolutionKind::Execution); let data = maybe_sloppy_resolution.as_ref().map(|res| { json!({ diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index bdb64c9da3..d15cfe5a6c 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -251,6 +251,13 @@ impl AssetOrDocument { pub fn document_lsp_version(&self) -> Option { self.document().and_then(|d| d.maybe_lsp_version()) } + + pub fn resolution_mode(&self) -> ResolutionMode { + match self { + AssetOrDocument::Asset(_) => ResolutionMode::Import, + AssetOrDocument::Document(d) => d.resolution_mode(), + } + } } type ModuleResult = Result; diff --git a/cli/lsp/jsr.rs b/cli/lsp/jsr.rs index 1d012b42f0..fc30de2ae0 100644 --- a/cli/lsp/jsr.rs +++ b/cli/lsp/jsr.rs @@ -18,6 +18,7 @@ use deno_graph::ModuleSpecifier; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use deno_semver::StackString; use deno_semver::Version; use serde::Deserialize; use std::collections::HashMap; @@ -33,8 +34,8 @@ pub struct JsrCacheResolver { /// The `module_graph` fields of the version infos should be forcibly absent. /// It can be large and we don't want to store it. info_by_nv: DashMap>>, - info_by_name: DashMap>>, - workspace_scope_by_name: HashMap, + info_by_name: DashMap>>, + workspace_scope_by_name: HashMap, cache: Arc, } @@ -59,7 +60,7 @@ impl JsrCacheResolver { continue; }; let nv = PackageNv { - name: jsr_pkg_config.name.clone(), + name: jsr_pkg_config.name.as_str().into(), version: version.clone(), }; info_by_name.insert( @@ -125,8 +126,8 @@ impl JsrCacheResolver { return nv.value().clone(); } let maybe_get_nv = || { - let name = req.name.clone(); - let package_info = self.package_info(&name)?; + let name = &req.name; + let package_info = self.package_info(name)?; // Find the first matching version of the package which is cached. 
let mut versions = package_info.versions.keys().collect::<Vec<_>>(); versions.sort(); @@ -144,7 +145,10 @@ self.package_version_info(&nv).is_some() }) .cloned()?; - Some(PackageNv { name, version }) + Some(PackageNv { + name: name.clone(), + version, + }) }; let nv = maybe_get_nv(); self.nv_by_req.insert(req.clone(), nv.clone()); @@ -216,7 +220,10 @@ None } - pub fn package_info(&self, name: &str) -> Option<Arc<JsrPackageInfo>> { + pub fn package_info( + &self, + name: &StackString, + ) -> Option<Arc<JsrPackageInfo>> { if let Some(info) = self.info_by_name.get(name) { return info.value().clone(); } @@ -226,7 +233,7 @@ serde_json::from_slice::<JsrPackageInfo>(&meta_bytes).ok() }; let info = read_cached_package_info().map(Arc::new); - self.info_by_name.insert(name.to_string(), info.clone()); + self.info_by_name.insert(name.clone(), info.clone()); info } diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index c7c3c07e07..372ad98e2f 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -17,6 +17,7 @@ use deno_core::ModuleSpecifier; use deno_graph::GraphKind; use deno_graph::Resolution; use deno_path_util::url_to_file_path; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::RootCertStoreProvider; use deno_semver::jsr::JsrPackageReqReference; @@ -279,7 +280,7 @@ impl LanguageServer { .await?; graph_util::graph_valid( &graph, - factory.fs(), + &FsSysTraitsAdapter(factory.fs().clone()), &roots, graph_util::GraphValidOptions { kind: GraphKind::All, @@ -1855,20 +1856,12 @@ impl Inner { } let changes = if code_action_data.fix_id == "fixMissingImport" { - fix_ts_import_changes( - &code_action_data.specifier, - maybe_asset_or_doc - .as_ref() - .and_then(|d| d.document()) - .map(|d| d.resolution_mode()) - .unwrap_or(ResolutionMode::Import), - &combined_code_actions.changes, - self, - ) - .map_err(|err| { - error!("Unable to remap changes: {:#}", err); - LspError::internal_error() - })? + fix_ts_import_changes(&combined_code_actions.changes, self).map_err( + |err| { + error!("Unable to remap changes: {:#}", err); + LspError::internal_error() + }, + )? } else { combined_code_actions.changes }; @@ -1912,20 +1905,16 @@ impl Inner { asset_or_doc.scope().cloned(), ) .await?; - if kind_suffix == ".rewrite.function.returnType" { - refactor_edit_info.edits = fix_ts_import_changes( - &action_data.specifier, - asset_or_doc - .document() - .map(|d| d.resolution_mode()) - .unwrap_or(ResolutionMode::Import), - &refactor_edit_info.edits, - self, - ) - .map_err(|err| { - error!("Unable to remap changes: {:#}", err); - LspError::internal_error() - })? + if kind_suffix == ".rewrite.function.returnType" + || kind_suffix == ".move.newFile" + { + refactor_edit_info.edits = + fix_ts_import_changes(&refactor_edit_info.edits, self).map_err( + |err| { + error!("Unable to remap changes: {:#}", err); + LspError::internal_error() + }, + )?
} code_action.edit = refactor_edit_info.to_workspace_edit(self)?; code_action @@ -3624,11 +3613,11 @@ impl Inner { let workspace = match config_data { Some(d) => d.member_dir.clone(), None => Arc::new(WorkspaceDirectory::discover( + &FsSysTraitsAdapter::new_real(), deno_config::workspace::WorkspaceDiscoverStart::Paths(&[ initial_cwd.clone() ]), &WorkspaceDiscoverOptions { - fs: Default::default(), // use real fs, deno_json_cache: None, pkg_json_cache: None, workspace_cache: None, @@ -3794,7 +3783,7 @@ impl Inner { for (name, command) in scripts { result.push(TaskDefinition { name: name.clone(), - command: command.clone(), + command: Some(command.clone()), source_uri: url_to_uri(&package_json.specifier()) .map_err(|_| LspError::internal_error())?, }); diff --git a/cli/lsp/lsp_custom.rs b/cli/lsp/lsp_custom.rs index 74c6aca88b..8df4ba1d07 100644 --- a/cli/lsp/lsp_custom.rs +++ b/cli/lsp/lsp_custom.rs @@ -14,7 +14,7 @@ pub const LATEST_DIAGNOSTIC_BATCH_INDEX: &str = #[serde(rename_all = "camelCase")] pub struct TaskDefinition { pub name: String, - pub command: String, + pub command: Option<String>, pub source_uri: lsp::Uri, } diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs index 067f201829..488e333e9d 100644 --- a/cli/lsp/registries.rs +++ b/cli/lsp/registries.rs @@ -32,6 +32,7 @@ use deno_core::url::Position; use deno_core::url::Url; use deno_core::ModuleSpecifier; use deno_graph::Dependency; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use log::error; use once_cell::sync::Lazy; use std::borrow::Cow; @@ -430,8 +431,8 @@ impl ModuleRegistry { ) -> Self { // the http cache should always be the global one for registry completions let http_cache = Arc::new(GlobalHttpCache::new( + FsSysTraitsAdapter::new_real(), location.clone(), - crate::cache::RealDenoCacheEnv, )); let file_fetcher = CliFileFetcher::new( http_cache.clone(), diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index 7948fdec7b..5295b2d41c 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -19,9 +19,10 @@ use deno_resolver::cjs::IsCjsResolutionMode; use deno_resolver::npm::NpmReqResolverOptions; use deno_resolver::DenoResolverOptions; use deno_resolver::NodeAndNpmReqResolver; -use deno_runtime::deno_fs; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJsonResolver; +use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; @@ -42,7 +43,6 @@ use super::jsr::JsrCacheResolver; use crate::args::create_default_npmrc; use crate::args::CliLockfile; use crate::args::NpmInstallDepsProvider; -use crate::cache::DenoCacheEnvFsAdapter; use crate::factory::Deferred; use crate::graph_util::to_node_resolution_kind; use crate::graph_util::to_node_resolution_mode; @@ -61,7 +61,6 @@ use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CreateInNpmPkgCheckerOptions; use crate::npm::ManagedCliNpmResolver; use crate::resolver::CliDenoResolver; -use crate::resolver::CliDenoResolverFs; use crate::resolver::CliNpmReqResolver; use crate::resolver::CliResolver; use crate::resolver::CliResolverOptions; @@ -599,21 +598,19 @@ struct ResolverFactoryServices { struct ResolverFactory<'a> { config_data: Option<&'a Arc<ConfigData>>, - fs: Arc<dyn deno_fs::FileSystem>, pkg_json_resolver: Arc<PackageJsonResolver>, + sys: FsSysTraitsAdapter, services: ResolverFactoryServices, } impl<'a> ResolverFactory<'a> { pub fn new(config_data: Option<&'a Arc<ConfigData>>) -> Self { - let fs =
Arc::new(deno_fs::RealFs); - let pkg_json_resolver = Arc::new(PackageJsonResolver::new( - deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), - )); + let sys = FsSysTraitsAdapter::new_real(); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new(sys.clone())); Self { config_data, - fs, pkg_json_resolver, + sys, services: Default::default(), } } @@ -624,9 +621,10 @@ impl<'a> ResolverFactory<'a> { cache: &LspCache, ) { let enable_byonm = self.config_data.map(|d| d.byonm).unwrap_or(false); + let sys = FsSysTraitsAdapter::new_real(); let options = if enable_byonm { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)), + sys, pkg_json_resolver: self.pkg_json_resolver.clone(), root_node_modules_dir: self.config_data.and_then(|config_data| { config_data.node_modules_dir.clone().or_else(|| { @@ -642,12 +640,14 @@ impl<'a> ResolverFactory<'a> { .and_then(|d| d.npmrc.clone()) .unwrap_or_else(create_default_npmrc); let npm_cache_dir = Arc::new(NpmCacheDir::new( - &DenoCacheEnvFsAdapter(self.fs.as_ref()), + &sys, cache.deno_dir().npm_folder_path(), npmrc.get_all_known_registries_urls(), )); CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions { http_client_provider: http_client_provider.clone(), + // only used for top level install, so we can ignore this + npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), snapshot: match self.config_data.and_then(|d| d.lockfile.as_ref()) { Some(lockfile) => { CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( @@ -656,10 +656,7 @@ impl<'a> ResolverFactory<'a> { } None => CliNpmResolverManagedSnapshotOption::Specified(None), }, - // Don't provide the lockfile. We don't want these resolvers - // updating it. Only the cache request should update the lockfile. - maybe_lockfile: None, - fs: Arc::new(deno_fs::RealFs), + sys: FsSysTraitsAdapter::new_real(), npm_cache_dir, // Use an "only" cache setting in order to make the // user do an explicit "cache" command and prevent @@ -667,11 +664,12 @@ impl<'a> ResolverFactory<'a> { // the user is typing. cache_setting: CacheSetting::Only, text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly), + // Don't provide the lockfile. We don't want these resolvers + // updating it. Only the cache request should update the lockfile. 
+ maybe_lockfile: None, maybe_node_modules_path: self .config_data .and_then(|d| d.node_modules_dir.clone()), - // only used for top level install, so we can ignore this - npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), npmrc, npm_system_info: NpmSystemInfo::default(), lifecycle_scripts: Default::default(), @@ -779,10 +777,11 @@ impl<'a> ResolverFactory<'a> { .get_or_init(|| { let npm_resolver = self.services.npm_resolver.as_ref()?; Some(Arc::new(NodeResolver::new( - deno_runtime::deno_node::DenoFsNodeResolverEnv::new(self.fs.clone()), self.in_npm_pkg_checker().clone(), + RealIsBuiltInNodeModuleChecker, npm_resolver.clone().into_npm_pkg_folder_resolver(), self.pkg_json_resolver.clone(), + self.sys.clone(), ))) }) .as_ref() @@ -797,10 +796,10 @@ impl<'a> ResolverFactory<'a> { let npm_resolver = self.npm_resolver()?; Some(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(), - fs: CliDenoResolverFs(self.fs.clone()), in_npm_pkg_checker: self.in_npm_pkg_checker().clone(), node_resolver: node_resolver.clone(), npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(), + sys: self.sys.clone(), }))) }) .as_ref() diff --git a/cli/lsp/search.rs b/cli/lsp/search.rs index 8933eeb186..c98acde6f1 100644 --- a/cli/lsp/search.rs +++ b/cli/lsp/search.rs @@ -67,7 +67,9 @@ pub mod tests { &self, nv: &PackageNv, ) -> Result>, AnyError> { - let Some(exports_by_version) = self.package_versions.get(&nv.name) else { + let Some(exports_by_version) = + self.package_versions.get(nv.name.as_str()) + else { return Err(anyhow!("Package not found.")); }; let Some(exports) = exports_by_version.get(&nv.version) else { diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 93299a28a0..1c7f20d11e 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -11,37 +11,6 @@ use std::sync::atomic::AtomicU16; use std::sync::atomic::Ordering; use std::sync::Arc; -use crate::args::jsr_url; -use crate::args::CliLockfile; -use crate::args::CliOptions; -use crate::args::DenoSubcommand; -use crate::args::TsTypeLib; -use crate::cache::CodeCache; -use crate::cache::FastInsecureHasher; -use crate::cache::ParsedSourceCache; -use crate::emit::Emitter; -use crate::graph_container::MainModuleGraphContainer; -use crate::graph_container::ModuleGraphContainer; -use crate::graph_container::ModuleGraphUpdatePermit; -use crate::graph_util::CreateGraphOptions; -use crate::graph_util::ModuleGraphBuilder; -use crate::node; -use crate::node::CliNodeCodeTranslator; -use crate::npm::CliNpmResolver; -use crate::resolver::CjsTracker; -use crate::resolver::CliNpmReqResolver; -use crate::resolver::CliResolver; -use crate::resolver::ModuleCodeStringSource; -use crate::resolver::NotSupportedKindInNpmError; -use crate::resolver::NpmModuleLoader; -use crate::tools::check; -use crate::tools::check::MaybeDiagnostics; -use crate::tools::check::TypeChecker; -use crate::util::progress_bar::ProgressBar; -use crate::util::text_encoding::code_without_source_map; -use crate::util::text_encoding::source_map_from_code; -use crate::worker::CreateModuleLoaderResult; -use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; use deno_ast::ModuleKind; use deno_core::anyhow::anyhow; @@ -70,7 +39,7 @@ use deno_graph::ModuleGraph; use deno_graph::Resolution; use deno_graph::WasmModule; use deno_runtime::code_cache; -use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::create_host_defined_options; use 
deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_node::NodeResolver; @@ -80,6 +49,38 @@ use node_resolver::errors::ClosestPkgJsonError; use node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; +use sys_traits::FsRead; + +use crate::args::jsr_url; +use crate::args::CliLockfile; +use crate::args::CliOptions; +use crate::args::DenoSubcommand; +use crate::args::TsTypeLib; +use crate::cache::CodeCache; +use crate::cache::FastInsecureHasher; +use crate::cache::ParsedSourceCache; +use crate::emit::Emitter; +use crate::graph_container::MainModuleGraphContainer; +use crate::graph_container::ModuleGraphContainer; +use crate::graph_container::ModuleGraphUpdatePermit; +use crate::graph_util::CreateGraphOptions; +use crate::graph_util::ModuleGraphBuilder; +use crate::node::CliNodeCodeTranslator; +use crate::npm::CliNpmResolver; +use crate::resolver::CjsTracker; +use crate::resolver::CliNpmReqResolver; +use crate::resolver::CliResolver; +use crate::resolver::ModuleCodeStringSource; +use crate::resolver::NotSupportedKindInNpmError; +use crate::resolver::NpmModuleLoader; +use crate::tools::check; +use crate::tools::check::MaybeDiagnostics; +use crate::tools::check::TypeChecker; +use crate::util::progress_bar::ProgressBar; +use crate::util::text_encoding::code_without_source_map; +use crate::util::text_encoding::source_map_from_code; +use crate::worker::CreateModuleLoaderResult; +use crate::worker::ModuleLoaderFactory; pub struct ModuleLoadPreparer { options: Arc, @@ -216,7 +217,6 @@ struct SharedCliModuleLoaderState { cjs_tracker: Arc, code_cache: Option>, emitter: Arc, - fs: Arc, in_npm_pkg_checker: Arc, main_module_graph_container: Arc, module_load_preparer: Arc, @@ -227,6 +227,7 @@ struct SharedCliModuleLoaderState { npm_module_loader: NpmModuleLoader, parsed_source_cache: Arc, resolver: Arc, + sys: FsSysTraitsAdapter, in_flight_loads_tracker: InFlightModuleLoadsTracker, } @@ -276,7 +277,6 @@ impl CliModuleLoaderFactory { cjs_tracker: Arc, code_cache: Option>, emitter: Arc, - fs: Arc, in_npm_pkg_checker: Arc, main_module_graph_container: Arc, module_load_preparer: Arc, @@ -287,6 +287,7 @@ impl CliModuleLoaderFactory { npm_module_loader: NpmModuleLoader, parsed_source_cache: Arc, resolver: Arc, + sys: FsSysTraitsAdapter, ) -> Self { Self { shared: Arc::new(SharedCliModuleLoaderState { @@ -302,7 +303,6 @@ impl CliModuleLoaderFactory { cjs_tracker, code_cache, emitter, - fs, in_npm_pkg_checker, main_module_graph_container, module_load_preparer, @@ -313,6 +313,7 @@ impl CliModuleLoaderFactory { npm_module_loader, parsed_source_cache, resolver, + sys, in_flight_loads_tracker: InFlightModuleLoadsTracker { loads_number: Arc::new(AtomicU16::new(0)), cleanup_task_timeout: 10_000, @@ -345,7 +346,7 @@ impl CliModuleLoaderFactory { let node_require_loader = Rc::new(CliNodeRequireLoader { cjs_tracker: self.shared.cjs_tracker.clone(), emitter: self.shared.emitter.clone(), - fs: self.shared.fs.clone(), + sys: self.shared.sys.clone(), graph_container, in_npm_pkg_checker: self.shared.in_npm_pkg_checker.clone(), npm_resolver: self.shared.npm_resolver.clone(), @@ -594,9 +595,9 @@ impl Some(Module::Json(module)) => module.specifier.clone(), Some(Module::Wasm(module)) => module.specifier.clone(), Some(Module::External(module)) => { - node::resolve_specifier_into_node_modules( + node_resolver::resolve_specifier_into_node_modules( + &self.shared.sys, &module.specifier, - self.shared.fs.as_ref(), ) } None => specifier.into_owned(), @@ -997,7 +998,7 @@ 
impl ModuleLoader std::future::ready(()).boxed_local() } - fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> { + fn get_source_map(&self, file_name: &str) -> Option<Cow<[u8]>> { let specifier = resolve_url(file_name).ok()?; match specifier.scheme() { // we should only be looking for emits for schemes that denote external @@ -1009,7 +1010,7 @@ impl ModuleLoader .0 .load_prepared_module_for_source_map_sync(&specifier) .ok()??; - source_map_from_code(source.code.as_bytes()) + source_map_from_code(source.code.as_bytes()).map(Cow::Owned) } fn get_source_mapped_source_line( @@ -1092,7 +1093,7 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit { struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> { cjs_tracker: Arc<CjsTracker>, emitter: Arc<Emitter>, - fs: Arc<dyn FileSystem>, + sys: FsSysTraitsAdapter, graph_container: TGraphContainer, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, npm_resolver: Arc<dyn CliNpmResolver>, @@ -1121,7 +1122,7 @@ impl NodeRequireLoader ) -> Result<Cow<'static, str>, AnyError> { // todo(dsherret): use the preloaded module from the graph if available? let media_type = MediaType::from_path(path); - let text = self.fs.read_text_file_lossy_sync(path, None)?; + let text = self.sys.fs_read_to_string_lossy(path)?; if media_type.is_emittable() { let specifier = deno_path_util::url_from_file_path(path)?; if self.in_npm_pkg_checker.in_npm_package(&specifier) { diff --git a/cli/node.rs b/cli/node.rs index 11959df6b9..480da506c8 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -8,7 +8,8 @@ use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_graph::ParsedSourceStore; use deno_runtime::deno_fs; -use deno_runtime::deno_node::DenoFsNodeResolverEnv; +use deno_runtime::deno_fs::FsSysTraitsAdapter; +use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; use node_resolver::analyze::CjsAnalysisExports; use node_resolver::analyze::CjsCodeAnalyzer; @@ -21,23 +22,11 @@ use crate::cache::NodeAnalysisCache; use crate::cache::ParsedSourceCache; use crate::resolver::CjsTracker; -pub type CliNodeCodeTranslator = - NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>; - -/// Resolves a specifier that is pointing into a node_modules folder. -/// -/// Note: This should be called whenever getting the specifier from -/// a Module::External(module) reference because that module might -/// not be fully resolved at the time deno_graph is analyzing it -/// because the node_modules folder might not exist at that time.
-pub fn resolve_specifier_into_node_modules( - specifier: &ModuleSpecifier, - fs: &dyn deno_fs::FileSystem, -) -> ModuleSpecifier { - node_resolver::resolve_specifier_into_node_modules(specifier, &|path| { - fs.realpath_sync(path).map_err(|err| err.into_io_error()) - }) -} +pub type CliNodeCodeTranslator = NodeCodeTranslator< + CliCjsCodeAnalyzer, + RealIsBuiltInNodeModuleChecker, + FsSysTraitsAdapter, +>; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum CliCjsAnalysis { diff --git a/cli/npm/byonm.rs b/cli/npm/byonm.rs index eca399251b..218f33989d 100644 --- a/cli/npm/byonm.rs +++ b/cli/npm/byonm.rs @@ -9,22 +9,20 @@ use deno_core::serde_json; use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolverCreateOptions; use deno_resolver::npm::CliNpmReqResolver; -use deno_runtime::deno_node::DenoFsNodeResolverEnv; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::NodePermissions; use deno_runtime::ops::process::NpmProcessStateProvider; use node_resolver::NpmPackageFolderResolver; use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; -use crate::resolver::CliDenoResolverFs; use super::CliNpmResolver; use super::InnerCliNpmResolverRef; pub type CliByonmNpmResolverCreateOptions = - ByonmNpmResolverCreateOptions; -pub type CliByonmNpmResolver = - ByonmNpmResolver; + ByonmNpmResolverCreateOptions; +pub type CliByonmNpmResolver = ByonmNpmResolver; // todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple. #[derive(Debug)] diff --git a/cli/npm/managed/mod.rs b/cli/npm/managed/mod.rs index 4545800e99..5b0a304de8 100644 --- a/cli/npm/managed/mod.rs +++ b/cli/npm/managed/mod.rs @@ -21,9 +21,10 @@ use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; use deno_npm::NpmSystemInfo; use deno_npm_cache::NpmCacheSetting; +use deno_path_util::fs::canonicalize_path_maybe_not_exists; use deno_resolver::npm::CliNpmReqResolver; use deno_runtime::colors; -use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::NodePermissions; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; @@ -41,7 +42,6 @@ use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; use crate::args::PackageJsonDepValueParseWithLocationError; use crate::cache::FastInsecureHasher; -use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; use crate::util::progress_bar::ProgressBar; use crate::util::sync::AtomicFlag; @@ -50,7 +50,7 @@ use self::resolvers::create_npm_fs_resolver; use self::resolvers::NpmPackageFsResolver; use super::CliNpmCache; -use super::CliNpmCacheEnv; +use super::CliNpmCacheHttpClient; use super::CliNpmRegistryInfoProvider; use super::CliNpmResolver; use super::CliNpmTarballCache; @@ -68,9 +68,9 @@ pub enum CliNpmResolverManagedSnapshotOption { pub struct CliManagedNpmResolverCreateOptions { pub snapshot: CliNpmResolverManagedSnapshotOption, pub maybe_lockfile: Option>, - pub fs: Arc, pub http_client_provider: Arc, pub npm_cache_dir: Arc, + pub sys: FsSysTraitsAdapter, pub cache_setting: deno_cache_dir::file_fetcher::CacheSetting, pub text_only_progress_bar: crate::util::progress_bar::ProgressBar, pub maybe_node_modules_path: Option, @@ -83,9 +83,12 @@ pub struct CliManagedNpmResolverCreateOptions { pub async fn create_managed_npm_resolver_for_lsp( options: CliManagedNpmResolverCreateOptions, ) -> Arc { - let cache_env = create_cache_env(&options); - let 
npm_cache = create_cache(cache_env.clone(), &options); - let npm_api = create_api(npm_cache.clone(), cache_env.clone(), &options); + let npm_cache = create_cache(&options); + let http_client = Arc::new(CliNpmCacheHttpClient::new( + options.http_client_provider.clone(), + options.text_only_progress_bar.clone(), + )); + let npm_api = create_api(npm_cache.clone(), http_client.clone(), &options); // spawn due to the lsp's `Send` requirement deno_core::unsync::spawn(async move { let snapshot = match resolve_snapshot(&npm_api, options.snapshot).await { @@ -96,14 +99,14 @@ pub async fn create_managed_npm_resolver_for_lsp( } }; create_inner( - cache_env, - options.fs, - options.maybe_lockfile, - npm_api, + http_client, npm_cache, - options.npmrc, options.npm_install_deps_provider, + npm_api, + options.sys, options.text_only_progress_bar, + options.maybe_lockfile, + options.npmrc, options.maybe_node_modules_path, options.npm_system_info, snapshot, @@ -117,19 +120,22 @@ pub async fn create_managed_npm_resolver_for_lsp( pub async fn create_managed_npm_resolver( options: CliManagedNpmResolverCreateOptions, ) -> Result, AnyError> { - let npm_cache_env = create_cache_env(&options); - let npm_cache = create_cache(npm_cache_env.clone(), &options); - let api = create_api(npm_cache.clone(), npm_cache_env.clone(), &options); + let npm_cache = create_cache(&options); + let http_client = Arc::new(CliNpmCacheHttpClient::new( + options.http_client_provider.clone(), + options.text_only_progress_bar.clone(), + )); + let api = create_api(npm_cache.clone(), http_client.clone(), &options); let snapshot = resolve_snapshot(&api, options.snapshot).await?; Ok(create_inner( - npm_cache_env, - options.fs, - options.maybe_lockfile, - api, + http_client, npm_cache, - options.npmrc, options.npm_install_deps_provider, + api, + options.sys, options.text_only_progress_bar, + options.maybe_lockfile, + options.npmrc, options.maybe_node_modules_path, options.npm_system_info, snapshot, @@ -139,14 +145,14 @@ pub async fn create_managed_npm_resolver( #[allow(clippy::too_many_arguments)] fn create_inner( - env: Arc, - fs: Arc, - maybe_lockfile: Option>, - registry_info_provider: Arc, + http_client: Arc, npm_cache: Arc, - npm_rc: Arc, npm_install_deps_provider: Arc, + registry_info_provider: Arc, + sys: FsSysTraitsAdapter, text_only_progress_bar: crate::util::progress_bar::ProgressBar, + maybe_lockfile: Option>, + npm_rc: Arc, node_modules_dir_path: Option, npm_system_info: NpmSystemInfo, snapshot: Option, @@ -159,28 +165,29 @@ fn create_inner( )); let tarball_cache = Arc::new(CliNpmTarballCache::new( npm_cache.clone(), - env, + http_client, + sys.clone(), npm_rc.clone(), )); let fs_resolver = create_npm_fs_resolver( - fs.clone(), npm_cache.clone(), &npm_install_deps_provider, &text_only_progress_bar, resolution.clone(), + sys.clone(), tarball_cache.clone(), node_modules_dir_path, npm_system_info.clone(), lifecycle_scripts.clone(), ); Arc::new(ManagedCliNpmResolver::new( - fs, fs_resolver, maybe_lockfile, registry_info_provider, npm_cache, npm_install_deps_provider, resolution, + sys, tarball_cache, text_only_progress_bar, npm_system_info, @@ -188,36 +195,25 @@ fn create_inner( )) } -fn create_cache_env( - options: &CliManagedNpmResolverCreateOptions, -) -> Arc { - Arc::new(CliNpmCacheEnv::new( - options.fs.clone(), - options.http_client_provider.clone(), - options.text_only_progress_bar.clone(), - )) -} - fn create_cache( - env: Arc, options: &CliManagedNpmResolverCreateOptions, ) -> Arc { Arc::new(CliNpmCache::new( 
options.npm_cache_dir.clone(), + options.sys.clone(), NpmCacheSetting::from_cache_setting(&options.cache_setting), - env, options.npmrc.clone(), )) } fn create_api( cache: Arc, - env: Arc, + http_client: Arc, options: &CliManagedNpmResolverCreateOptions, ) -> Arc { Arc::new(CliNpmRegistryInfoProvider::new( cache, - env, + http_client, options.npmrc.clone(), )) } @@ -306,12 +302,12 @@ pub enum PackageCaching<'a> { /// An npm resolver where the resolution is managed by Deno rather than /// the user bringing their own node_modules (BYONM) on the file system. pub struct ManagedCliNpmResolver { - fs: Arc, fs_resolver: Arc, maybe_lockfile: Option>, registry_info_provider: Arc, npm_cache: Arc, npm_install_deps_provider: Arc, + sys: FsSysTraitsAdapter, resolution: Arc, tarball_cache: Arc, text_only_progress_bar: ProgressBar, @@ -331,20 +327,19 @@ impl std::fmt::Debug for ManagedCliNpmResolver { impl ManagedCliNpmResolver { #[allow(clippy::too_many_arguments)] pub fn new( - fs: Arc, fs_resolver: Arc, maybe_lockfile: Option>, registry_info_provider: Arc, npm_cache: Arc, npm_install_deps_provider: Arc, resolution: Arc, + sys: FsSysTraitsAdapter, tarball_cache: Arc, text_only_progress_bar: ProgressBar, npm_system_info: NpmSystemInfo, lifecycle_scripts: LifecycleScriptsConfig, ) -> Self { Self { - fs, fs_resolver, maybe_lockfile, registry_info_provider, @@ -352,6 +347,7 @@ impl ManagedCliNpmResolver { npm_install_deps_provider, text_only_progress_bar, resolution, + sys, tarball_cache, npm_system_info, top_level_install_flag: Default::default(), @@ -364,8 +360,7 @@ impl ManagedCliNpmResolver { pkg_id: &NpmPackageId, ) -> Result { let path = self.fs_resolver.package_folder(pkg_id)?; - let path = - canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())?; + let path = canonicalize_path_maybe_not_exists(&self.sys, &path)?; log::debug!( "Resolved package folder of {} to {}", pkg_id.as_serialized(), @@ -560,11 +555,11 @@ impl ManagedCliNpmResolver { &self, ) -> Result<(), Box> { for err in self.npm_install_deps_provider.pkg_json_dep_errors() { - match &err.source { - deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => { + match err.source.as_kind() { + deno_package_json::PackageJsonDepValueParseErrorKind::VersionReq(_) => { return Err(Box::new(err.clone())); } - deno_package_json::PackageJsonDepValueParseError::Unsupported { + deno_package_json::PackageJsonDepValueParseErrorKind::Unsupported { .. 
} => { // only warn for this one @@ -667,12 +662,13 @@ impl NpmPackageFolderResolver for ManagedCliNpmResolver { .fs_resolver .resolve_package_folder_from_package(name, referrer)?; let path = - canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref()) - .map_err(|err| PackageFolderResolveIoError { + canonicalize_path_maybe_not_exists(&self.sys, &path).map_err(|err| { + PackageFolderResolveIoError { package_name: name.to_string(), referrer: referrer.clone(), source: err, - })?; + } + })?; log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); Ok(path) } @@ -728,13 +724,12 @@ impl CliNpmResolver for ManagedCliNpmResolver { )); Arc::new(ManagedCliNpmResolver::new( - self.fs.clone(), create_npm_fs_resolver( - self.fs.clone(), self.npm_cache.clone(), &self.npm_install_deps_provider, &self.text_only_progress_bar, npm_resolution.clone(), + self.sys.clone(), self.tarball_cache.clone(), self.root_node_modules_path().map(ToOwned::to_owned), self.npm_system_info.clone(), @@ -745,6 +740,7 @@ impl CliNpmResolver for ManagedCliNpmResolver { self.npm_cache.clone(), self.npm_install_deps_provider.clone(), npm_resolution, + self.sys.clone(), self.tarball_cache.clone(), self.text_only_progress_bar.clone(), self.npm_system_info.clone(), diff --git a/cli/npm/managed/resolution.rs b/cli/npm/managed/resolution.rs index 73c5c31caf..5d9fcf4646 100644 --- a/cli/npm/managed/resolution.rs +++ b/cli/npm/managed/resolution.rs @@ -4,6 +4,7 @@ use std::collections::HashMap; use std::collections::HashSet; use std::sync::Arc; +use capacity_builder::StringBuilder; use deno_core::error::AnyError; use deno_lockfile::NpmPackageDependencyLockfileInfo; use deno_lockfile::NpmPackageLockfileInfo; @@ -24,6 +25,7 @@ use deno_npm::NpmSystemInfo; use deno_semver::jsr::JsrDepPackageReq; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use deno_semver::SmallStackString; use deno_semver::VersionReq; use crate::args::CliLockfile; @@ -336,7 +338,13 @@ fn populate_lockfile_from_snapshot( let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id; lockfile.insert_package_specifier( JsrDepPackageReq::npm(package_req.clone()), - format!("{}{}", id.nv.version, id.peer_deps_serialized()), + { + StringBuilder::::build(|builder| { + builder.append(&id.nv.version); + builder.append(&id.peer_dependencies); + }) + .unwrap() + }, ); } for package in snapshot.all_packages_for_every_system() { diff --git a/cli/npm/managed/resolvers/common.rs b/cli/npm/managed/resolvers/common.rs index 68e95fb39a..83081d3b8e 100644 --- a/cli/npm/managed/resolvers/common.rs +++ b/cli/npm/managed/resolvers/common.rs @@ -11,7 +11,6 @@ use std::path::PathBuf; use std::sync::Arc; use std::sync::Mutex; -use super::super::PackageCaching; use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_core::anyhow::Context; @@ -21,10 +20,12 @@ use deno_core::futures::StreamExt; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; -use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::NodePermissions; use node_resolver::errors::PackageFolderResolveError; +use sys_traits::FsCanonicalize; +use super::super::PackageCaching; use crate::npm::CliNpmTarballCache; /// Part of the resolution that interacts with the file system. 
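// Example: with this migration the npm resolvers do filesystem work through
// `sys_traits` trait bounds on `FsSysTraitsAdapter` rather than through an
// `Arc<dyn FileSystem>`. A minimal sketch of code written against those bounds,
// assuming `FsSysTraitsAdapter` implements `FsCanonicalize` as the
// `fs_canonicalize` call below suggests (`resolve_real_dir` is an illustrative
// helper, not part of this change):
//
//   use std::io;
//   use std::path::{Path, PathBuf};
//   use sys_traits::FsCanonicalize;
//
//   fn resolve_real_dir<TSys: FsCanonicalize>(
//     sys: &TSys,
//     path: &Path,
//   ) -> io::Result<PathBuf> {
//     // Follows symlinks, e.g. the ones a local node_modules sets up.
//     sys.fs_canonicalize(path)
//   }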
@@ -73,15 +74,15 @@ pub trait NpmPackageFsResolver: Send + Sync { #[derive(Debug)] pub struct RegistryReadPermissionChecker { - fs: Arc, + sys: FsSysTraitsAdapter, cache: Mutex>, registry_path: PathBuf, } impl RegistryReadPermissionChecker { - pub fn new(fs: Arc, registry_path: PathBuf) -> Self { + pub fn new(fs: FsSysTraitsAdapter, registry_path: PathBuf) -> Self { Self { - fs, + sys: fs, registry_path, cache: Default::default(), } @@ -108,7 +109,7 @@ impl RegistryReadPermissionChecker { |path: &Path| -> Result, AnyError> { match cache.get(path) { Some(canon) => Ok(Some(canon.clone())), - None => match self.fs.realpath_sync(path) { + None => match self.sys.fs_canonicalize(path) { Ok(canon) => { cache.insert(path.to_path_buf(), canon.clone()); Ok(Some(canon)) diff --git a/cli/npm/managed/resolvers/common/bin_entries.rs b/cli/npm/managed/resolvers/common/bin_entries.rs index e4a1845689..ca47b9a086 100644 --- a/cli/npm/managed/resolvers/common/bin_entries.rs +++ b/cli/npm/managed/resolvers/common/bin_entries.rs @@ -28,8 +28,10 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str { .id .nv .name + .as_str() .rsplit_once('/') - .map_or(package.id.nv.name.as_str(), |(_, name)| name) + .map(|(_, name)| name) + .unwrap_or(package.id.nv.name.as_str()) } pub fn warn_missing_entrypoint( diff --git a/cli/npm/managed/resolvers/global.rs b/cli/npm/managed/resolvers/global.rs index 4e79941af6..f56f012407 100644 --- a/cli/npm/managed/resolvers/global.rs +++ b/cli/npm/managed/resolvers/global.rs @@ -18,7 +18,7 @@ use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; use deno_npm::NpmSystemInfo; -use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::NodePermissions; use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageNotFoundError; @@ -47,15 +47,15 @@ pub struct GlobalNpmPackageResolver { impl GlobalNpmPackageResolver { pub fn new( cache: Arc, - fs: Arc, tarball_cache: Arc, resolution: Arc, + sys: FsSysTraitsAdapter, system_info: NpmSystemInfo, lifecycle_scripts: LifecycleScriptsConfig, ) -> Self { Self { registry_read_permission_checker: RegistryReadPermissionChecker::new( - fs, + sys, cache.root_dir_path().to_path_buf(), ), cache, diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index 1e83717f15..8bbaf6c51c 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -15,11 +15,6 @@ use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; -use crate::args::LifecycleScriptsConfig; -use crate::colors; -use crate::npm::managed::PackageCaching; -use crate::npm::CliNpmCache; -use crate::npm::CliNpmTarballCache; use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_cache_dir::npm::mixed_case_package_name_decode; @@ -34,21 +29,28 @@ use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; use deno_npm::NpmSystemInfo; +use deno_path_util::fs::atomic_write_file_with_retries; +use deno_path_util::fs::canonicalize_path_maybe_not_exists; use deno_resolver::npm::normalize_pkg_name_for_node_modules_deno_folder; -use deno_runtime::deno_fs; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::NodePermissions; use deno_semver::package::PackageNv; +use deno_semver::StackString; use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveIoError; use 
node_resolver::errors::PackageNotFoundError; use node_resolver::errors::ReferrerNotFoundError; use serde::Deserialize; use serde::Serialize; +use sys_traits::FsMetadata; +use crate::args::LifecycleScriptsConfig; use crate::args::NpmInstallDepsProvider; use crate::cache::CACHE_PERM; -use crate::util::fs::atomic_write_file_with_retries; -use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; +use crate::colors; +use crate::npm::managed::PackageCaching; +use crate::npm::CliNpmCache; +use crate::npm::CliNpmTarballCache; use crate::util::fs::clone_dir_recursive; use crate::util::fs::symlink_dir; use crate::util::fs::LaxSingleProcessFsFlag; @@ -65,10 +67,10 @@ use super::common::RegistryReadPermissionChecker; #[derive(Debug)] pub struct LocalNpmPackageResolver { cache: Arc, - fs: Arc, npm_install_deps_provider: Arc, progress_bar: ProgressBar, resolution: Arc, + sys: FsSysTraitsAdapter, tarball_cache: Arc, root_node_modules_path: PathBuf, root_node_modules_url: Url, @@ -81,10 +83,10 @@ impl LocalNpmPackageResolver { #[allow(clippy::too_many_arguments)] pub fn new( cache: Arc, - fs: Arc, npm_install_deps_provider: Arc, progress_bar: ProgressBar, resolution: Arc, + sys: FsSysTraitsAdapter, tarball_cache: Arc, node_modules_folder: PathBuf, system_info: NpmSystemInfo, @@ -92,15 +94,15 @@ impl LocalNpmPackageResolver { ) -> Self { Self { cache, - fs: fs.clone(), npm_install_deps_provider, progress_bar, resolution, tarball_cache, registry_read_permission_checker: RegistryReadPermissionChecker::new( - fs, + sys.clone(), node_modules_folder.clone(), ), + sys, root_node_modules_url: Url::from_directory_path(&node_modules_folder) .unwrap(), root_node_modules_path: node_modules_folder, @@ -139,8 +141,7 @@ impl LocalNpmPackageResolver { }; // Canonicalize the path so it's not pointing to the symlinked directory // in `node_modules` directory of the referrer. 
- canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref()) - .map(Some) + canonicalize_path_maybe_not_exists(&self.sys, &path).map(Some) } fn resolve_package_folder_from_specifier( @@ -209,7 +210,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { }; let sub_dir = join_package_name(&node_modules_folder, name); - if self.fs.is_dir_sync(&sub_dir) { + if self.sys.fs_is_dir_no_err(&sub_dir) { return Ok(sub_dir); } @@ -355,8 +356,10 @@ async fn sync_resolution_with_fs( let package_partitions = snapshot.all_system_packages_partitioned(system_info); let mut cache_futures = FuturesUnordered::new(); - let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> = - HashMap::with_capacity(package_partitions.packages.len()); + let mut newest_packages_by_name: HashMap< + &StackString, + &NpmResolutionPackage, + > = HashMap::with_capacity(package_partitions.packages.len()); let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new())); let mut lifecycle_scripts = super::common::lifecycle_scripts::LifecycleScripts::new( @@ -536,7 +539,7 @@ async fn sync_resolution_with_fs( } } - let mut found_names: HashMap<&String, &PackageNv> = HashMap::new(); + let mut found_names: HashMap<&StackString, &PackageNv> = HashMap::new(); // set of node_modules in workspace packages that we've already ensured exist let mut existing_child_node_modules_dirs: HashSet = HashSet::new(); @@ -922,7 +925,13 @@ impl SetupCache { } bincode::serialize(&self.current).ok().and_then(|data| { - atomic_write_file_with_retries(&self.file_path, data, CACHE_PERM).ok() + atomic_write_file_with_retries( + &FsSysTraitsAdapter::new_real(), + &self.file_path, + &data, + CACHE_PERM, + ) + .ok() }); true } @@ -1012,10 +1021,10 @@ fn get_package_folder_id_from_folder_name( ) -> Option { let folder_name = folder_name.replace('+', "/"); let (name, ending) = folder_name.rsplit_once('@')?; - let name = if let Some(encoded_name) = name.strip_prefix('_') { - mixed_case_package_name_decode(encoded_name)? + let name: StackString = if let Some(encoded_name) = name.strip_prefix('_') { + StackString::from_string(mixed_case_package_name_decode(encoded_name)?) 
} else { - name.to_string() + name.into() }; let (raw_version, copy_index) = match ending.split_once('_') { Some((raw_version, copy_index)) => { diff --git a/cli/npm/managed/resolvers/mod.rs b/cli/npm/managed/resolvers/mod.rs index 736270749f..2d6d37798f 100644 --- a/cli/npm/managed/resolvers/mod.rs +++ b/cli/npm/managed/resolvers/mod.rs @@ -8,7 +8,7 @@ use std::path::PathBuf; use std::sync::Arc; use deno_npm::NpmSystemInfo; -use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use crate::args::LifecycleScriptsConfig; use crate::args::NpmInstallDepsProvider; @@ -25,11 +25,11 @@ use super::resolution::NpmResolution; #[allow(clippy::too_many_arguments)] pub fn create_npm_fs_resolver( - fs: Arc, npm_cache: Arc, npm_install_deps_provider: &Arc, progress_bar: &ProgressBar, resolution: Arc, + sys: FsSysTraitsAdapter, tarball_cache: Arc, maybe_node_modules_path: Option, system_info: NpmSystemInfo, @@ -38,10 +38,10 @@ pub fn create_npm_fs_resolver( match maybe_node_modules_path { Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new( npm_cache, - fs, npm_install_deps_provider.clone(), progress_bar.clone(), resolution, + sys, tarball_cache, node_modules_folder, system_info, @@ -49,9 +49,9 @@ pub fn create_npm_fs_resolver( )), None => Arc::new(GlobalNpmPackageResolver::new( npm_cache, - fs, tarball_cache, resolution, + sys, system_info, lifecycle_scripts, )), diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 312ea2055b..6f686c3553 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -17,7 +17,7 @@ use deno_resolver::npm::ByonmInNpmPackageChecker; use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::CliNpmReqResolver; use deno_resolver::npm::ResolvePkgFolderFromDenoReqError; -use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::NodePermissions; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; @@ -30,9 +30,6 @@ use node_resolver::NpmPackageFolderResolver; use crate::file_fetcher::CliFileFetcher; use crate::http_util::HttpClientProvider; -use crate::util::fs::atomic_write_file_with_retries_and_fs; -use crate::util::fs::hard_link_dir_recursive; -use crate::util::fs::AtomicWriteFileFsAdapter; use crate::util::progress_bar::ProgressBar; pub use self::byonm::CliByonmNpmResolver; @@ -43,26 +40,26 @@ pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::ManagedCliNpmResolver; pub use self::managed::PackageCaching; -pub type CliNpmTarballCache = deno_npm_cache::TarballCache; -pub type CliNpmCache = deno_npm_cache::NpmCache; -pub type CliNpmRegistryInfoProvider = - deno_npm_cache::RegistryInfoProvider; +pub type CliNpmTarballCache = + deno_npm_cache::TarballCache; +pub type CliNpmCache = deno_npm_cache::NpmCache; +pub type CliNpmRegistryInfoProvider = deno_npm_cache::RegistryInfoProvider< + CliNpmCacheHttpClient, + FsSysTraitsAdapter, +>; #[derive(Debug)] -pub struct CliNpmCacheEnv { - fs: Arc, +pub struct CliNpmCacheHttpClient { http_client_provider: Arc, progress_bar: ProgressBar, } -impl CliNpmCacheEnv { +impl CliNpmCacheHttpClient { pub fn new( - fs: Arc, http_client_provider: Arc, progress_bar: ProgressBar, ) -> Self { Self { - fs, http_client_provider, progress_bar, } @@ -70,35 +67,7 @@ impl CliNpmCacheEnv { } #[async_trait::async_trait(?Send)] -impl deno_npm_cache::NpmCacheEnv for CliNpmCacheEnv { - fn exists(&self, path: &Path) -> bool { - self.fs.exists_sync(path) - } - - fn hard_link_dir_recursive( - &self, 
- from: &Path, - to: &Path, - ) -> Result<(), AnyError> { - // todo(dsherret): use self.fs here instead - hard_link_dir_recursive(from, to) - } - - fn atomic_write_file_with_retries( - &self, - file_path: &Path, - data: &[u8], - ) -> std::io::Result<()> { - atomic_write_file_with_retries_and_fs( - &AtomicWriteFileFsAdapter { - fs: self.fs.as_ref(), - write_mode: crate::cache::CACHE_PERM, - }, - file_path, - data, - ) - } - +impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient { async fn download_with_retries_on_any_tokio_runtime( &self, url: Url, diff --git a/cli/ops/lint.rs b/cli/ops/lint.rs new file mode 100644 index 0000000000..c38ac0c8a2 --- /dev/null +++ b/cli/ops/lint.rs @@ -0,0 +1,34 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use deno_ast::MediaType; +use deno_ast::ModuleSpecifier; +use deno_core::error::generic_error; +use deno_core::error::AnyError; +use deno_core::op2; + +use crate::tools::lint; + +deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],); + +#[op2] +#[buffer] +fn op_lint_create_serialized_ast( + #[string] file_name: &str, + #[string] source: String, +) -> Result, AnyError> { + let file_text = deno_ast::strip_bom(source); + let path = std::env::current_dir()?.join(file_name); + let specifier = ModuleSpecifier::from_file_path(&path).map_err(|_| { + generic_error(format!("Failed to parse path as URL: {}", path.display())) + })?; + let media_type = MediaType::from_specifier(&specifier); + let parsed_source = deno_ast::parse_program(deno_ast::ParseParams { + specifier, + text: file_text.into(), + media_type, + capture_tokens: false, + scope_analysis: false, + maybe_syntax: None, + })?; + Ok(lint::serialize_ast_to_buffer(&parsed_source)) +} diff --git a/cli/ops/mod.rs b/cli/ops/mod.rs index 230d268ab4..4ac1618816 100644 --- a/cli/ops/mod.rs +++ b/cli/ops/mod.rs @@ -2,4 +2,5 @@ pub mod bench; pub mod jupyter; +pub mod lint; pub mod testing; diff --git a/cli/resolver.rs b/cli/resolver.rs index f5c3f68f36..0a4fd78686 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -1,5 +1,10 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
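// An illustrative sketch (not part of the diff) of the parse step that
// op_lint_create_serialized_ast above performs before serialization: strip the
// BOM, derive the media type from the specifier, and parse with no token
// capture or scope analysis. `parse_for_lint` is a hypothetical helper and the
// error type is assumed to be deno_ast::ParseDiagnostic; the ParseParams
// fields mirror the op itself.
fn parse_for_lint(
  specifier: deno_ast::ModuleSpecifier,
  source: String,
) -> Result<deno_ast::ParsedSource, deno_ast::ParseDiagnostic> {
  let media_type = deno_ast::MediaType::from_specifier(&specifier);
  deno_ast::parse_program(deno_ast::ParseParams {
    specifier,
    text: deno_ast::strip_bom(source).into(),
    media_type,
    capture_tokens: false, // the lint plugin consumes the serialized AST, not tokens
    scope_analysis: false,
    maybe_syntax: None,
  })
}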
+use std::borrow::Cow; +use std::path::Path; +use std::path::PathBuf; +use std::sync::Arc; + use async_trait::async_trait; use dashmap::DashMap; use dashmap::DashSet; @@ -20,16 +25,14 @@ use deno_npm::resolution::NpmResolutionError; use deno_resolver::sloppy_imports::SloppyImportsResolver; use deno_runtime::colors; use deno_runtime::deno_fs; -use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::is_builtin_node_module; -use deno_runtime::deno_node::DenoFsNodeResolverEnv; +use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use deno_semver::package::PackageReq; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; -use std::borrow::Cow; -use std::path::Path; -use std::path::PathBuf; -use std::sync::Arc; +use sys_traits::FsMetadata; +use sys_traits::FsMetadataValue; use thiserror::Error; use crate::args::NpmCachingStrategy; @@ -40,18 +43,19 @@ use crate::npm::InnerCliNpmResolverRef; use crate::util::sync::AtomicFlag; use crate::util::text_encoding::from_utf8_lossy_cow; -pub type CjsTracker = deno_resolver::cjs::CjsTracker; -pub type IsCjsResolver = - deno_resolver::cjs::IsCjsResolver; +pub type CjsTracker = deno_resolver::cjs::CjsTracker; +pub type IsCjsResolver = deno_resolver::cjs::IsCjsResolver; pub type CliSloppyImportsResolver = SloppyImportsResolver; pub type CliDenoResolver = deno_resolver::DenoResolver< - CliDenoResolverFs, - DenoFsNodeResolverEnv, + RealIsBuiltInNodeModuleChecker, SloppyImportsCachedFs, + FsSysTraitsAdapter, +>; +pub type CliNpmReqResolver = deno_resolver::npm::NpmReqResolver< + RealIsBuiltInNodeModuleChecker, + FsSysTraitsAdapter, >; -pub type CliNpmReqResolver = - deno_resolver::npm::NpmReqResolver; pub struct ModuleCodeStringSource { pub code: ModuleSourceCode, @@ -59,53 +63,6 @@ pub struct ModuleCodeStringSource { pub media_type: MediaType, } -#[derive(Debug, Clone)] -pub struct CliDenoResolverFs(pub Arc); - -impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs { - fn read_to_string_lossy( - &self, - path: &Path, - ) -> std::io::Result> { - self - .0 - .read_text_file_lossy_sync(path, None) - .map_err(|e| e.into_io_error()) - } - - fn realpath_sync(&self, path: &Path) -> std::io::Result { - self.0.realpath_sync(path).map_err(|e| e.into_io_error()) - } - - fn exists_sync(&self, path: &Path) -> bool { - self.0.exists_sync(path) - } - - fn is_dir_sync(&self, path: &Path) -> bool { - self.0.is_dir_sync(path) - } - - fn read_dir_sync( - &self, - dir_path: &Path, - ) -> std::io::Result> { - self - .0 - .read_dir_sync(dir_path) - .map(|entries| { - entries - .into_iter() - .map(|e| deno_resolver::fs::DirEntry { - name: e.name, - is_file: e.is_file, - is_directory: e.is_directory, - }) - .collect::>() - }) - .map_err(|err| err.into_io_error()) - } -} - #[derive(Debug, Error)] #[error("{media_type} files are not supported in npm packages: {specifier}")] pub struct NotSupportedKindInNpmError { @@ -440,7 +397,7 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> { #[derive(Debug)] pub struct SloppyImportsCachedFs { - fs: Arc, + sys: FsSysTraitsAdapter, cache: Option< DashMap< PathBuf, @@ -450,15 +407,18 @@ pub struct SloppyImportsCachedFs { } impl SloppyImportsCachedFs { - pub fn new(fs: Arc) -> Self { + pub fn new(sys: FsSysTraitsAdapter) -> Self { Self { - fs, + sys, cache: Some(Default::default()), } } - pub fn new_without_stat_cache(fs: Arc) -> Self { - Self { fs, cache: None } + pub fn new_without_stat_cache(fs: FsSysTraitsAdapter) -> Self { + 
Self { + sys: fs, + cache: None, + } } } @@ -475,10 +435,10 @@ impl deno_resolver::sloppy_imports::SloppyImportResolverFs } } - let entry = self.fs.stat_sync(path).ok().and_then(|stat| { - if stat.is_file { + let entry = self.sys.fs_metadata(path).ok().and_then(|stat| { + if stat.file_type().is_file() { Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::File) - } else if stat.is_directory { + } else if stat.file_type().is_dir() { Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::Dir) } else { None diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json index 1e3abb2c0d..d644072f4c 100644 --- a/cli/schemas/config-file.v1.json +++ b/cli/schemas/config-file.v1.json @@ -446,7 +446,6 @@ }, "command": { "type": "string", - "required": true, "description": "The task to execute" }, "dependencies": { diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 2ed52010fb..91187c48d1 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -37,7 +37,6 @@ use deno_core::futures::AsyncReadExt; use deno_core::futures::AsyncSeekExt; use deno_core::serde_json; use deno_core::url::Url; -use deno_graph::source::RealFileSystem; use deno_graph::ModuleGraph; use deno_npm::resolution::SerializedNpmResolutionSnapshot; use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage; @@ -91,6 +90,7 @@ use super::serialization::DenoCompileModuleData; use super::serialization::DeserializedDataSection; use super::serialization::RemoteModulesStore; use super::serialization::RemoteModulesStoreBuilder; +use super::serialization::SourceMapStore; use super::virtual_fs::output_vfs; use super::virtual_fs::BuiltVfs; use super::virtual_fs::FileBackedVfs; @@ -98,6 +98,7 @@ use super::virtual_fs::VfsBuilder; use super::virtual_fs::VfsFileSubDataKind; use super::virtual_fs::VfsRoot; use super::virtual_fs::VirtualDirectory; +use super::virtual_fs::VirtualDirectoryEntries; use super::virtual_fs::WindowsSystemRootablePath; pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str = @@ -203,18 +204,25 @@ pub struct Metadata { pub otel_config: OtelConfig, } +#[allow(clippy::too_many_arguments)] fn write_binary_bytes( mut file_writer: File, original_bin: Vec, metadata: &Metadata, npm_snapshot: Option, remote_modules: &RemoteModulesStoreBuilder, + source_map_store: &SourceMapStore, vfs: &BuiltVfs, compile_flags: &CompileFlags, ) -> Result<(), AnyError> { - let data_section_bytes = - serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs) - .context("Serializing binary data section.")?; + let data_section_bytes = serialize_binary_data_section( + metadata, + npm_snapshot, + remote_modules, + source_map_store, + vfs, + ) + .context("Serializing binary data section.")?; let target = compile_flags.resolve_target(); if target.contains("linux") { @@ -256,6 +264,7 @@ pub struct StandaloneData { pub modules: StandaloneModules, pub npm_snapshot: Option, pub root_path: PathBuf, + pub source_maps: SourceMapStore, pub vfs: Arc, } @@ -283,13 +292,12 @@ impl StandaloneModules { pub fn read<'a>( &'a self, specifier: &'a ModuleSpecifier, + kind: VfsFileSubDataKind, ) -> Result>, AnyError> { if specifier.scheme() == "file" { let path = deno_path_util::url_to_file_path(specifier)?; let bytes = match self.vfs.file_entry(&path) { - Ok(entry) => self - .vfs - .read_file_all(entry, VfsFileSubDataKind::ModuleGraph)?, + Ok(entry) => self.vfs.read_file_all(entry, kind)?, Err(err) if err.kind() == ErrorKind::NotFound => { match RealFs.read_file_sync(&path, None) { Ok(bytes) => 
bytes, @@ -307,7 +315,18 @@ impl StandaloneModules { data: bytes, })) } else { - self.remote_modules.read(specifier) + self.remote_modules.read(specifier).map(|maybe_entry| { + maybe_entry.map(|entry| DenoCompileModuleData { + media_type: entry.media_type, + specifier: entry.specifier, + data: match kind { + VfsFileSubDataKind::Raw => entry.data, + VfsFileSubDataKind::ModuleGraph => { + entry.transpiled_data.unwrap_or(entry.data) + } + }, + }) + }) } } } @@ -328,7 +347,8 @@ pub fn extract_standalone( mut metadata, npm_snapshot, remote_modules, - mut vfs_dir, + source_maps, + vfs_root_entries, vfs_files_data, } = match deserialize_binary_data_section(data)? { Some(data_section) => data_section, @@ -351,11 +371,12 @@ pub fn extract_standalone( metadata.argv.push(arg.into_string().unwrap()); } let vfs = { - // align the name of the directory with the root dir - vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string(); - let fs_root = VfsRoot { - dir: vfs_dir, + dir: VirtualDirectory { + // align the name of the directory with the root dir + name: root_path.file_name().unwrap().to_string_lossy().to_string(), + entries: vfs_root_entries, + }, root_path: root_path.clone(), start_file_offset: 0, }; @@ -372,6 +393,7 @@ pub fn extract_standalone( }, npm_snapshot, root_path, + source_maps, vfs, })) } @@ -451,7 +473,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { ) } } - self.write_standalone_binary(options, original_binary).await + self.write_standalone_binary(options, original_binary) } async fn get_base_binary( @@ -554,7 +576,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { /// This functions creates a standalone deno binary by appending a bundle /// and magic trailer to the currently executing binary. #[allow(clippy::too_many_arguments)] - async fn write_standalone_binary( + fn write_standalone_binary( &self, options: WriteBinOptions<'_>, original_bin: Vec, @@ -598,71 +620,81 @@ impl<'a> DenoCompileBinaryWriter<'a> { .with_context(|| format!("Including {}", path.display()))?; } let mut remote_modules_store = RemoteModulesStoreBuilder::default(); - let mut code_cache_key_hasher = if self.cli_options.code_cache_enabled() { - Some(FastInsecureHasher::new_deno_versioned()) - } else { - None - }; + let mut source_maps = Vec::with_capacity(graph.specifiers_count()); + // todo(dsherret): transpile in parallel for module in graph.modules() { if module.specifier().scheme() == "data" { continue; // don't store data urls as an entry as they're in the code } - if let Some(hasher) = &mut code_cache_key_hasher { - if let Some(source) = module.source() { - hasher.write(module.specifier().as_str().as_bytes()); - hasher.write(source.as_bytes()); - } - } - let (maybe_source, media_type) = match module { + let (maybe_original_source, maybe_transpiled, media_type) = match module { deno_graph::Module::Js(m) => { - let source = if m.media_type.is_emittable() { + let original_bytes = m.source.as_bytes().to_vec(); + let maybe_transpiled = if m.media_type.is_emittable() { let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( &m.specifier, m.media_type, m.is_script, )?; let module_kind = ModuleKind::from_is_cjs(is_cjs); - let source = self - .emitter - .emit_parsed_source( + let (source, source_map) = + self.emitter.emit_parsed_source_for_deno_compile( &m.specifier, m.media_type, module_kind, &m.source, - ) - .await?; - source.into_bytes() + )?; + if source != m.source.as_ref() { + source_maps.push((&m.specifier, source_map)); + Some(source.into_bytes()) + } else { + None + } } else { - 
m.source.as_bytes().to_vec() + None }; - (Some(source), m.media_type) + (Some(original_bytes), maybe_transpiled, m.media_type) } deno_graph::Module::Json(m) => { - (Some(m.source.as_bytes().to_vec()), m.media_type) + (Some(m.source.as_bytes().to_vec()), None, m.media_type) } deno_graph::Module::Wasm(m) => { - (Some(m.source.to_vec()), MediaType::Wasm) + (Some(m.source.to_vec()), None, MediaType::Wasm) } deno_graph::Module::Npm(_) | deno_graph::Module::Node(_) - | deno_graph::Module::External(_) => (None, MediaType::Unknown), + | deno_graph::Module::External(_) => (None, None, MediaType::Unknown), }; - if module.specifier().scheme() == "file" { - let file_path = deno_path_util::url_to_file_path(module.specifier())?; - vfs - .add_file_with_data( - &file_path, - match maybe_source { - Some(source) => source, - None => RealFs.read_file_sync(&file_path, None)?.into_owned(), - }, - VfsFileSubDataKind::ModuleGraph, - ) - .with_context(|| { - format!("Failed adding '{}'", file_path.display()) - })?; - } else if let Some(source) = maybe_source { - remote_modules_store.add(module.specifier(), media_type, source); + if let Some(original_source) = maybe_original_source { + if module.specifier().scheme() == "file" { + let file_path = deno_path_util::url_to_file_path(module.specifier())?; + vfs + .add_file_with_data( + &file_path, + original_source, + VfsFileSubDataKind::Raw, + ) + .with_context(|| { + format!("Failed adding '{}'", file_path.display()) + })?; + if let Some(transpiled_source) = maybe_transpiled { + vfs + .add_file_with_data( + &file_path, + transpiled_source, + VfsFileSubDataKind::ModuleGraph, + ) + .with_context(|| { + format!("Failed adding '{}'", file_path.display()) + })?; + } + } else { + remote_modules_store.add( + module.specifier(), + media_type, + original_source, + maybe_transpiled, + ); + } } } remote_modules_store.add_redirects(&graph.redirects); @@ -695,6 +727,28 @@ impl<'a> DenoCompileBinaryWriter<'a> { None => StandaloneRelativeFileBaseUrl::WindowsSystemRoot, }; + let code_cache_key = if self.cli_options.code_cache_enabled() { + let mut hasher = FastInsecureHasher::new_deno_versioned(); + for module in graph.modules() { + if let Some(source) = module.source() { + hasher + .write(root_dir_url.specifier_key(module.specifier()).as_bytes()); + hasher.write(source.as_bytes()); + } + } + Some(hasher.finish()) + } else { + None + }; + + let mut source_map_store = SourceMapStore::with_capacity(source_maps.len()); + for (specifier, source_map) in source_maps { + source_map_store.add( + Cow::Owned(root_dir_url.specifier_key(specifier).into_owned()), + Cow::Owned(source_map.into_bytes()), + ); + } + let node_modules = match self.npm_resolver.as_inner() { InnerCliNpmResolverRef::Managed(_) => { npm_snapshot.as_ref().map(|_| NodeModules::Managed { @@ -742,7 +796,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { let metadata = Metadata { argv: compile_flags.args.clone(), seed: self.cli_options.seed(), - code_cache_key: code_cache_key_hasher.map(|h| h.finish()), + code_cache_key, location: self.cli_options.location_flag().clone(), permissions: self.cli_options.permission_flags().clone(), v8_flags: self.cli_options.v8_flags().clone(), @@ -809,6 +863,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { &metadata, npm_snapshot.map(|s| s.into_serialized()), &remote_modules_store, + &source_map_store, &vfs, compile_flags, ) @@ -903,10 +958,10 @@ impl<'a> DenoCompileBinaryWriter<'a> { root_dir.name = DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME.to_string(); let mut new_entries = 
Vec::with_capacity(root_dir.entries.len()); let mut localhost_entries = IndexMap::new(); - for entry in std::mem::take(&mut root_dir.entries) { + for entry in root_dir.entries.take_inner() { match entry { - VfsEntry::Dir(dir) => { - for entry in dir.entries { + VfsEntry::Dir(mut dir) => { + for entry in dir.entries.take_inner() { log::debug!("Flattening {} into node_modules", entry.name()); if let Some(existing) = localhost_entries.insert(entry.name().to_string(), entry) @@ -925,11 +980,11 @@ impl<'a> DenoCompileBinaryWriter<'a> { } new_entries.push(VfsEntry::Dir(VirtualDirectory { name: "localhost".to_string(), - entries: localhost_entries.into_iter().map(|(_, v)| v).collect(), + entries: VirtualDirectoryEntries::new( + localhost_entries.into_iter().map(|(_, v)| v).collect(), + ), })); - // needs to be sorted by name - new_entries.sort_by(|a, b| a.name().cmp(b.name())); - root_dir.entries = new_entries; + root_dir.entries = VirtualDirectoryEntries::new(new_entries); // it's better to not expose the user's cache directory, so take it out // of there @@ -937,10 +992,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { let parent_dir = vfs.get_dir_mut(parent).unwrap(); let index = parent_dir .entries - .iter() - .position(|entry| { - entry.name() == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME - }) + .binary_search(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME) .unwrap(); let npm_global_cache_dir_entry = parent_dir.entries.remove(index); @@ -950,11 +1002,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { Cow::Borrowed(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME); for ancestor in parent.ancestors() { let dir = vfs.get_dir_mut(ancestor).unwrap(); - if let Some(index) = dir - .entries - .iter() - .position(|entry| entry.name() == last_name) - { + if let Ok(index) = dir.entries.binary_search(&last_name) { dir.entries.remove(index); } last_name = Cow::Owned(dir.name.clone()); @@ -965,7 +1013,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { // now build the vfs and add the global cache dir entry there let mut built_vfs = vfs.build(); - built_vfs.root.insert_entry(npm_global_cache_dir_entry); + built_vfs.entries.insert(npm_global_cache_dir_entry); built_vfs } InnerCliNpmResolverRef::Byonm(_) => vfs.build(), diff --git a/cli/standalone/code_cache.rs b/cli/standalone/code_cache.rs index 9580b9b44e..a44c920328 100644 --- a/cli/standalone/code_cache.rs +++ b/cli/standalone/code_cache.rs @@ -15,11 +15,12 @@ use deno_core::anyhow::bail; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::unsync::sync::AtomicFlag; +use deno_path_util::get_atomic_path; use deno_runtime::code_cache::CodeCache; use deno_runtime::code_cache::CodeCacheType; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use crate::cache::FastInsecureHasher; -use crate::util::path::get_atomic_file_path; use crate::worker::CliCodeCache; enum CodeCacheStrategy { @@ -189,7 +190,8 @@ impl FirstRunCodeCacheStrategy { cache_data: &HashMap, ) { let count = cache_data.len(); - let temp_file = get_atomic_file_path(&self.file_path); + let temp_file = + get_atomic_path(&FsSysTraitsAdapter::new_real(), &self.file_path); match serialize(&temp_file, self.cache_key, cache_data) { Ok(()) => { if let Err(err) = std::fs::rename(&temp_file, &self.file_path) { diff --git a/cli/standalone/file_system.rs b/cli/standalone/file_system.rs index 48dc907570..4b1024db6a 100644 --- a/cli/standalone/file_system.rs +++ b/cli/standalone/file_system.rs @@ -9,6 +9,7 @@ use deno_runtime::deno_fs::AccessCheckCb; use deno_runtime::deno_fs::FileSystem; use 
deno_runtime::deno_fs::FsDirEntry; use deno_runtime::deno_fs::FsFileType; +use deno_runtime::deno_fs::FsStatSlim; use deno_runtime::deno_fs::OpenOptions; use deno_runtime::deno_fs::RealFs; use deno_runtime::deno_io::fs::File; diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 08ee5ba11a..0ce25ff62f 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -36,10 +36,12 @@ use deno_package_json::PackageJsonDepValue; use deno_resolver::cjs::IsCjsResolutionMode; use deno_resolver::npm::NpmReqResolverOptions; use deno_runtime::deno_fs; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::create_host_defined_options; use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJsonResolver; +use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -55,6 +57,7 @@ use node_resolver::errors::ClosestPkgJsonError; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; use serialization::DenoCompileModuleSource; +use serialization::SourceMapStore; use std::borrow::Cow; use std::rc::Rc; use std::sync::Arc; @@ -68,11 +71,9 @@ use crate::args::CaData; use crate::args::NpmInstallDepsProvider; use crate::args::StorageKeyResolver; use crate::cache::Caches; -use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::DenoDirProvider; use crate::cache::FastInsecureHasher; use crate::cache::NodeAnalysisCache; -use crate::cache::RealDenoCacheEnv; use crate::http_util::HttpClientProvider; use crate::node::CliCjsCodeAnalyzer; use crate::node::CliNodeCodeTranslator; @@ -86,7 +87,6 @@ use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CreateInNpmPkgCheckerOptions; use crate::resolver::CjsTracker; -use crate::resolver::CliDenoResolverFs; use crate::resolver::CliNpmReqResolver; use crate::resolver::NpmModuleLoader; use crate::util::progress_bar::ProgressBar; @@ -122,6 +122,7 @@ struct SharedModuleLoaderState { npm_module_loader: Arc, npm_req_resolver: Arc, npm_resolver: Arc, + source_maps: SourceMapStore, vfs: Arc, workspace_resolver: WorkspaceResolver, } @@ -396,7 +397,11 @@ impl ModuleLoader for EmbeddedModuleLoader { ); } - match self.shared.modules.read(original_specifier) { + match self + .shared + .modules + .read(original_specifier, VfsFileSubDataKind::ModuleGraph) + { Ok(Some(module)) => { let media_type = module.media_type; let (module_specifier, module_type, module_source) = @@ -495,6 +500,45 @@ impl ModuleLoader for EmbeddedModuleLoader { } std::future::ready(()).boxed_local() } + + fn get_source_map(&self, file_name: &str) -> Option> { + if file_name.starts_with("file:///") { + let url = + deno_path_util::url_from_directory_path(self.shared.vfs.root()).ok()?; + let file_url = ModuleSpecifier::parse(file_name).ok()?; + let relative_path = url.make_relative(&file_url)?; + self.shared.source_maps.get(&relative_path) + } else { + self.shared.source_maps.get(file_name) + } + .map(Cow::Borrowed) + } + + fn get_source_mapped_source_line( + &self, + file_name: &str, + line_number: usize, + ) -> Option { + let specifier = ModuleSpecifier::parse(file_name).ok()?; + let data = self + .shared + .modules + .read(&specifier, VfsFileSubDataKind::Raw) + .ok()??; + + let source = String::from_utf8_lossy(&data.data); + // Do NOT use .lines(): it skips the terminating 
empty line. + // (due to internally using_terminator() instead of .split()) + let lines: Vec<&str> = source.split('\n').collect(); + if line_number >= lines.len() { + Some(format!( + "{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)", + crate::colors::yellow("Warning"), line_number + 1, + )) + } else { + Some(lines[line_number].to_string()) + } + } } impl NodeRequireLoader for EmbeddedModuleLoader { @@ -590,6 +634,7 @@ pub async fn run(data: StandaloneData) -> Result { modules, npm_snapshot, root_path, + source_maps, vfs, } = data; let deno_dir_provider = Arc::new(DenoDirProvider::new(None)); @@ -610,9 +655,8 @@ pub async fn run(data: StandaloneData) -> Result { let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); let npm_global_cache_dir = root_path.join(".deno_compile_node_modules"); let cache_setting = CacheSetting::Only; - let pkg_json_resolver = Arc::new(PackageJsonResolver::new( - deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), - )); + let sys = FsSysTraitsAdapter(fs.clone()); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new(sys.clone())); let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules { Some(binary::NodeModules::Managed { node_modules_dir }) => { // create an npmrc that uses the fake npm_registry_url to resolve packages @@ -625,7 +669,7 @@ pub async fn run(data: StandaloneData) -> Result { registry_configs: Default::default(), }); let npm_cache_dir = Arc::new(NpmCacheDir::new( - &DenoCacheEnvFsAdapter(fs.as_ref()), + &sys, npm_global_cache_dir, npmrc.get_all_known_registries_urls(), )); @@ -646,17 +690,17 @@ pub async fn run(data: StandaloneData) -> Result { snapshot, )), maybe_lockfile: None, - fs: fs.clone(), http_client_provider: http_client_provider.clone(), npm_cache_dir, - cache_setting, - text_only_progress_bar: progress_bar, - maybe_node_modules_path, - npm_system_info: Default::default(), npm_install_deps_provider: Arc::new( // this is only used for installing packages, which isn't necessary with deno compile NpmInstallDepsProvider::empty(), ), + sys: sys.clone(), + text_only_progress_bar: progress_bar, + cache_setting, + maybe_node_modules_path, + npm_system_info: Default::default(), npmrc, lifecycle_scripts: Default::default(), }, @@ -673,7 +717,7 @@ pub async fn run(data: StandaloneData) -> Result { create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Byonm); let npm_resolver = create_cli_npm_resolver( CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - fs: CliDenoResolverFs(fs.clone()), + sys: sys.clone(), pkg_json_resolver: pkg_json_resolver.clone(), root_node_modules_dir, }), @@ -686,7 +730,7 @@ pub async fn run(data: StandaloneData) -> Result { // so no need to create actual `.npmrc` configuration. 
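// Aside on get_source_mapped_source_line above: a minimal sketch of the
// .lines() pitfall its comment warns about (behavior per the std docs).
fn lines_vs_split() {
  let source = "a\nb\n";
  // .lines() treats '\n' as a terminator and yields ["a", "b"]
  assert_eq!(source.lines().count(), 2);
  // .split('\n') treats '\n' as a separator and yields ["a", "b", ""],
  // keeping the trailing empty line addressable by line number
  assert_eq!(source.split('\n').count(), 3);
}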
let npmrc = create_default_npmrc(); let npm_cache_dir = Arc::new(NpmCacheDir::new( - &DenoCacheEnvFsAdapter(fs.as_ref()), + &sys, npm_global_cache_dir, npmrc.get_all_known_registries_urls(), )); @@ -701,18 +745,18 @@ pub async fn run(data: StandaloneData) -> Result { create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( CliManagedNpmResolverCreateOptions { snapshot: CliNpmResolverManagedSnapshotOption::Specified(None), - maybe_lockfile: None, - fs: fs.clone(), http_client_provider: http_client_provider.clone(), - npm_cache_dir, - cache_setting, - text_only_progress_bar: progress_bar, - maybe_node_modules_path: None, - npm_system_info: Default::default(), npm_install_deps_provider: Arc::new( // this is only used for installing packages, which isn't necessary with deno compile NpmInstallDepsProvider::empty(), ), + sys: sys.clone(), + cache_setting, + text_only_progress_bar: progress_bar, + npm_cache_dir, + maybe_lockfile: None, + maybe_node_modules_path: None, + npm_system_info: Default::default(), npmrc: create_default_npmrc(), lifecycle_scripts: Default::default(), }, @@ -724,10 +768,11 @@ pub async fn run(data: StandaloneData) -> Result { let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); let node_resolver = Arc::new(NodeResolver::new( - deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), in_npm_pkg_checker.clone(), + RealIsBuiltInNodeModuleChecker, npm_resolver.clone().into_npm_pkg_folder_resolver(), pkg_json_resolver.clone(), + sys.clone(), )); let cjs_tracker = Arc::new(CjsTracker::new( in_npm_pkg_checker.clone(), @@ -745,7 +790,7 @@ pub async fn run(data: StandaloneData) -> Result { let npm_req_resolver = Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(), - fs: CliDenoResolverFs(fs.clone()), + sys: sys.clone(), in_npm_pkg_checker: in_npm_pkg_checker.clone(), node_resolver: node_resolver.clone(), npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(), @@ -758,11 +803,11 @@ pub async fn run(data: StandaloneData) -> Result { ); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_code_analyzer, - deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), in_npm_pkg_checker, node_resolver.clone(), npm_resolver.clone().into_npm_pkg_folder_resolver(), pkg_json_resolver.clone(), + sys, )); let workspace_resolver = { let import_map = match metadata.workspace_resolver.import_map { @@ -841,6 +886,7 @@ pub async fn run(data: StandaloneData) -> Result { )), npm_resolver: npm_resolver.clone(), npm_req_resolver, + source_maps, vfs, workspace_resolver, }), diff --git a/cli/standalone/serialization.rs b/cli/standalone/serialization.rs index ac76172e37..30802aa081 100644 --- a/cli/standalone/serialization.rs +++ b/cli/standalone/serialization.rs @@ -6,6 +6,8 @@ use std::collections::BTreeMap; use std::collections::HashMap; use std::io::Write; +use capacity_builder::BytesAppendable; +use deno_ast::swc::common::source_map; use deno_ast::MediaType; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -20,12 +22,15 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmPackageId; use deno_semver::package::PackageReq; +use deno_semver::StackString; +use indexmap::IndexMap; use crate::standalone::virtual_fs::VirtualDirectory; use super::binary::Metadata; use super::virtual_fs::BuiltVfs; use super::virtual_fs::VfsBuilder; +use 
super::virtual_fs::VirtualDirectoryEntries; const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd"; @@ -33,21 +38,22 @@ const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd"; /// * d3n0l4nd /// * /// * -/// * +/// * /// * /// * +/// * /// * d3n0l4nd pub fn serialize_binary_data_section( metadata: &Metadata, npm_snapshot: Option, remote_modules: &RemoteModulesStoreBuilder, + source_map_store: &SourceMapStore, vfs: &BuiltVfs, ) -> Result, AnyError> { let metadata = serde_json::to_string(metadata)?; let npm_snapshot = npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default(); - let remote_modules_len = Cell::new(0_u64); - let serialized_vfs = serde_json::to_string(&vfs.root)?; + let serialized_vfs = serde_json::to_string(&vfs.entries)?; let bytes = capacity_builder::BytesBuilder::build(|builder| { builder.append(MAGIC_BYTES); @@ -63,10 +69,7 @@ pub fn serialize_binary_data_section( } // 3. Remote modules { - builder.append_le(remote_modules_len.get()); // this will be properly initialized on the second pass - let start_index = builder.len(); remote_modules.write(builder); - remote_modules_len.set((builder.len() - start_index) as u64); } // 4. VFS { @@ -78,6 +81,16 @@ pub fn serialize_binary_data_section( builder.append(file); } } + // 5. Source maps + { + builder.append_le(source_map_store.data.len() as u32); + for (specifier, source_map) in &source_map_store.data { + builder.append_le(specifier.len() as u32); + builder.append(specifier); + builder.append_le(source_map.len() as u32); + builder.append(source_map.as_ref()); + } + } // write the magic bytes at the end so we can use it // to make sure we've deserialized correctly @@ -91,19 +104,14 @@ pub struct DeserializedDataSection { pub metadata: Metadata, pub npm_snapshot: Option, pub remote_modules: RemoteModulesStore, - pub vfs_dir: VirtualDirectory, + pub source_maps: SourceMapStore, + pub vfs_root_entries: VirtualDirectoryEntries, pub vfs_files_data: &'static [u8], } pub fn deserialize_binary_data_section( data: &'static [u8], ) -> Result, AnyError> { - fn read_bytes_with_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { - let (input, len) = read_u64(input)?; - let (input, data) = read_bytes(input, len as usize)?; - Ok((input, data)) - } - fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> { if input.len() < MAGIC_BYTES.len() { bail!("Unexpected end of data. Could not find magic bytes."); @@ -115,34 +123,51 @@ pub fn deserialize_binary_data_section( Ok((input, true)) } + #[allow(clippy::type_complexity)] + fn read_source_map_entry( + input: &[u8], + ) -> Result<(&[u8], (Cow, &[u8])), AnyError> { + let (input, specifier) = read_string_lossy(input)?; + let (input, source_map) = read_bytes_with_u32_len(input)?; + Ok((input, (specifier, source_map))) + } + let (input, found) = read_magic_bytes(data)?; if !found { return Ok(None); } // 1. Metadata - let (input, data) = read_bytes_with_len(input).context("reading metadata")?; + let (input, data) = + read_bytes_with_u64_len(input).context("reading metadata")?; let metadata: Metadata = serde_json::from_slice(data).context("deserializing metadata")?; // 2. Npm snapshot let (input, data) = - read_bytes_with_len(input).context("reading npm snapshot")?; + read_bytes_with_u64_len(input).context("reading npm snapshot")?; let npm_snapshot = if data.is_empty() { None } else { Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?) }; // 3. 
Remote modules - let (input, data) = - read_bytes_with_len(input).context("reading remote modules data")?; - let remote_modules = - RemoteModulesStore::build(data).context("deserializing remote modules")?; + let (input, remote_modules) = + RemoteModulesStore::build(input).context("deserializing remote modules")?; // 4. VFS - let (input, data) = read_bytes_with_len(input).context("vfs")?; - let vfs_dir: VirtualDirectory = + let (input, data) = read_bytes_with_u64_len(input).context("vfs")?; + let vfs_root_entries: VirtualDirectoryEntries = serde_json::from_slice(data).context("deserializing vfs data")?; let (input, vfs_files_data) = - read_bytes_with_len(input).context("reading vfs files data")?; + read_bytes_with_u64_len(input).context("reading vfs files data")?; + // 5. Source maps + let (mut input, source_map_data_len) = read_u32_as_usize(input)?; + let mut source_maps = SourceMapStore::with_capacity(source_map_data_len); + for _ in 0..source_map_data_len { + let (current_input, (specifier, source_map)) = + read_source_map_entry(input)?; + input = current_input; + source_maps.add(specifier, Cow::Borrowed(source_map)); + } // finally ensure we read the magic bytes at the end let (_input, found) = read_magic_bytes(input)?; @@ -154,7 +179,8 @@ pub fn deserialize_binary_data_section( metadata, npm_snapshot, remote_modules, - vfs_dir, + source_maps, + vfs_root_entries, vfs_files_data, })) } @@ -162,19 +188,31 @@ pub fn deserialize_binary_data_section( #[derive(Default)] pub struct RemoteModulesStoreBuilder { specifiers: Vec<(String, u64)>, - data: Vec<(MediaType, Vec)>, + data: Vec<(MediaType, Vec, Option>)>, data_byte_len: u64, redirects: Vec<(String, String)>, redirects_len: u64, } impl RemoteModulesStoreBuilder { - pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec) { + pub fn add( + &mut self, + specifier: &Url, + media_type: MediaType, + data: Vec, + maybe_transpiled: Option>, + ) { log::debug!("Adding '{}' ({})", specifier, media_type); let specifier = specifier.to_string(); self.specifiers.push((specifier, self.data_byte_len)); - self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data - self.data.push((media_type, data)); + let maybe_transpiled_len = match &maybe_transpiled { + // data length (4 bytes), data + Some(data) => 4 + data.len() as u64, + None => 0, + }; + // media type (1 byte), data length (4 bytes), data, has transpiled (1 byte), transpiled length + self.data_byte_len += 1 + 4 + data.len() as u64 + 1 + maybe_transpiled_len; + self.data.push((media_type, data, maybe_transpiled)); } pub fn add_redirects(&mut self, redirects: &BTreeMap) { @@ -188,12 +226,15 @@ impl RemoteModulesStoreBuilder { } } - fn write<'a>(&'a self, builder: &mut capacity_builder::BytesBuilder<'a>) { + fn write<'a, TBytes: capacity_builder::BytesType>( + &'a self, + builder: &mut capacity_builder::BytesBuilder<'a, TBytes>, + ) { builder.append_le(self.specifiers.len() as u32); builder.append_le(self.redirects.len() as u32); for (specifier, offset) in &self.specifiers { builder.append_le(specifier.len() as u32); - builder.append(specifier.as_bytes()); + builder.append(specifier); builder.append_le(*offset); } for (from, to) in &self.redirects { @@ -202,10 +243,32 @@ impl RemoteModulesStoreBuilder { builder.append_le(to.len() as u32); builder.append(to); } - for (media_type, data) in &self.data { + builder.append_le( + self + .data + .iter() + .map(|(_, data, maybe_transpiled)| { + 1 + 4 + + (data.len() as u64) + + 1 + + match 
maybe_transpiled { + Some(transpiled) => 4 + (transpiled.len() as u64), + None => 0, + } + }) + .sum::(), + ); + for (media_type, data, maybe_transpiled) in &self.data { builder.append(serialize_media_type(*media_type)); - builder.append_le(data.len() as u64); + builder.append_le(data.len() as u32); builder.append(data); + if let Some(transpiled) = maybe_transpiled { + builder.append(1); + builder.append_le(transpiled.len() as u32); + builder.append(transpiled); + } else { + builder.append(0); + } } } } @@ -234,6 +297,30 @@ impl DenoCompileModuleSource { } } +pub struct SourceMapStore { + data: IndexMap, Cow<'static, [u8]>>, +} + +impl SourceMapStore { + pub fn with_capacity(capacity: usize) -> Self { + Self { + data: IndexMap::with_capacity(capacity), + } + } + + pub fn add( + &mut self, + specifier: Cow<'static, str>, + source_map: Cow<'static, [u8]>, + ) { + self.data.insert(specifier, source_map); + } + + pub fn get(&self, specifier: &str) -> Option<&[u8]> { + self.data.get(specifier).map(|v| v.as_ref()) + } +} + pub struct DenoCompileModuleData<'a> { pub specifier: &'a Url, pub media_type: MediaType, @@ -280,6 +367,13 @@ impl<'a> DenoCompileModuleData<'a> { } } +pub struct RemoteModuleEntry<'a> { + pub specifier: &'a Url, + pub media_type: MediaType, + pub data: Cow<'static, [u8]>, + pub transpiled_data: Option>, +} + enum RemoteModulesStoreSpecifierValue { Data(usize), Redirect(Url), @@ -291,7 +385,7 @@ pub struct RemoteModulesStore { } impl RemoteModulesStore { - fn build(data: &'static [u8]) -> Result { + fn build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> { fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> { let (input, specifier) = read_string_lossy(input)?; let specifier = Url::parse(&specifier)?; @@ -334,12 +428,16 @@ impl RemoteModulesStore { Ok((input, specifiers)) } - let (files_data, specifiers) = read_headers(data)?; + let (input, specifiers) = read_headers(input)?; + let (input, files_data) = read_bytes_with_u64_len(input)?; - Ok(Self { - specifiers, - files_data, - }) + Ok(( + input, + Self { + specifiers, + files_data, + }, + )) } pub fn resolve_specifier<'a>( @@ -370,7 +468,7 @@ impl RemoteModulesStore { pub fn read<'a>( &'a self, original_specifier: &'a Url, - ) -> Result>, AnyError> { + ) -> Result>, AnyError> { let mut count = 0; let mut specifier = original_specifier; loop { @@ -386,12 +484,25 @@ impl RemoteModulesStore { let input = &self.files_data[*offset..]; let (input, media_type_byte) = read_bytes(input, 1)?; let media_type = deserialize_media_type(media_type_byte[0])?; - let (input, len) = read_u64(input)?; - let (_input, data) = read_bytes(input, len as usize)?; - return Ok(Some(DenoCompileModuleData { + let (input, data) = read_bytes_with_u32_len(input)?; + check_has_len(input, 1)?; + let (input, has_transpiled) = (&input[1..], input[0]); + let (_, transpiled_data) = match has_transpiled { + 0 => (input, None), + 1 => { + let (input, data) = read_bytes_with_u32_len(input)?; + (input, Some(data)) + } + value => bail!( + "Invalid transpiled data flag: {}. 
Compiled data is corrupt.", + value + ), + }; + return Ok(Some(RemoteModuleEntry { specifier, media_type, data: Cow::Borrowed(data), + transpiled_data: transpiled_data.map(Cow::Borrowed), })); } None => { @@ -475,12 +586,13 @@ fn deserialize_npm_snapshot( #[allow(clippy::needless_lifetimes)] // clippy bug fn parse_package_dep<'a>( id_to_npm_id: &'a impl Fn(usize) -> Result, - ) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a + ) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a { |input| { let (input, req) = read_string_lossy(input)?; let (input, id) = read_u32_as_usize(input)?; - Ok((input, (req.into_owned(), id_to_npm_id(id)?))) + let req = StackString::from_cow(req); + Ok((input, (req, id_to_npm_id(id)?))) } } @@ -630,17 +742,34 @@ fn parse_vec_n_times_with_index( Ok((input, results)) } +fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { + let (input, len) = read_u64(input)?; + let (input, data) = read_bytes(input, len as usize)?; + Ok((input, data)) +} + +fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { + let (input, len) = read_u32_as_usize(input)?; + let (input, data) = read_bytes(input, len)?; + Ok((input, data)) +} + fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> { - if input.len() < len { - bail!("Unexpected end of data.",); - } + check_has_len(input, len)?; let (len_bytes, input) = input.split_at(len); Ok((input, len_bytes)) } +#[inline(always)] +fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> { + if input.len() < len { + bail!("Unexpected end of data."); + } + Ok(()) +} + fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow), AnyError> { - let (input, str_len) = read_u32_as_usize(input)?; - let (input, data_bytes) = read_bytes(input, str_len)?; + let (input, data_bytes) = read_bytes_with_u32_len(input)?; Ok((input, String::from_utf8_lossy(data_bytes))) } diff --git a/cli/standalone/virtual_fs.rs b/cli/standalone/virtual_fs.rs index 04e66d680e..522fe47dd9 100644 --- a/cli/standalone/virtual_fs.rs +++ b/cli/standalone/virtual_fs.rs @@ -67,7 +67,7 @@ impl WindowsSystemRootablePath { #[derive(Debug)] pub struct BuiltVfs { pub root_path: WindowsSystemRootablePath, - pub root: VirtualDirectory, + pub entries: VirtualDirectoryEntries, pub files: Vec>, } @@ -95,7 +95,7 @@ impl VfsBuilder { Self { executable_root: VirtualDirectory { name: "/".to_string(), - entries: Vec::new(), + entries: Default::default(), }, files: Vec::new(), current_offset: 0, @@ -208,23 +208,20 @@ impl VfsBuilder { continue; } let name = component.as_os_str().to_string_lossy(); - let index = match current_dir - .entries - .binary_search_by(|e| e.name().cmp(&name)) - { + let index = match current_dir.entries.binary_search(&name) { Ok(index) => index, Err(insert_index) => { - current_dir.entries.insert( + current_dir.entries.0.insert( insert_index, VfsEntry::Dir(VirtualDirectory { name: name.to_string(), - entries: Vec::new(), + entries: Default::default(), }), ); insert_index } }; - match &mut current_dir.entries[index] { + match &mut current_dir.entries.0[index] { VfsEntry::Dir(dir) => { current_dir = dir; } @@ -248,14 +245,8 @@ impl VfsBuilder { continue; } let name = component.as_os_str().to_string_lossy(); - let index = match current_dir - .entries - .binary_search_by(|e| e.name().cmp(&name)) - { - Ok(index) => index, - Err(_) => return None, - }; - match &mut current_dir.entries[index] { + let entry = current_dir.entries.get_mut_by_name(&name)?; 
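// Sketch of the invariant behind get_mut_by_name/binary_search here: the new
// VirtualDirectoryEntries wrapper (defined later in this file) keeps entries
// sorted by name, so every lookup and insert is a binary search rather than a
// linear scan. SortedNames is a hypothetical stand-in that stores plain
// strings instead of VfsEntry values.
struct SortedNames(Vec<String>);

impl SortedNames {
  fn insert(&mut self, name: String) {
    match self.0.binary_search_by(|e| e.as_str().cmp(&name)) {
      Ok(index) => self.0[index] = name, // overwrite the existing entry
      Err(insert_index) => self.0.insert(insert_index, name), // keep sorted
    }
  }

  fn get(&self, name: &str) -> Option<&str> {
    self
      .0
      .binary_search_by(|e| e.as_str().cmp(name))
      .ok()
      .map(|index| self.0[index].as_str())
  }
}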
+ match entry { VfsEntry::Dir(dir) => { current_dir = dir; } @@ -320,9 +311,9 @@ impl VfsBuilder { offset, len: data.len() as u64, }; - match dir.entries.binary_search_by(|e| e.name().cmp(&name)) { + match dir.entries.binary_search(&name) { Ok(index) => { - let entry = &mut dir.entries[index]; + let entry = &mut dir.entries.0[index]; match entry { VfsEntry::File(virtual_file) => match sub_data_kind { VfsFileSubDataKind::Raw => { @@ -336,7 +327,7 @@ impl VfsBuilder { } } Err(insert_index) => { - dir.entries.insert( + dir.entries.0.insert( insert_index, VfsEntry::File(VirtualFile { name: name.to_string(), @@ -384,10 +375,10 @@ impl VfsBuilder { let target = normalize_path(path.parent().unwrap().join(&target)); let dir = self.add_dir_raw(path.parent().unwrap()); let name = path.file_name().unwrap().to_string_lossy(); - match dir.entries.binary_search_by(|e| e.name().cmp(&name)) { + match dir.entries.binary_search(&name) { Ok(_) => {} // previously inserted Err(insert_index) => { - dir.entries.insert( + dir.entries.0.insert( insert_index, VfsEntry::Symlink(VirtualSymlink { name: name.to_string(), @@ -426,7 +417,7 @@ impl VfsBuilder { dir: &mut VirtualDirectory, parts: &[String], ) { - for entry in &mut dir.entries { + for entry in &mut dir.entries.0 { match entry { VfsEntry::Dir(dir) => { strip_prefix_from_symlinks(dir, parts); @@ -454,13 +445,13 @@ impl VfsBuilder { if self.min_root_dir.as_ref() == Some(¤t_path) { break; } - match ¤t_dir.entries[0] { + match ¤t_dir.entries.0[0] { VfsEntry::Dir(dir) => { if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME { // special directory we want to maintain break; } - match current_dir.entries.remove(0) { + match current_dir.entries.0.remove(0) { VfsEntry::Dir(dir) => { current_path = WindowsSystemRootablePath::Path(current_path.join(&dir.name)); @@ -480,7 +471,7 @@ impl VfsBuilder { } BuiltVfs { root_path: current_path, - root: current_dir, + entries: current_dir.entries, files: self.files, } } @@ -506,7 +497,7 @@ pub fn output_vfs(vfs: &BuiltVfs, executable_name: &str) { return; // no need to compute if won't output } - if vfs.root.entries.is_empty() { + if vfs.entries.is_empty() { return; // nothing to output } @@ -696,7 +687,7 @@ fn vfs_as_display_tree( fn dir_size(dir: &VirtualDirectory, seen_offsets: &mut HashSet) -> Size { let mut size = Size::default(); - for entry in &dir.entries { + for entry in dir.entries.iter() { match entry { VfsEntry::Dir(virtual_directory) => { size = size + dir_size(virtual_directory, seen_offsets); @@ -760,15 +751,10 @@ fn vfs_as_display_tree( fn include_all_entries<'a>( dir_path: &WindowsSystemRootablePath, - vfs_dir: &'a VirtualDirectory, + entries: &'a VirtualDirectoryEntries, seen_offsets: &mut HashSet, ) -> Vec> { - if vfs_dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME { - return show_global_node_modules_dir(vfs_dir, seen_offsets); - } - - vfs_dir - .entries + entries .iter() .map(|entry| DirEntryOutput { name: Cow::Borrowed(entry.name()), @@ -826,10 +812,12 @@ fn vfs_as_display_tree( } else { EntryOutput::Subset(children) } + } else if vfs_dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME { + EntryOutput::Subset(show_global_node_modules_dir(vfs_dir, seen_offsets)) } else { EntryOutput::Subset(include_all_entries( &WindowsSystemRootablePath::Path(dir), - vfs_dir, + &vfs_dir.entries, seen_offsets, )) } @@ -839,7 +827,7 @@ fn vfs_as_display_tree( // user might not have context about what's being shown let mut seen_offsets = HashSet::with_capacity(vfs.files.len()); let mut child_entries = - 
include_all_entries(&vfs.root_path, &vfs.root, &mut seen_offsets); + include_all_entries(&vfs.root_path, &vfs.entries, &mut seen_offsets); for child_entry in &mut child_entries { child_entry.collapse_leaf_nodes(); } @@ -961,27 +949,70 @@ impl VfsEntry { } } +#[derive(Debug, Default, Serialize, Deserialize)] +pub struct VirtualDirectoryEntries(Vec); + +impl VirtualDirectoryEntries { + pub fn new(mut entries: Vec) -> Self { + // needs to be sorted by name + entries.sort_by(|a, b| a.name().cmp(b.name())); + Self(entries) + } + + pub fn take_inner(&mut self) -> Vec { + std::mem::take(&mut self.0) + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub fn len(&self) -> usize { + self.0.len() + } + + pub fn get_by_name(&self, name: &str) -> Option<&VfsEntry> { + self.binary_search(name).ok().map(|index| &self.0[index]) + } + + pub fn get_mut_by_name(&mut self, name: &str) -> Option<&mut VfsEntry> { + self + .binary_search(name) + .ok() + .map(|index| &mut self.0[index]) + } + + pub fn binary_search(&self, name: &str) -> Result { + self.0.binary_search_by(|e| e.name().cmp(name)) + } + + pub fn insert(&mut self, entry: VfsEntry) { + match self.binary_search(entry.name()) { + Ok(index) => { + self.0[index] = entry; + } + Err(insert_index) => { + self.0.insert(insert_index, entry); + } + } + } + + pub fn remove(&mut self, index: usize) -> VfsEntry { + self.0.remove(index) + } + + pub fn iter(&self) -> std::slice::Iter<'_, VfsEntry> { + self.0.iter() + } +} + #[derive(Debug, Serialize, Deserialize)] pub struct VirtualDirectory { #[serde(rename = "n")] pub name: String, // should be sorted by name #[serde(rename = "e")] - pub entries: Vec, -} - -impl VirtualDirectory { - pub fn insert_entry(&mut self, entry: VfsEntry) { - let name = entry.name(); - match self.entries.binary_search_by(|e| e.name().cmp(name)) { - Ok(index) => { - self.entries[index] = entry; - } - Err(insert_index) => { - self.entries.insert(insert_index, entry); - } - } - } + pub entries: VirtualDirectoryEntries, } #[derive(Debug, Clone, Copy, Serialize, Deserialize)] @@ -1136,20 +1167,13 @@ impl VfsRoot { } }; let component = component.to_string_lossy(); - match current_dir + current_entry = current_dir .entries - .binary_search_by(|e| e.name().cmp(&component)) - { - Ok(index) => { - current_entry = current_dir.entries[index].as_ref(); - } - Err(_) => { - return Err(std::io::Error::new( - std::io::ErrorKind::NotFound, - "path not found", - )); - } - } + .get_by_name(&component) + .ok_or_else(|| { + std::io::Error::new(std::io::ErrorKind::NotFound, "path not found") + })? 
+ .as_ref(); } Ok((final_path, current_entry)) @@ -1706,7 +1730,10 @@ mod test { FileBackedVfs::new( Cow::Owned(data), VfsRoot { - dir: vfs.root, + dir: VirtualDirectory { + name: "".to_string(), + entries: vfs.entries, + }, root_path: dest_path.to_path_buf(), start_file_offset: 0, }, diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 624fa76bf6..4736bdab41 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -26,6 +26,7 @@ use deno_core::serde_json; use deno_core::sourcemap::SourceMap; use deno_core::url::Url; use deno_core::LocalInspectorSession; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use node_resolver::InNpmPackageChecker; use regex::Regex; use std::fs; @@ -198,7 +199,7 @@ pub struct CoverageReport { fn generate_coverage_report( script_coverage: &cdp::ScriptCoverage, script_source: String, - maybe_source_map: &Option>, + maybe_source_map: Option<&[u8]>, output: &Option, ) -> CoverageReport { let maybe_source_map = maybe_source_map @@ -428,7 +429,7 @@ fn collect_coverages( .ignore_git_folder() .ignore_node_modules() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, file_patterns)?; + .collect_file_patterns(&FsSysTraitsAdapter::new_real(), file_patterns)?; let coverage_patterns = FilePatterns { base: initial_cwd.to_path_buf(), @@ -625,7 +626,7 @@ pub fn cover_files( let coverage_report = generate_coverage_report( &script_coverage, runtime_code.as_str().to_owned(), - &source_map, + source_map.as_deref(), &out_mode, ); diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 647a36dc48..c33b988de0 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -28,6 +28,7 @@ use deno_graph::EsParser; use deno_graph::GraphKind; use deno_graph::ModuleAnalyzer; use deno_graph::ModuleSpecifier; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use doc::html::ShortPath; use doc::DocDiagnostic; use indexmap::IndexMap; @@ -114,7 +115,7 @@ pub async fn doc( } DocSourceFileFlag::Paths(ref source_files) => { let module_graph_creator = factory.module_graph_creator().await?; - let fs = factory.fs(); + let fs = FsSysTraitsAdapter(factory.fs().clone()); let module_specifiers = collect_specifiers( FilePatterns { @@ -141,7 +142,7 @@ pub async fn doc( graph_exit_integrity_errors(&graph); let errors = graph_walk_errors( &graph, - fs, + &fs, &module_specifiers, GraphWalkErrorsOptions { check_js: false, @@ -343,14 +344,14 @@ impl deno_doc::html::HrefResolver for DocResolver { let name = &res.req().name; Some(( format!("https://www.npmjs.com/package/{name}"), - name.to_owned(), + name.to_string(), )) } "jsr" => { let res = deno_semver::jsr::JsrPackageReqReference::from_str(module).ok()?; let name = &res.req().name; - Some((format!("https://jsr.io/{name}"), name.to_owned())) + Some((format!("https://jsr.io/{name}"), name.to_string())) } _ => None, } diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index e29627345c..55046155c0 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -34,6 +34,7 @@ use deno_core::futures; use deno_core::parking_lot::Mutex; use deno_core::unsync::spawn_blocking; use deno_core::url::Url; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use log::debug; use log::info; use log::warn; @@ -230,7 +231,7 @@ fn collect_fmt_files( .ignore_node_modules() .use_gitignore() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files) + .collect_file_patterns(&FsSysTraitsAdapter::new_real(), files) 
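// Aside on the coverage change above: generate_coverage_report now borrows the
// source map as Option<&[u8]>, so a caller holding Option<Vec<u8>> lends the
// bytes with .as_deref() instead of passing &Option<Vec<u8>>. A hypothetical
// minimal illustration:
fn source_map_len(maybe_source_map: Option<&[u8]>) -> usize {
  maybe_source_map.map(|sm| sm.len()).unwrap_or(0)
}

fn as_deref_example() {
  let owned: Option<Vec<u8>> = Some(vec![1, 2, 3]);
  // Option<Vec<u8>> -> Option<&[u8]> without moving or cloning the buffer
  assert_eq!(source_map_len(owned.as_deref()), 3);
}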
} /// Formats markdown (using ) and its code blocks diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 7a35f597c3..39a7a912bf 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -278,8 +278,10 @@ fn add_npm_packages_to_json( }); if let Some(pkg) = maybe_package { if let Some(module) = module.as_object_mut() { - module - .insert("npmPackage".to_string(), pkg.id.as_serialized().into()); + module.insert( + "npmPackage".to_string(), + pkg.id.as_serialized().into_string().into(), + ); } } } @@ -296,7 +298,7 @@ fn add_npm_packages_to_json( { dep.insert( "npmPackage".to_string(), - pkg.id.as_serialized().into(), + pkg.id.as_serialized().into_string().into(), ); } } @@ -324,19 +326,19 @@ fn add_npm_packages_to_json( let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len()); for pkg in sorted_packages { let mut kv = serde_json::Map::new(); - kv.insert("name".to_string(), pkg.id.nv.name.clone().into()); + kv.insert("name".to_string(), pkg.id.nv.name.to_string().into()); kv.insert("version".to_string(), pkg.id.nv.version.to_string().into()); let mut deps = pkg.dependencies.values().collect::>(); deps.sort(); let deps = deps .into_iter() - .map(|id| serde_json::Value::String(id.as_serialized())) + .map(|id| serde_json::Value::String(id.as_serialized().into_string())) .collect::>(); kv.insert("dependencies".to_string(), deps.into()); let registry_url = npmrc.get_registry_url(&pkg.id.nv.name); kv.insert("registryUrl".to_string(), registry_url.to_string().into()); - json_packages.insert(pkg.id.as_serialized(), kv.into()); + json_packages.insert(pkg.id.as_serialized().into_string(), kv.into()); } json.insert("npmPackages".to_string(), json_packages.into()); @@ -549,7 +551,7 @@ impl<'a> GraphDisplayContext<'a> { None => Specifier(module.specifier().clone()), }; let was_seen = !self.seen.insert(match &package_or_specifier { - Package(package) => package.id.as_serialized(), + Package(package) => package.id.as_serialized().into_string(), Specifier(specifier) => specifier.to_string(), }); let header_text = if was_seen { @@ -631,7 +633,8 @@ impl<'a> GraphDisplayContext<'a> { )); if let Some(package) = self.npm_info.packages.get(dep_id) { if !package.dependencies.is_empty() { - let was_seen = !self.seen.insert(package.id.as_serialized()); + let was_seen = + !self.seen.insert(package.id.as_serialized().into_string()); if was_seen { child.text = format!("{} {}", child.text, colors::gray("*")); } else { diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index dac7340d40..ec538ecb0a 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -161,11 +161,11 @@ pub async fn infer_name_from_url( let npm_ref = npm_ref.into_inner(); if let Some(sub_path) = npm_ref.sub_path { if !sub_path.contains('/') { - return Some(sub_path); + return Some(sub_path.to_string()); } } if !npm_ref.req.name.contains('/') { - return Some(npm_ref.req.name); + return Some(npm_ref.req.name.into_string()); } return None; } diff --git a/cli/tools/lint/ast_buffer/buffer.rs b/cli/tools/lint/ast_buffer/buffer.rs new file mode 100644 index 0000000000..d162ee3de1 --- /dev/null +++ b/cli/tools/lint/ast_buffer/buffer.rs @@ -0,0 +1,519 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::fmt::Display; + +use deno_ast::swc::common::Span; +use deno_ast::swc::common::DUMMY_SP; +use indexmap::IndexMap; + +/// Each property has this flag to mark what kind of value it holds- +/// Plain objects and arrays are not supported yet, but could be easily +/// added if needed. 
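// Note on append_u32 below (its TODO asks for a native equivalent): the
// mask-and-shift sequence produces big-endian byte order, which is exactly
// what u32::to_be_bytes yields, so an equivalent one-liner would be:
fn append_u32_std(result: &mut Vec<u8>, value: u32) {
  result.extend_from_slice(&value.to_be_bytes());
}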
+#[derive(Debug, PartialEq)]
+pub enum PropFlags {
+  Ref,
+  RefArr,
+  String,
+  Bool,
+  Null,
+  Undefined,
+}
+
+impl From<PropFlags> for u8 {
+  fn from(m: PropFlags) -> u8 {
+    m as u8
+  }
+}
+
+impl TryFrom<u8> for PropFlags {
+  type Error = &'static str;
+
+  fn try_from(value: u8) -> Result<Self, Self::Error> {
+    match value {
+      0 => Ok(PropFlags::Ref),
+      1 => Ok(PropFlags::RefArr),
+      2 => Ok(PropFlags::String),
+      3 => Ok(PropFlags::Bool),
+      4 => Ok(PropFlags::Null),
+      5 => Ok(PropFlags::Undefined),
+      _ => Err("Unknown Prop flag"),
+    }
+  }
+}
+
+const MASK_U32_1: u32 = 0b11111111_00000000_00000000_00000000;
+const MASK_U32_2: u32 = 0b00000000_11111111_00000000_00000000;
+const MASK_U32_3: u32 = 0b00000000_00000000_11111111_00000000;
+const MASK_U32_4: u32 = 0b00000000_00000000_00000000_11111111;
+
+// TODO: There is probably a native Rust function to do this.
+pub fn append_u32(result: &mut Vec<u8>, value: u32) {
+  let v1: u8 = ((value & MASK_U32_1) >> 24) as u8;
+  let v2: u8 = ((value & MASK_U32_2) >> 16) as u8;
+  let v3: u8 = ((value & MASK_U32_3) >> 8) as u8;
+  let v4: u8 = (value & MASK_U32_4) as u8;
+
+  result.push(v1);
+  result.push(v2);
+  result.push(v3);
+  result.push(v4);
+}
+
+pub fn append_usize(result: &mut Vec<u8>, value: usize) {
+  let raw = u32::try_from(value).unwrap();
+  append_u32(result, raw);
+}
+
+pub fn write_usize(result: &mut [u8], value: usize, idx: usize) {
+  let raw = u32::try_from(value).unwrap();
+
+  let v1: u8 = ((raw & MASK_U32_1) >> 24) as u8;
+  let v2: u8 = ((raw & MASK_U32_2) >> 16) as u8;
+  let v3: u8 = ((raw & MASK_U32_3) >> 8) as u8;
+  let v4: u8 = (raw & MASK_U32_4) as u8;
+
+  result[idx] = v1;
+  result[idx + 1] = v2;
+  result[idx + 2] = v3;
+  result[idx + 3] = v4;
+}
+
+#[derive(Debug)]
+pub struct StringTable {
+  id: usize,
+  table: IndexMap<String, usize>,
+}
+
+impl StringTable {
+  pub fn new() -> Self {
+    Self {
+      id: 0,
+      table: IndexMap::new(),
+    }
+  }
+
+  pub fn insert(&mut self, s: &str) -> usize {
+    if let Some(id) = self.table.get(s) {
+      return *id;
+    }
+
+    let id = self.id;
+    self.id += 1;
+    self.table.insert(s.to_string(), id);
+    id
+  }
+
+  pub fn serialize(&mut self) -> Vec<u8> {
+    let mut result: Vec<u8> = vec![];
+    append_u32(&mut result, self.table.len() as u32);
+
+    // Assume that it's sorted by id
+    for (s, _id) in &self.table {
+      let bytes = s.as_bytes();
+      append_u32(&mut result, bytes.len() as u32);
+      result.append(&mut bytes.to_vec());
+    }
+
+    result
+  }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub struct NodeRef(pub usize);
+
+#[derive(Debug)]
+pub struct BoolPos(pub usize);
+#[derive(Debug)]
+pub struct FieldPos(pub usize);
+#[derive(Debug)]
+pub struct FieldArrPos(pub usize);
+#[derive(Debug)]
+pub struct StrPos(pub usize);
+#[derive(Debug)]
+pub struct UndefPos(pub usize);
+#[derive(Debug)]
+pub struct NullPos(pub usize);
+
+#[derive(Debug)]
+pub enum NodePos {
+  Bool(BoolPos),
+  #[allow(dead_code)]
+  Field(FieldPos),
+  #[allow(dead_code)]
+  FieldArr(FieldArrPos),
+  Str(StrPos),
+  Undef(UndefPos),
+  #[allow(dead_code)]
+  Null(NullPos),
+}
+
+pub trait AstBufSerializer<K, P>
+where
+  K: Into<u8> + Display,
+  P: Into<u8> + Display,
+{
+  fn header(
+    &mut self,
+    kind: K,
+    parent: NodeRef,
+    span: &Span,
+    prop_count: usize,
+  ) -> NodeRef;
+  fn ref_field(&mut self, prop: P) -> FieldPos;
+  fn ref_vec_field(&mut self, prop: P, len: usize) -> FieldArrPos;
+  fn str_field(&mut self, prop: P) -> StrPos;
+  fn bool_field(&mut self, prop: P) -> BoolPos;
+  fn undefined_field(&mut self, prop: P) -> UndefPos;
+  #[allow(dead_code)]
+  fn null_field(&mut self, prop: P) -> NullPos;
+
+  fn write_ref(&mut self, pos: FieldPos, value: NodeRef);
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub struct NodeRef(pub usize);
+
+#[derive(Debug)]
+pub struct BoolPos(pub usize);
+#[derive(Debug)]
+pub struct FieldPos(pub usize);
+#[derive(Debug)]
+pub struct FieldArrPos(pub usize);
+#[derive(Debug)]
+pub struct StrPos(pub usize);
+#[derive(Debug)]
+pub struct UndefPos(pub usize);
+#[derive(Debug)]
+pub struct NullPos(pub usize);
+
+#[derive(Debug)]
+pub enum NodePos {
+  Bool(BoolPos),
+  #[allow(dead_code)]
+  Field(FieldPos),
+  #[allow(dead_code)]
+  FieldArr(FieldArrPos),
+  Str(StrPos),
+  Undef(UndefPos),
+  #[allow(dead_code)]
+  Null(NullPos),
+}
+
+pub trait AstBufSerializer<K, P>
+where
+  K: Into<u8> + Display,
+  P: Into<u8> + Display,
+{
+  fn header(
+    &mut self,
+    kind: K,
+    parent: NodeRef,
+    span: &Span,
+    prop_count: usize,
+  ) -> NodeRef;
+  fn ref_field(&mut self, prop: P) -> FieldPos;
+  fn ref_vec_field(&mut self, prop: P, len: usize) -> FieldArrPos;
+  fn str_field(&mut self, prop: P) -> StrPos;
+  fn bool_field(&mut self, prop: P) -> BoolPos;
+  fn undefined_field(&mut self, prop: P) -> UndefPos;
+  #[allow(dead_code)]
+  fn null_field(&mut self, prop: P) -> NullPos;
+
+  fn write_ref(&mut self, pos: FieldPos, value: NodeRef);
+  fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>);
+  fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>);
+  fn write_str(&mut self, pos: StrPos, value: &str);
+  fn write_bool(&mut self, pos: BoolPos, value: bool);
+
+  fn serialize(&mut self) -> Vec<u8>;
+}
+
+#[derive(Debug)]
+pub struct SerializeCtx {
+  buf: Vec<u8>,
+  start_buf: NodeRef,
+  str_table: StringTable,
+  kind_map: Vec<usize>,
+  prop_map: Vec<usize>,
+}
+
+/// This is the internal context used to allocate and fill the buffer. The
+/// point is to be able to write absolute offsets directly in place.
+///
+/// The typical workflow is to reserve all necessary space for the current
+/// node with placeholders for the offsets of the child nodes. Once child
+/// nodes have been traversed, we know their offsets and can replace the
+/// placeholder values with the actual ones.
+impl SerializeCtx {
+  pub fn new(kind_len: u8, prop_len: u8) -> Self {
+    let kind_size = kind_len as usize;
+    let prop_size = prop_len as usize;
+    let mut ctx = Self {
+      start_buf: NodeRef(0),
+      buf: vec![],
+      str_table: StringTable::new(),
+      kind_map: vec![0; kind_size + 1],
+      prop_map: vec![0; prop_size + 1],
+    };
+
+    let empty_str = ctx.str_table.insert("");
+
+    // Placeholder node is always 0
+    ctx.append_node(0, NodeRef(0), &DUMMY_SP, 0);
+    ctx.kind_map[0] = empty_str;
+    ctx.start_buf = NodeRef(ctx.buf.len());
+
+    // Insert default props that are always present
+    let type_str = ctx.str_table.insert("type");
+    let parent_str = ctx.str_table.insert("parent");
+    let range_str = ctx.str_table.insert("range");
+    let length_str = ctx.str_table.insert("length");
+
+    // These values are expected to be in this order on the JS side
+    ctx.prop_map[0] = empty_str;
+    ctx.prop_map[1] = type_str;
+    ctx.prop_map[2] = parent_str;
+    ctx.prop_map[3] = range_str;
+    ctx.prop_map[4] = length_str;
+
+    ctx
+  }
+
+  /// Allocate a field's header: the property id followed by its type flag.
+  fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    let offset = self.buf.len();
+
+    let n: u8 = prop.clone().into();
+    self.buf.push(n);
+
+    if let Some(v) = self.prop_map.get::<usize>(n.into()) {
+      if *v == 0 {
+        let id = self.str_table.insert(&format!("{prop}"));
+        self.prop_map[n as usize] = id;
+      }
+    }
+
+    let flags: u8 = prop_flags.into();
+    self.buf.push(flags);
+
+    offset
+  }
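The reserve-then-patch workflow described in the `SerializeCtx` docs above boils down to: write a `[prop id u8][flags u8]` field header, reserve a zeroed payload, and overwrite it in place once the child's offset is known. A standalone sketch of that pattern (hypothetical demo code, not part of the diff; the prop id `1` and flag `0` mirror the `"type"` prop and `PropFlags::Ref` defined above):

```rust
fn demo_reserve_then_patch() {
  let mut buf: Vec<u8> = vec![];
  buf.push(1); // prop id (1 maps to "type" in prop_map)
  buf.push(0); // flags byte: 0 decodes as PropFlags::Ref via TryFrom<u8>
  let payload_at = buf.len();
  buf.extend_from_slice(&0u32.to_be_bytes()); // zeroed placeholder
  // ...the child node is serialized later, say at absolute offset 14...
  buf[payload_at..payload_at + 4].copy_from_slice(&14u32.to_be_bytes());
  assert_eq!(&buf[payload_at..], &14u32.to_be_bytes());
}
```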
+
+  /// Allocate a property pointing to another node.
+  fn field<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    let offset = self.field_header(prop, prop_flags);
+
+    append_usize(&mut self.buf, 0);
+
+    offset
+  }
+
+  fn append_node(
+    &mut self,
+    kind: u8,
+    parent: NodeRef,
+    span: &Span,
+    prop_count: usize,
+  ) -> NodeRef {
+    let offset = self.buf.len();
+
+    // Node type fits in a u8
+    self.buf.push(kind);
+
+    // Offset to the parent node. Will be 0 if none exists
+    append_usize(&mut self.buf, parent.0);
+
+    // Span, the start and end location of this node
+    append_u32(&mut self.buf, span.lo.0);
+    append_u32(&mut self.buf, span.hi.0);
+
+    // No node has more than <10 properties
+    debug_assert!(prop_count < 10);
+    self.buf.push(prop_count as u8);
+
+    NodeRef(offset)
+  }
+
+  /// Allocate the node header. It's always the same for every node.
+  ///
+  ///   <kind u8><parent offset u32><span lo u32><span hi u32><prop count u8>
+  ///
+  /// (There is no node with more than 10 properties)
+  pub fn header<N>(
+    &mut self,
+    kind: N,
+    parent: NodeRef,
+    span: &Span,
+    prop_count: usize,
+  ) -> NodeRef
+  where
+    N: Into<u8> + Display + Clone,
+  {
+    let n: u8 = kind.clone().into();
+
+    if let Some(v) = self.kind_map.get::<usize>(n.into()) {
+      if *v == 0 {
+        let id = self.str_table.insert(&format!("{kind}"));
+        self.kind_map[n as usize] = id;
+      }
+    }
+
+    self.append_node(n, parent, span, prop_count)
+  }
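Every node therefore costs a fixed 14 bytes before its fields: 1 (kind) + 4 (parent offset) + 4 (span lo) + 4 (span hi) + 1 (prop count), with all multi-byte values big-endian. A self-contained restatement of the layout written by `append_node` above (illustrative sketch, not part of the diff):

```rust
fn append_node_header(
  buf: &mut Vec<u8>,
  kind: u8,
  parent_offset: u32,
  span_lo: u32,
  span_hi: u32,
  prop_count: u8,
) -> usize {
  let offset = buf.len();
  buf.push(kind);
  buf.extend_from_slice(&parent_offset.to_be_bytes()); // 0 if no parent
  buf.extend_from_slice(&span_lo.to_be_bytes());
  buf.extend_from_slice(&span_hi.to_be_bytes());
  buf.push(prop_count);
  debug_assert_eq!(buf.len() - offset, 14); // fixed node header size
  offset
}
```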
+
+  /// Allocate a reference property that will hold the offset of
+  /// another node.
+  pub fn ref_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field(prop, PropFlags::Ref)
+  }
+
+  /// Allocate a property that is a vec of node offsets pointing to other
+  /// nodes.
+  pub fn ref_vec_field<P>(&mut self, prop: P, len: usize) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    let offset = self.field(prop, PropFlags::RefArr);
+
+    for _ in 0..len {
+      append_u32(&mut self.buf, 0);
+    }
+
+    offset
+  }
+
+  /// Allocate a property representing a string. Strings are deduplicated
+  /// in the message and the property will only contain the string id.
+  pub fn str_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field(prop, PropFlags::String)
+  }
+
+  /// Allocate a bool field
+  pub fn bool_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    let offset = self.field_header(prop, PropFlags::Bool);
+    self.buf.push(0);
+    offset
+  }
+
+  /// Allocate an undefined field
+  pub fn undefined_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::Undefined)
+  }
+
+  /// Allocate a null field
+  #[allow(dead_code)]
+  pub fn null_field<P>(&mut self, prop: P) -> usize
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::Null)
+  }
+
+  /// Replace the placeholder of a reference field with the actual offset
+  /// to the node we want to point to.
+  pub fn write_ref(&mut self, field_offset: usize, value: NodeRef) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
+        panic!("Trying to write a ref into a non-ref field")
+      }
+    }
+
+    write_usize(&mut self.buf, value.0, field_offset + 2);
+  }
+
+  /// Helper for writing optional node offsets
+  pub fn write_maybe_ref(
+    &mut self,
+    field_offset: usize,
+    value: Option<NodeRef>,
+  ) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
+        panic!("Trying to write a ref into a non-ref field")
+      }
+    }
+
+    let ref_value = if let Some(v) = value { v } else { NodeRef(0) };
+    write_usize(&mut self.buf, ref_value.0, field_offset + 2);
+  }
+
+  /// Write a vec of node offsets into the property. The necessary space
+  /// has been reserved earlier.
+  pub fn write_refs(&mut self, field_offset: usize, value: Vec<NodeRef>) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::RefArr {
+        panic!("Trying to write a ref into a non-ref array field")
+      }
+    }
+
+    let mut offset = field_offset + 2;
+    write_usize(&mut self.buf, value.len(), offset);
+    offset += 4;
+
+    for item in value {
+      write_usize(&mut self.buf, item.0, offset);
+      offset += 4;
+    }
+  }
+
+  /// Store the string in our string table and save the id of the string
+  /// in the current field.
+  pub fn write_str(&mut self, field_offset: usize, value: &str) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::String {
+        panic!("Trying to write a string into a non-string field")
+      }
+    }
+
+    let id = self.str_table.insert(value);
+    write_usize(&mut self.buf, id, field_offset + 2);
+  }
+
+  /// Write a bool to a field.
+  pub fn write_bool(&mut self, field_offset: usize, value: bool) {
+    #[cfg(debug_assertions)]
+    {
+      let value_kind = self.buf[field_offset + 1];
+      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Bool {
+        panic!("Trying to write a bool into a non-bool field")
+      }
+    }
+
+    self.buf[field_offset + 2] = if value { 1 } else { 0 };
+  }
+
+  /// Serialize all information we have into a buffer that can be sent to JS.
+  /// It has the following structure:
+  ///
+  ///   <...ast>
+  ///   <string table>
+  ///   <node kind map> <- node kind id maps to string id
+  ///   <node prop map> <- node property id maps to string id
+  ///   <offset kind map>
+  ///   <offset prop map>
+  ///   <offset string table>
+  ///   <offset start of ast>
+  pub fn serialize(&mut self) -> Vec<u8> {
+    let mut buf: Vec<u8> = vec![];
+
+    // The buffer starts with the serialized AST first, because that
+    // contains absolute offsets. By putting this at the start of the
+    // message we don't have to waste time updating any offsets.
+    buf.append(&mut self.buf);
+
+    // Next follows the string table. We'll keep track of the offset
+    // in the message of where the string table begins
+    let offset_str_table = buf.len();
+
+    // Serialize string table
+    buf.append(&mut self.str_table.serialize());
+
+    // Next, serialize the mappings of kind -> string of encountered
+    // nodes in the AST. We use this additional lookup table to compress
+    // the message so that we can save space by using a u8. All nodes of
+    // JS, TS and JSX together are <200.
+    let offset_kind_map = buf.len();
+
+    // Write the total number of entries in the kind -> str mapping table
+    // TODO: make this a u8
+    append_usize(&mut buf, self.kind_map.len());
+    for v in &self.kind_map {
+      append_usize(&mut buf, *v);
+    }
+
+    // Store offset to prop -> string map. It's the same as with node kind
+    // as the total number of properties is <120 which allows us to store it
+    // as u8.
+    let offset_prop_map = buf.len();
+    // Write the total number of entries in the prop -> str mapping table
+    append_usize(&mut buf, self.prop_map.len());
+    for v in &self.prop_map {
+      append_usize(&mut buf, *v);
+    }
+
+    // Putting offsets of relevant parts of the buffer at the end. This
+    // allows us to hop to the relevant part by merely looking at the last
+    // four values in the message. Each value represents an offset into the
+    // buffer.
+    append_usize(&mut buf, offset_kind_map);
+    append_usize(&mut buf, offset_prop_map);
+    append_usize(&mut buf, offset_str_table);
+    append_usize(&mut buf, self.start_buf.0);
+
+    buf
+  }
+}
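Because `serialize` appends the four section offsets last (each a big-endian `u32` written via `append_usize`), a consumer can locate every section by reading the trailer first. A minimal sketch, not part of the diff (`read_trailer` is a hypothetical helper):

```rust
// Recovers the section offsets from a serialized buffer. The last 16
// bytes are: kind-map offset, prop-map offset, string-table offset, and
// the offset of the first real AST node, in that order.
fn read_trailer(buf: &[u8]) -> (usize, usize, usize, usize) {
  let u32_at = |idx: usize| {
    u32::from_be_bytes([buf[idx], buf[idx + 1], buf[idx + 2], buf[idx + 3]])
      as usize
  };
  let n = buf.len();
  (
    u32_at(n - 16), // offset of the kind -> string id map
    u32_at(n - 12), // offset of the prop -> string id map
    u32_at(n - 8),  // offset of the string table
    u32_at(n - 4),  // offset of the start of the AST
  )
}
```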
diff --git a/cli/tools/lint/ast_buffer/mod.rs b/cli/tools/lint/ast_buffer/mod.rs
new file mode 100644
index 0000000000..8838bcc5f2
--- /dev/null
+++ b/cli/tools/lint/ast_buffer/mod.rs
@@ -0,0 +1,13 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use deno_ast::ParsedSource;
+use swc::serialize_swc_to_buffer;
+
+mod buffer;
+mod swc;
+mod ts_estree;
+
+pub fn serialize_ast_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
+  // TODO: We could support multiple languages here
+  serialize_swc_to_buffer(parsed_source)
+}
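`serialize_ast_to_buffer` is the module's only public entry point: parse once with deno_ast, then hand the `ParsedSource` over. A hedged usage sketch, not part of the diff; the specifier, media type, and `ParseParams` field names are illustrative and vary between deno_ast versions:

```rust
use deno_ast::{parse_module, MediaType, ParseParams};

fn lint_plugin_payload(source: &str) -> Vec<u8> {
  // Hypothetical parse call; adjust fields to the deno_ast version in use.
  let parsed = parse_module(ParseParams {
    specifier: deno_ast::ModuleSpecifier::parse("file:///example.ts").unwrap(),
    text: source.into(),
    media_type: MediaType::TypeScript,
    capture_tokens: false,
    scope_analysis: false,
    maybe_syntax: None,
  })
  .unwrap();
  // One flat byte buffer the JS side can decode without a serde round-trip.
  serialize_ast_to_buffer(&parsed)
}
```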
diff --git a/cli/tools/lint/ast_buffer/swc.rs b/cli/tools/lint/ast_buffer/swc.rs
new file mode 100644
index 0000000000..785a38a7d8
--- /dev/null
+++ b/cli/tools/lint/ast_buffer/swc.rs
@@ -0,0 +1,3018 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use deno_ast::swc::ast::AssignTarget;
+use deno_ast::swc::ast::AssignTargetPat;
+use deno_ast::swc::ast::BlockStmtOrExpr;
+use deno_ast::swc::ast::Callee;
+use deno_ast::swc::ast::ClassMember;
+use deno_ast::swc::ast::Decl;
+use deno_ast::swc::ast::ExportSpecifier;
+use deno_ast::swc::ast::Expr;
+use deno_ast::swc::ast::ExprOrSpread;
+use deno_ast::swc::ast::FnExpr;
+use deno_ast::swc::ast::ForHead;
+use deno_ast::swc::ast::Function;
+use deno_ast::swc::ast::Ident;
+use deno_ast::swc::ast::IdentName;
+use deno_ast::swc::ast::JSXAttrName;
+use deno_ast::swc::ast::JSXAttrOrSpread;
+use deno_ast::swc::ast::JSXAttrValue;
+use deno_ast::swc::ast::JSXElement;
+use deno_ast::swc::ast::JSXElementChild;
+use deno_ast::swc::ast::JSXElementName;
+use deno_ast::swc::ast::JSXEmptyExpr;
+use deno_ast::swc::ast::JSXExpr;
+use deno_ast::swc::ast::JSXExprContainer;
+use deno_ast::swc::ast::JSXFragment;
+use deno_ast::swc::ast::JSXMemberExpr;
+use deno_ast::swc::ast::JSXNamespacedName;
+use deno_ast::swc::ast::JSXObject;
+use deno_ast::swc::ast::JSXOpeningElement;
+use deno_ast::swc::ast::Lit;
+use deno_ast::swc::ast::MemberExpr;
+use deno_ast::swc::ast::MemberProp;
+use deno_ast::swc::ast::ModuleDecl;
+use deno_ast::swc::ast::ModuleExportName;
+use deno_ast::swc::ast::ModuleItem;
+use deno_ast::swc::ast::ObjectPatProp;
+use deno_ast::swc::ast::OptChainBase;
+use deno_ast::swc::ast::Param;
+use deno_ast::swc::ast::ParamOrTsParamProp;
+use deno_ast::swc::ast::Pat;
+use deno_ast::swc::ast::PrivateName;
+use deno_ast::swc::ast::Program;
+use deno_ast::swc::ast::Prop;
+use deno_ast::swc::ast::PropName;
+use deno_ast::swc::ast::PropOrSpread;
+use deno_ast::swc::ast::SimpleAssignTarget;
+use deno_ast::swc::ast::Stmt;
+use deno_ast::swc::ast::SuperProp;
+use deno_ast::swc::ast::Tpl;
+use deno_ast::swc::ast::TsEntityName;
+use deno_ast::swc::ast::TsEnumMemberId;
+use deno_ast::swc::ast::TsFnOrConstructorType;
+use deno_ast::swc::ast::TsFnParam;
+use deno_ast::swc::ast::TsIndexSignature;
+use deno_ast::swc::ast::TsLit;
+use deno_ast::swc::ast::TsLitType;
+use deno_ast::swc::ast::TsThisTypeOrIdent;
+use deno_ast::swc::ast::TsType;
+use deno_ast::swc::ast::TsTypeAnn;
+use deno_ast::swc::ast::TsTypeElement;
+use deno_ast::swc::ast::TsTypeParam;
+use deno_ast::swc::ast::TsTypeParamDecl;
+use deno_ast::swc::ast::TsTypeParamInstantiation;
+use deno_ast::swc::ast::TsTypeQueryExpr;
+use deno_ast::swc::ast::TsUnionOrIntersectionType;
+use deno_ast::swc::ast::VarDeclOrExpr;
+use deno_ast::swc::common::Span;
+use deno_ast::swc::common::Spanned;
+use deno_ast::swc::common::SyntaxContext;
+use deno_ast::view::Accessibility;
+use deno_ast::view::AssignOp;
+use deno_ast::view::BinaryOp;
+use deno_ast::view::TruePlusMinus;
+use deno_ast::view::TsKeywordTypeKind;
+use deno_ast::view::TsTypeOperatorOp;
+use deno_ast::view::UnaryOp;
+use deno_ast::view::UpdateOp;
+use deno_ast::view::VarDeclKind;
+use deno_ast::ParsedSource;
+
+use super::buffer::AstBufSerializer;
+use super::buffer::BoolPos;
+use super::buffer::NodePos;
+use super::buffer::NodeRef;
+use super::buffer::StrPos;
+use super::ts_estree::AstNode;
+use super::ts_estree::AstProp;
+use super::ts_estree::TsEsTreeBuilder;
+
+pub fn serialize_swc_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
+  let mut ctx = TsEsTreeBuilder::new();
+
+  let program = &parsed_source.program();
+
+  let pos = ctx.header(AstNode::Program, NodeRef(0), &program.span(), 2);
+  let source_type_pos = ctx.str_field(AstProp::SourceType);
+
+  match program.as_ref() {
+    Program::Module(module) => {
+      let
body_pos = ctx.ref_vec_field(AstProp::Body, module.body.len()); + + let children = module + .body + .iter() + .map(|item| match item { + ModuleItem::ModuleDecl(module_decl) => { + serialize_module_decl(&mut ctx, module_decl, pos) + } + ModuleItem::Stmt(stmt) => serialize_stmt(&mut ctx, stmt, pos), + }) + .collect::>(); + + ctx.write_str(source_type_pos, "module"); + ctx.write_refs(body_pos, children); + } + Program::Script(script) => { + let body_pos = ctx.ref_vec_field(AstProp::Body, script.body.len()); + let children = script + .body + .iter() + .map(|stmt| serialize_stmt(&mut ctx, stmt, pos)) + .collect::>(); + + ctx.write_str(source_type_pos, "script"); + ctx.write_refs(body_pos, children); + } + } + + ctx.serialize() +} + +fn serialize_module_decl( + ctx: &mut TsEsTreeBuilder, + module_decl: &ModuleDecl, + parent: NodeRef, +) -> NodeRef { + match module_decl { + ModuleDecl::Import(node) => { + ctx.header(AstNode::ImportExpression, parent, &node.span, 0) + } + ModuleDecl::ExportDecl(node) => { + let pos = + ctx.header(AstNode::ExportNamedDeclaration, parent, &node.span, 1); + let decl_pos = ctx.ref_field(AstProp::Declarations); + + let decl = serialize_decl(ctx, &node.decl, pos); + + ctx.write_ref(decl_pos, decl); + + pos + } + ModuleDecl::ExportNamed(node) => { + let id = + ctx.header(AstNode::ExportNamedDeclaration, parent, &node.span, 2); + let src_pos = ctx.ref_field(AstProp::Source); + let spec_pos = + ctx.ref_vec_field(AstProp::Specifiers, node.specifiers.len()); + + // FIXME: Flags + // let mut flags = FlagValue::new(); + // flags.set(Flag::ExportType); + + let src_id = node + .src + .as_ref() + .map(|src| serialize_lit(ctx, &Lit::Str(*src.clone()), id)); + + let spec_ids = node + .specifiers + .iter() + .map(|spec| { + match spec { + ExportSpecifier::Named(child) => { + let spec_pos = + ctx.header(AstNode::ExportSpecifier, id, &child.span, 2); + let local_pos = ctx.ref_field(AstProp::Local); + let exp_pos = ctx.ref_field(AstProp::Exported); + + // let mut flags = FlagValue::new(); + // flags.set(Flag::ExportType); + + let local = + serialize_module_exported_name(ctx, &child.orig, spec_pos); + + let exported = child.exported.as_ref().map(|exported| { + serialize_module_exported_name(ctx, exported, spec_pos) + }); + + // ctx.write_flags(&flags); + ctx.write_ref(local_pos, local); + ctx.write_maybe_ref(exp_pos, exported); + + spec_pos + } + + // These two aren't syntactically valid + ExportSpecifier::Namespace(_) => todo!(), + ExportSpecifier::Default(_) => todo!(), + } + }) + .collect::>(); + + // ctx.write_flags(&flags); + ctx.write_maybe_ref(src_pos, src_id); + ctx.write_refs(spec_pos, spec_ids); + + id + } + ModuleDecl::ExportDefaultDecl(node) => { + ctx.header(AstNode::ExportDefaultDeclaration, parent, &node.span, 0) + } + ModuleDecl::ExportDefaultExpr(node) => { + ctx.header(AstNode::ExportDefaultDeclaration, parent, &node.span, 0) + } + ModuleDecl::ExportAll(node) => { + ctx.header(AstNode::ExportAllDeclaration, parent, &node.span, 0) + } + ModuleDecl::TsImportEquals(node) => { + ctx.header(AstNode::TsImportEquals, parent, &node.span, 0) + } + ModuleDecl::TsExportAssignment(node) => { + ctx.header(AstNode::TsExportAssignment, parent, &node.span, 0) + } + ModuleDecl::TsNamespaceExport(node) => { + ctx.header(AstNode::TsNamespaceExport, parent, &node.span, 0) + } + } +} + +fn serialize_stmt( + ctx: &mut TsEsTreeBuilder, + stmt: &Stmt, + parent: NodeRef, +) -> NodeRef { + match stmt { + Stmt::Block(node) => { + let pos = ctx.header(AstNode::BlockStatement, parent, &node.span, 
1); + let body_pos = ctx.ref_vec_field(AstProp::Body, node.stmts.len()); + + let children = node + .stmts + .iter() + .map(|stmt| serialize_stmt(ctx, stmt, pos)) + .collect::>(); + + ctx.write_refs(body_pos, children); + + pos + } + Stmt::Empty(_) => NodeRef(0), + Stmt::Debugger(node) => { + ctx.header(AstNode::DebuggerStatement, parent, &node.span, 0) + } + Stmt::With(_) => todo!(), + Stmt::Return(node) => { + let pos = ctx.header(AstNode::ReturnStatement, parent, &node.span, 1); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let arg = node.arg.as_ref().map(|arg| serialize_expr(ctx, arg, pos)); + ctx.write_maybe_ref(arg_pos, arg); + + pos + } + Stmt::Labeled(node) => { + let pos = ctx.header(AstNode::LabeledStatement, parent, &node.span, 2); + let label_pos = ctx.ref_field(AstProp::Label); + let body_pos = ctx.ref_field(AstProp::Body); + + let ident = serialize_ident(ctx, &node.label, pos); + let stmt = serialize_stmt(ctx, &node.body, pos); + + ctx.write_ref(label_pos, ident); + ctx.write_ref(body_pos, stmt); + + pos + } + Stmt::Break(node) => { + let pos = ctx.header(AstNode::BreakStatement, parent, &node.span, 1); + let label_pos = ctx.ref_field(AstProp::Label); + + let arg = node + .label + .as_ref() + .map(|label| serialize_ident(ctx, label, pos)); + + ctx.write_maybe_ref(label_pos, arg); + + pos + } + Stmt::Continue(node) => { + let pos = ctx.header(AstNode::ContinueStatement, parent, &node.span, 1); + let label_pos = ctx.ref_field(AstProp::Label); + + let arg = node + .label + .as_ref() + .map(|label| serialize_ident(ctx, label, pos)); + + ctx.write_maybe_ref(label_pos, arg); + + pos + } + Stmt::If(node) => { + let pos = ctx.header(AstNode::IfStatement, parent, &node.span, 3); + let test_pos = ctx.ref_field(AstProp::Test); + let cons_pos = ctx.ref_field(AstProp::Consequent); + let alt_pos = ctx.ref_field(AstProp::Alternate); + + let test = serialize_expr(ctx, node.test.as_ref(), pos); + let cons = serialize_stmt(ctx, node.cons.as_ref(), pos); + let alt = node.alt.as_ref().map(|alt| serialize_stmt(ctx, alt, pos)); + + ctx.write_ref(test_pos, test); + ctx.write_ref(cons_pos, cons); + ctx.write_maybe_ref(alt_pos, alt); + + pos + } + Stmt::Switch(node) => { + let id = ctx.header(AstNode::SwitchStatement, parent, &node.span, 2); + let disc_pos = ctx.ref_field(AstProp::Discriminant); + let cases_pos = ctx.ref_vec_field(AstProp::Cases, node.cases.len()); + + let disc = serialize_expr(ctx, &node.discriminant, id); + + let cases = node + .cases + .iter() + .map(|case| { + let case_pos = ctx.header(AstNode::SwitchCase, id, &case.span, 2); + let test_pos = ctx.ref_field(AstProp::Test); + let cons_pos = + ctx.ref_vec_field(AstProp::Consequent, case.cons.len()); + + let test = case + .test + .as_ref() + .map(|test| serialize_expr(ctx, test, case_pos)); + + let cons = case + .cons + .iter() + .map(|cons| serialize_stmt(ctx, cons, case_pos)) + .collect::>(); + + ctx.write_maybe_ref(test_pos, test); + ctx.write_refs(cons_pos, cons); + + case_pos + }) + .collect::>(); + + ctx.write_ref(disc_pos, disc); + ctx.write_refs(cases_pos, cases); + + id + } + Stmt::Throw(node) => { + let pos = ctx.header(AstNode::ThrowStatement, parent, &node.span, 1); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let arg = serialize_expr(ctx, &node.arg, pos); + ctx.write_ref(arg_pos, arg); + + pos + } + Stmt::Try(node) => { + let pos = ctx.header(AstNode::TryStatement, parent, &node.span, 3); + let block_pos = ctx.ref_field(AstProp::Block); + let handler_pos = ctx.ref_field(AstProp::Handler); + let 
finalizer_pos = ctx.ref_field(AstProp::Finalizer); + + let block = serialize_stmt(ctx, &Stmt::Block(node.block.clone()), pos); + + let handler = node.handler.as_ref().map(|catch| { + let clause_pos = ctx.header(AstNode::CatchClause, pos, &catch.span, 2); + let param_pos = ctx.ref_field(AstProp::Param); + let body_pos = ctx.ref_field(AstProp::Body); + + let param = catch + .param + .as_ref() + .map(|param| serialize_pat(ctx, param, clause_pos)); + + let body = + serialize_stmt(ctx, &Stmt::Block(catch.body.clone()), clause_pos); + + ctx.write_maybe_ref(param_pos, param); + ctx.write_ref(body_pos, body); + + clause_pos + }); + + let finalizer = node.finalizer.as_ref().map(|finalizer| { + serialize_stmt(ctx, &Stmt::Block(finalizer.clone()), pos) + }); + + ctx.write_ref(block_pos, block); + ctx.write_maybe_ref(handler_pos, handler); + ctx.write_maybe_ref(finalizer_pos, finalizer); + + pos + } + Stmt::While(node) => { + let pos = ctx.header(AstNode::WhileStatement, parent, &node.span, 2); + let test_pos = ctx.ref_field(AstProp::Test); + let body_pos = ctx.ref_field(AstProp::Body); + + let test = serialize_expr(ctx, node.test.as_ref(), pos); + let stmt = serialize_stmt(ctx, node.body.as_ref(), pos); + + ctx.write_ref(test_pos, test); + ctx.write_ref(body_pos, stmt); + + pos + } + Stmt::DoWhile(node) => { + let pos = ctx.header(AstNode::DoWhileStatement, parent, &node.span, 2); + let test_pos = ctx.ref_field(AstProp::Test); + let body_pos = ctx.ref_field(AstProp::Body); + + let expr = serialize_expr(ctx, node.test.as_ref(), pos); + let stmt = serialize_stmt(ctx, node.body.as_ref(), pos); + + ctx.write_ref(test_pos, expr); + ctx.write_ref(body_pos, stmt); + + pos + } + Stmt::For(node) => { + let pos = ctx.header(AstNode::ForStatement, parent, &node.span, 4); + let init_pos = ctx.ref_field(AstProp::Init); + let test_pos = ctx.ref_field(AstProp::Test); + let update_pos = ctx.ref_field(AstProp::Update); + let body_pos = ctx.ref_field(AstProp::Body); + + let init = node.init.as_ref().map(|init| match init { + VarDeclOrExpr::VarDecl(var_decl) => { + serialize_stmt(ctx, &Stmt::Decl(Decl::Var(var_decl.clone())), pos) + } + VarDeclOrExpr::Expr(expr) => serialize_expr(ctx, expr, pos), + }); + + let test = node + .test + .as_ref() + .map(|expr| serialize_expr(ctx, expr, pos)); + let update = node + .update + .as_ref() + .map(|expr| serialize_expr(ctx, expr, pos)); + let body = serialize_stmt(ctx, node.body.as_ref(), pos); + + ctx.write_maybe_ref(init_pos, init); + ctx.write_maybe_ref(test_pos, test); + ctx.write_maybe_ref(update_pos, update); + ctx.write_ref(body_pos, body); + + pos + } + Stmt::ForIn(node) => { + let pos = ctx.header(AstNode::ForInStatement, parent, &node.span, 3); + let left_pos = ctx.ref_field(AstProp::Left); + let right_pos = ctx.ref_field(AstProp::Right); + let body_pos = ctx.ref_field(AstProp::Body); + + let left = serialize_for_head(ctx, &node.left, pos); + let right = serialize_expr(ctx, node.right.as_ref(), pos); + let body = serialize_stmt(ctx, node.body.as_ref(), pos); + + ctx.write_ref(left_pos, left); + ctx.write_ref(right_pos, right); + ctx.write_ref(body_pos, body); + + pos + } + Stmt::ForOf(node) => { + let pos = ctx.header(AstNode::ForOfStatement, parent, &node.span, 4); + let await_pos = ctx.bool_field(AstProp::Await); + let left_pos = ctx.ref_field(AstProp::Left); + let right_pos = ctx.ref_field(AstProp::Right); + let body_pos = ctx.ref_field(AstProp::Body); + + let left = serialize_for_head(ctx, &node.left, pos); + let right = serialize_expr(ctx, node.right.as_ref(), pos); 
+ let body = serialize_stmt(ctx, node.body.as_ref(), pos); + + ctx.write_bool(await_pos, node.is_await); + ctx.write_ref(left_pos, left); + ctx.write_ref(right_pos, right); + ctx.write_ref(body_pos, body); + + pos + } + Stmt::Decl(node) => serialize_decl(ctx, node, parent), + Stmt::Expr(node) => { + let pos = ctx.header(AstNode::ExpressionStatement, parent, &node.span, 1); + let expr_pos = ctx.ref_field(AstProp::Expression); + + let expr = serialize_expr(ctx, node.expr.as_ref(), pos); + ctx.write_ref(expr_pos, expr); + + pos + } + } +} + +fn serialize_expr( + ctx: &mut TsEsTreeBuilder, + expr: &Expr, + parent: NodeRef, +) -> NodeRef { + match expr { + Expr::This(node) => { + ctx.header(AstNode::ThisExpression, parent, &node.span, 0) + } + Expr::Array(node) => { + let pos = ctx.header(AstNode::ArrayExpression, parent, &node.span, 1); + let elems_pos = ctx.ref_vec_field(AstProp::Elements, node.elems.len()); + + let elems = node + .elems + .iter() + .map(|item| { + item + .as_ref() + .map_or(NodeRef(0), |item| serialize_expr_or_spread(ctx, item, pos)) + }) + .collect::>(); + + ctx.write_refs(elems_pos, elems); + + pos + } + Expr::Object(node) => { + let pos = ctx.header(AstNode::ObjectExpression, parent, &node.span, 1); + let props_pos = ctx.ref_vec_field(AstProp::Properties, node.props.len()); + + let prop_ids = node + .props + .iter() + .map(|prop| serialize_prop_or_spread(ctx, prop, pos)) + .collect::>(); + + ctx.write_refs(props_pos, prop_ids); + + pos + } + Expr::Fn(node) => { + let fn_obj = node.function.as_ref(); + + let pos = + ctx.header(AstNode::FunctionExpression, parent, &fn_obj.span, 7); + + let async_pos = ctx.bool_field(AstProp::Async); + let gen_pos = ctx.bool_field(AstProp::Generator); + let id_pos = ctx.ref_field(AstProp::Id); + let tparams_pos = ctx.ref_field(AstProp::TypeParameters); + let params_pos = ctx.ref_vec_field(AstProp::Params, fn_obj.params.len()); + let return_pos = ctx.ref_field(AstProp::ReturnType); + let body_pos = ctx.ref_field(AstProp::Body); + + let ident = node + .ident + .as_ref() + .map(|ident| serialize_ident(ctx, ident, pos)); + + let type_params = + maybe_serialize_ts_type_param(ctx, &fn_obj.type_params, pos); + + let params = fn_obj + .params + .iter() + .map(|param| serialize_pat(ctx, ¶m.pat, pos)) + .collect::>(); + + let return_id = + maybe_serialize_ts_type_ann(ctx, &fn_obj.return_type, pos); + let body = fn_obj + .body + .as_ref() + .map(|block| serialize_stmt(ctx, &Stmt::Block(block.clone()), pos)); + + ctx.write_bool(async_pos, fn_obj.is_async); + ctx.write_bool(gen_pos, fn_obj.is_generator); + ctx.write_maybe_ref(id_pos, ident); + ctx.write_maybe_ref(tparams_pos, type_params); + ctx.write_refs(params_pos, params); + ctx.write_maybe_ref(return_pos, return_id); + ctx.write_maybe_ref(body_pos, body); + + pos + } + Expr::Unary(node) => { + let pos = ctx.header(AstNode::UnaryExpression, parent, &node.span, 2); + let flag_pos = ctx.str_field(AstProp::Operator); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let arg = serialize_expr(ctx, &node.arg, pos); + + ctx.write_str( + flag_pos, + match node.op { + UnaryOp::Minus => "-", + UnaryOp::Plus => "+", + UnaryOp::Bang => "!", + UnaryOp::Tilde => "~", + UnaryOp::TypeOf => "typeof", + UnaryOp::Void => "void", + UnaryOp::Delete => "delete", + }, + ); + ctx.write_ref(arg_pos, arg); + + pos + } + Expr::Update(node) => { + let pos = ctx.header(AstNode::UpdateExpression, parent, &node.span, 3); + let prefix_pos = ctx.bool_field(AstProp::Prefix); + let arg_pos = ctx.ref_field(AstProp::Argument); + let 
op_ops = ctx.str_field(AstProp::Operator); + + let arg = serialize_expr(ctx, node.arg.as_ref(), pos); + + ctx.write_bool(prefix_pos, node.prefix); + ctx.write_ref(arg_pos, arg); + ctx.write_str( + op_ops, + match node.op { + UpdateOp::PlusPlus => "++", + UpdateOp::MinusMinus => "--", + }, + ); + + pos + } + Expr::Bin(node) => { + let (node_type, flag_str) = match node.op { + BinaryOp::LogicalAnd => (AstNode::LogicalExpression, "&&"), + BinaryOp::LogicalOr => (AstNode::LogicalExpression, "||"), + BinaryOp::NullishCoalescing => (AstNode::LogicalExpression, "??"), + BinaryOp::EqEq => (AstNode::BinaryExpression, "=="), + BinaryOp::NotEq => (AstNode::BinaryExpression, "!="), + BinaryOp::EqEqEq => (AstNode::BinaryExpression, "==="), + BinaryOp::NotEqEq => (AstNode::BinaryExpression, "!="), + BinaryOp::Lt => (AstNode::BinaryExpression, "<"), + BinaryOp::LtEq => (AstNode::BinaryExpression, "<="), + BinaryOp::Gt => (AstNode::BinaryExpression, ">"), + BinaryOp::GtEq => (AstNode::BinaryExpression, ">="), + BinaryOp::LShift => (AstNode::BinaryExpression, "<<"), + BinaryOp::RShift => (AstNode::BinaryExpression, ">>"), + BinaryOp::ZeroFillRShift => (AstNode::BinaryExpression, ">>>"), + BinaryOp::Add => (AstNode::BinaryExpression, "+"), + BinaryOp::Sub => (AstNode::BinaryExpression, "-"), + BinaryOp::Mul => (AstNode::BinaryExpression, "*"), + BinaryOp::Div => (AstNode::BinaryExpression, "/"), + BinaryOp::Mod => (AstNode::BinaryExpression, "%"), + BinaryOp::BitOr => (AstNode::BinaryExpression, "|"), + BinaryOp::BitXor => (AstNode::BinaryExpression, "^"), + BinaryOp::BitAnd => (AstNode::BinaryExpression, "&"), + BinaryOp::In => (AstNode::BinaryExpression, "in"), + BinaryOp::InstanceOf => (AstNode::BinaryExpression, "instanceof"), + BinaryOp::Exp => (AstNode::BinaryExpression, "**"), + }; + + let pos = ctx.header(node_type, parent, &node.span, 3); + let op_pos = ctx.str_field(AstProp::Operator); + let left_pos = ctx.ref_field(AstProp::Left); + let right_pos = ctx.ref_field(AstProp::Right); + + let left_id = serialize_expr(ctx, node.left.as_ref(), pos); + let right_id = serialize_expr(ctx, node.right.as_ref(), pos); + + ctx.write_str(op_pos, flag_str); + ctx.write_ref(left_pos, left_id); + ctx.write_ref(right_pos, right_id); + + pos + } + Expr::Assign(node) => { + let pos = + ctx.header(AstNode::AssignmentExpression, parent, &node.span, 3); + let op_pos = ctx.str_field(AstProp::Operator); + let left_pos = ctx.ref_field(AstProp::Left); + let right_pos = ctx.ref_field(AstProp::Right); + + let left = match &node.left { + AssignTarget::Simple(simple_assign_target) => { + match simple_assign_target { + SimpleAssignTarget::Ident(target) => { + serialize_ident(ctx, &target.id, pos) + } + SimpleAssignTarget::Member(target) => { + serialize_expr(ctx, &Expr::Member(target.clone()), pos) + } + SimpleAssignTarget::SuperProp(target) => { + serialize_expr(ctx, &Expr::SuperProp(target.clone()), pos) + } + SimpleAssignTarget::Paren(target) => { + serialize_expr(ctx, &target.expr, pos) + } + SimpleAssignTarget::OptChain(target) => { + serialize_expr(ctx, &Expr::OptChain(target.clone()), pos) + } + SimpleAssignTarget::TsAs(target) => { + serialize_expr(ctx, &Expr::TsAs(target.clone()), pos) + } + SimpleAssignTarget::TsSatisfies(target) => { + serialize_expr(ctx, &Expr::TsSatisfies(target.clone()), pos) + } + SimpleAssignTarget::TsNonNull(target) => { + serialize_expr(ctx, &Expr::TsNonNull(target.clone()), pos) + } + SimpleAssignTarget::TsTypeAssertion(target) => { + serialize_expr(ctx, &Expr::TsTypeAssertion(target.clone()), 
pos) + } + SimpleAssignTarget::TsInstantiation(target) => { + serialize_expr(ctx, &Expr::TsInstantiation(target.clone()), pos) + } + SimpleAssignTarget::Invalid(_) => unreachable!(), + } + } + AssignTarget::Pat(target) => match target { + AssignTargetPat::Array(array_pat) => { + serialize_pat(ctx, &Pat::Array(array_pat.clone()), pos) + } + AssignTargetPat::Object(object_pat) => { + serialize_pat(ctx, &Pat::Object(object_pat.clone()), pos) + } + AssignTargetPat::Invalid(_) => unreachable!(), + }, + }; + + let right = serialize_expr(ctx, node.right.as_ref(), pos); + + ctx.write_str( + op_pos, + match node.op { + AssignOp::Assign => "=", + AssignOp::AddAssign => "+=", + AssignOp::SubAssign => "-=", + AssignOp::MulAssign => "*=", + AssignOp::DivAssign => "/=", + AssignOp::ModAssign => "%=", + AssignOp::LShiftAssign => "<<=", + AssignOp::RShiftAssign => ">>=", + AssignOp::ZeroFillRShiftAssign => ">>>=", + AssignOp::BitOrAssign => "|=", + AssignOp::BitXorAssign => "^=", + AssignOp::BitAndAssign => "&=", + AssignOp::ExpAssign => "**=", + AssignOp::AndAssign => "&&=", + AssignOp::OrAssign => "||=", + AssignOp::NullishAssign => "??=", + }, + ); + ctx.write_ref(left_pos, left); + ctx.write_ref(right_pos, right); + + pos + } + Expr::Member(node) => serialize_member_expr(ctx, node, parent, false), + Expr::SuperProp(node) => { + let pos = ctx.header(AstNode::MemberExpression, parent, &node.span, 3); + let computed_pos = ctx.bool_field(AstProp::Computed); + let obj_pos = ctx.ref_field(AstProp::Object); + let prop_pos = ctx.ref_field(AstProp::Property); + + let obj = ctx.header(AstNode::Super, pos, &node.obj.span, 0); + + let mut computed = false; + let prop = match &node.prop { + SuperProp::Ident(ident_name) => { + serialize_ident_name(ctx, ident_name, pos) + } + SuperProp::Computed(prop) => { + computed = true; + serialize_expr(ctx, &prop.expr, pos) + } + }; + + ctx.write_bool(computed_pos, computed); + ctx.write_ref(obj_pos, obj); + ctx.write_ref(prop_pos, prop); + + pos + } + Expr::Cond(node) => { + let pos = + ctx.header(AstNode::ConditionalExpression, parent, &node.span, 3); + let test_pos = ctx.ref_field(AstProp::Test); + let cons_pos = ctx.ref_field(AstProp::Consequent); + let alt_pos = ctx.ref_field(AstProp::Alternate); + + let test = serialize_expr(ctx, node.test.as_ref(), pos); + let cons = serialize_expr(ctx, node.cons.as_ref(), pos); + let alt = serialize_expr(ctx, node.alt.as_ref(), pos); + + ctx.write_ref(test_pos, test); + ctx.write_ref(cons_pos, cons); + ctx.write_ref(alt_pos, alt); + + pos + } + Expr::Call(node) => { + let pos = ctx.header(AstNode::CallExpression, parent, &node.span, 4); + let opt_pos = ctx.bool_field(AstProp::Optional); + let callee_pos = ctx.ref_field(AstProp::Callee); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + let args_pos = ctx.ref_vec_field(AstProp::Arguments, node.args.len()); + + let callee = match &node.callee { + Callee::Super(super_node) => { + ctx.header(AstNode::Super, pos, &super_node.span, 0) + } + Callee::Import(_) => todo!(), + Callee::Expr(expr) => serialize_expr(ctx, expr, pos), + }; + + let type_arg = node.type_args.clone().map(|param_node| { + serialize_ts_param_inst(ctx, param_node.as_ref(), pos) + }); + + let args = node + .args + .iter() + .map(|arg| serialize_expr_or_spread(ctx, arg, pos)) + .collect::>(); + + ctx.write_bool(opt_pos, false); + ctx.write_ref(callee_pos, callee); + ctx.write_maybe_ref(type_args_pos, type_arg); + ctx.write_refs(args_pos, args); + + pos + } + Expr::New(node) => { + let pos = 
ctx.header(AstNode::NewExpression, parent, &node.span, 3); + let callee_pos = ctx.ref_field(AstProp::Callee); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + let args_pos = ctx.ref_vec_field( + AstProp::Arguments, + node.args.as_ref().map_or(0, |v| v.len()), + ); + + let callee = serialize_expr(ctx, node.callee.as_ref(), pos); + + let args: Vec = node.args.as_ref().map_or(vec![], |args| { + args + .iter() + .map(|arg| serialize_expr_or_spread(ctx, arg, pos)) + .collect::>() + }); + + let type_args = node.type_args.clone().map(|param_node| { + serialize_ts_param_inst(ctx, param_node.as_ref(), pos) + }); + + ctx.write_ref(callee_pos, callee); + ctx.write_maybe_ref(type_args_pos, type_args); + ctx.write_refs(args_pos, args); + + pos + } + Expr::Seq(node) => { + let pos = ctx.header(AstNode::SequenceExpression, parent, &node.span, 1); + let exprs_pos = ctx.ref_vec_field(AstProp::Expressions, node.exprs.len()); + + let children = node + .exprs + .iter() + .map(|expr| serialize_expr(ctx, expr, pos)) + .collect::>(); + + ctx.write_refs(exprs_pos, children); + + pos + } + Expr::Ident(node) => serialize_ident(ctx, node, parent), + Expr::Lit(node) => serialize_lit(ctx, node, parent), + Expr::Tpl(node) => { + let pos = ctx.header(AstNode::TemplateLiteral, parent, &node.span, 2); + let quasis_pos = ctx.ref_vec_field(AstProp::Quasis, node.quasis.len()); + let exprs_pos = ctx.ref_vec_field(AstProp::Expressions, node.exprs.len()); + + let quasis = node + .quasis + .iter() + .map(|quasi| { + let tpl_pos = + ctx.header(AstNode::TemplateElement, pos, &quasi.span, 3); + let tail_pos = ctx.bool_field(AstProp::Tail); + let raw_pos = ctx.str_field(AstProp::Raw); + let cooked_pos = ctx.str_field(AstProp::Cooked); + + ctx.write_bool(tail_pos, quasi.tail); + ctx.write_str(raw_pos, &quasi.raw); + ctx.write_str( + cooked_pos, + &quasi + .cooked + .as_ref() + .map_or("".to_string(), |v| v.to_string()), + ); + + tpl_pos + }) + .collect::>(); + + let exprs = node + .exprs + .iter() + .map(|expr| serialize_expr(ctx, expr, pos)) + .collect::>(); + + ctx.write_refs(quasis_pos, quasis); + ctx.write_refs(exprs_pos, exprs); + + pos + } + Expr::TaggedTpl(node) => { + let pos = + ctx.header(AstNode::TaggedTemplateExpression, parent, &node.span, 3); + let tag_pos = ctx.ref_field(AstProp::Tag); + let type_arg_pos = ctx.ref_field(AstProp::TypeArguments); + let quasi_pos = ctx.ref_field(AstProp::Quasi); + + let tag = serialize_expr(ctx, &node.tag, pos); + + let type_param_id = node + .type_params + .clone() + .map(|params| serialize_ts_param_inst(ctx, params.as_ref(), pos)); + let quasi = serialize_expr(ctx, &Expr::Tpl(*node.tpl.clone()), pos); + + ctx.write_ref(tag_pos, tag); + ctx.write_maybe_ref(type_arg_pos, type_param_id); + ctx.write_ref(quasi_pos, quasi); + + pos + } + Expr::Arrow(node) => { + let pos = + ctx.header(AstNode::ArrowFunctionExpression, parent, &node.span, 6); + let async_pos = ctx.bool_field(AstProp::Async); + let gen_pos = ctx.bool_field(AstProp::Generator); + let type_param_pos = ctx.ref_field(AstProp::TypeParameters); + let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len()); + let body_pos = ctx.ref_field(AstProp::Body); + let return_type_pos = ctx.ref_field(AstProp::ReturnType); + + let type_param = + maybe_serialize_ts_type_param(ctx, &node.type_params, pos); + + let params = node + .params + .iter() + .map(|param| serialize_pat(ctx, param, pos)) + .collect::>(); + + let body = match node.body.as_ref() { + BlockStmtOrExpr::BlockStmt(block_stmt) => { + serialize_stmt(ctx, 
&Stmt::Block(block_stmt.clone()), pos) + } + BlockStmtOrExpr::Expr(expr) => serialize_expr(ctx, expr.as_ref(), pos), + }; + + let return_type = + maybe_serialize_ts_type_ann(ctx, &node.return_type, pos); + + ctx.write_bool(async_pos, node.is_async); + ctx.write_bool(gen_pos, node.is_generator); + ctx.write_maybe_ref(type_param_pos, type_param); + ctx.write_refs(params_pos, params); + ctx.write_ref(body_pos, body); + ctx.write_maybe_ref(return_type_pos, return_type); + + pos + } + Expr::Class(node) => { + // FIXME + ctx.header(AstNode::ClassExpression, parent, &node.class.span, 0) + } + Expr::Yield(node) => { + let pos = ctx.header(AstNode::YieldExpression, parent, &node.span, 2); + let delegate_pos = ctx.bool_field(AstProp::Delegate); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let arg = node + .arg + .as_ref() + .map(|arg| serialize_expr(ctx, arg.as_ref(), pos)); + + ctx.write_bool(delegate_pos, node.delegate); + ctx.write_maybe_ref(arg_pos, arg); + + pos + } + Expr::MetaProp(node) => { + ctx.header(AstNode::MetaProp, parent, &node.span, 0) + } + Expr::Await(node) => { + let pos = ctx.header(AstNode::AwaitExpression, parent, &node.span, 1); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let arg = serialize_expr(ctx, node.arg.as_ref(), pos); + + ctx.write_ref(arg_pos, arg); + + pos + } + Expr::Paren(node) => { + // Paren nodes are treated as a syntax only thing in TSEStree + // and are never materialized to actual AST nodes. + serialize_expr(ctx, &node.expr, parent) + } + Expr::JSXMember(node) => serialize_jsx_member_expr(ctx, node, parent), + Expr::JSXNamespacedName(node) => { + serialize_jsx_namespaced_name(ctx, node, parent) + } + Expr::JSXEmpty(node) => serialize_jsx_empty_expr(ctx, node, parent), + Expr::JSXElement(node) => serialize_jsx_element(ctx, node, parent), + Expr::JSXFragment(node) => serialize_jsx_fragment(ctx, node, parent), + Expr::TsTypeAssertion(node) => { + let pos = ctx.header(AstNode::TSTypeAssertion, parent, &node.span, 2); + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let expr = serialize_expr(ctx, &node.expr, parent); + let type_ann = serialize_ts_type(ctx, &node.type_ann, pos); + + ctx.write_ref(expr_pos, expr); + ctx.write_ref(type_ann_pos, type_ann); + + pos + } + Expr::TsConstAssertion(node) => { + let pos = ctx.header(AstNode::TsConstAssertion, parent, &node.span, 1); + let arg_pos = ctx.ref_field(AstProp::Argument); + let arg = serialize_expr(ctx, node.expr.as_ref(), pos); + + // FIXME + ctx.write_ref(arg_pos, arg); + + pos + } + Expr::TsNonNull(node) => { + let pos = ctx.header(AstNode::TSNonNullExpression, parent, &node.span, 1); + let expr_pos = ctx.ref_field(AstProp::Expression); + + let expr_id = serialize_expr(ctx, node.expr.as_ref(), pos); + + ctx.write_ref(expr_pos, expr_id); + + pos + } + Expr::TsAs(node) => { + let id = ctx.header(AstNode::TSAsExpression, parent, &node.span, 2); + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let expr = serialize_expr(ctx, node.expr.as_ref(), id); + let type_ann = serialize_ts_type(ctx, node.type_ann.as_ref(), id); + + ctx.write_ref(expr_pos, expr); + ctx.write_ref(type_ann_pos, type_ann); + + id + } + Expr::TsInstantiation(node) => { + let pos = ctx.header(AstNode::TsInstantiation, parent, &node.span, 1); + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + + let expr = serialize_expr(ctx, 
node.expr.as_ref(), pos); + + let type_arg = serialize_ts_param_inst(ctx, node.type_args.as_ref(), pos); + + ctx.write_ref(expr_pos, expr); + ctx.write_ref(type_args_pos, type_arg); + + pos + } + Expr::TsSatisfies(node) => { + let pos = + ctx.header(AstNode::TSSatisfiesExpression, parent, &node.span, 2); + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let epxr = serialize_expr(ctx, node.expr.as_ref(), pos); + let type_ann = serialize_ts_type(ctx, node.type_ann.as_ref(), pos); + + ctx.write_ref(expr_pos, epxr); + ctx.write_ref(type_ann_pos, type_ann); + + pos + } + Expr::PrivateName(node) => serialize_private_name(ctx, node, parent), + Expr::OptChain(node) => { + let pos = ctx.header(AstNode::ChainExpression, parent, &node.span, 1); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let arg = match node.base.as_ref() { + OptChainBase::Member(member_expr) => { + serialize_member_expr(ctx, member_expr, pos, true) + } + OptChainBase::Call(opt_call) => { + let call_pos = + ctx.header(AstNode::CallExpression, pos, &opt_call.span, 4); + let opt_pos = ctx.bool_field(AstProp::Optional); + let callee_pos = ctx.ref_field(AstProp::Callee); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + let args_pos = + ctx.ref_vec_field(AstProp::Arguments, opt_call.args.len()); + + let callee = serialize_expr(ctx, &opt_call.callee, pos); + + let type_param_id = opt_call.type_args.clone().map(|params| { + serialize_ts_param_inst(ctx, params.as_ref(), call_pos) + }); + + let args = opt_call + .args + .iter() + .map(|arg| serialize_expr_or_spread(ctx, arg, pos)) + .collect::>(); + + ctx.write_bool(opt_pos, true); + ctx.write_ref(callee_pos, callee); + ctx.write_maybe_ref(type_args_pos, type_param_id); + ctx.write_refs(args_pos, args); + + call_pos + } + }; + + ctx.write_ref(arg_pos, arg); + + pos + } + Expr::Invalid(_) => { + unreachable!() + } + } +} + +fn serialize_prop_or_spread( + ctx: &mut TsEsTreeBuilder, + prop: &PropOrSpread, + parent: NodeRef, +) -> NodeRef { + match prop { + PropOrSpread::Spread(spread_element) => serialize_spread( + ctx, + spread_element.expr.as_ref(), + &spread_element.dot3_token, + parent, + ), + PropOrSpread::Prop(prop) => { + let pos = ctx.header(AstNode::Property, parent, &prop.span(), 6); + + let shorthand_pos = ctx.bool_field(AstProp::Shorthand); + let computed_pos = ctx.bool_field(AstProp::Computed); + let method_pos = ctx.bool_field(AstProp::Method); + let kind_pos = ctx.str_field(AstProp::Kind); + let key_pos = ctx.ref_field(AstProp::Key); + let value_pos = ctx.ref_field(AstProp::Value); + + let mut shorthand = false; + let mut computed = false; + let mut method = false; + let mut kind = "init"; + + // FIXME: optional + let (key_id, value_id) = match prop.as_ref() { + Prop::Shorthand(ident) => { + shorthand = true; + + let value = serialize_ident(ctx, ident, pos); + (value, value) + } + Prop::KeyValue(key_value_prop) => { + if let PropName::Computed(_) = key_value_prop.key { + computed = true; + } + + let key = serialize_prop_name(ctx, &key_value_prop.key, pos); + let value = serialize_expr(ctx, key_value_prop.value.as_ref(), pos); + + (key, value) + } + Prop::Assign(assign_prop) => { + let child_id = + ctx.header(AstNode::AssignmentPattern, pos, &assign_prop.span, 2); + let left_pos = ctx.ref_field(AstProp::Left); + let right_pos = ctx.ref_field(AstProp::Right); + + let left = serialize_ident(ctx, &assign_prop.key, child_id); + let right = serialize_expr(ctx, assign_prop.value.as_ref(), 
child_id); + + ctx.write_ref(left_pos, left); + ctx.write_ref(right_pos, right); + + (left, child_id) + } + Prop::Getter(getter_prop) => { + kind = "get"; + + let key = serialize_prop_name(ctx, &getter_prop.key, pos); + + let value = serialize_expr( + ctx, + &Expr::Fn(FnExpr { + ident: None, + function: Box::new(Function { + params: vec![], + decorators: vec![], + span: getter_prop.span, + ctxt: SyntaxContext::empty(), + body: getter_prop.body.clone(), + is_generator: false, + is_async: false, + type_params: None, // FIXME + return_type: None, + }), + }), + pos, + ); + + (key, value) + } + Prop::Setter(setter_prop) => { + kind = "set"; + + let key_id = serialize_prop_name(ctx, &setter_prop.key, pos); + + let param = Param::from(*setter_prop.param.clone()); + + let value_id = serialize_expr( + ctx, + &Expr::Fn(FnExpr { + ident: None, + function: Box::new(Function { + params: vec![param], + decorators: vec![], + span: setter_prop.span, + ctxt: SyntaxContext::empty(), + body: setter_prop.body.clone(), + is_generator: false, + is_async: false, + type_params: None, + return_type: None, + }), + }), + pos, + ); + + (key_id, value_id) + } + Prop::Method(method_prop) => { + method = true; + + let key_id = serialize_prop_name(ctx, &method_prop.key, pos); + + let value_id = serialize_expr( + ctx, + &Expr::Fn(FnExpr { + ident: None, + function: method_prop.function.clone(), + }), + pos, + ); + + (key_id, value_id) + } + }; + + ctx.write_bool(shorthand_pos, shorthand); + ctx.write_bool(computed_pos, computed); + ctx.write_bool(method_pos, method); + ctx.write_str(kind_pos, kind); + ctx.write_ref(key_pos, key_id); + ctx.write_ref(value_pos, value_id); + + pos + } + } +} + +fn serialize_member_expr( + ctx: &mut TsEsTreeBuilder, + node: &MemberExpr, + parent: NodeRef, + optional: bool, +) -> NodeRef { + let pos = ctx.header(AstNode::MemberExpression, parent, &node.span, 4); + let opt_pos = ctx.bool_field(AstProp::Optional); + let computed_pos = ctx.bool_field(AstProp::Computed); + let obj_pos = ctx.ref_field(AstProp::Object); + let prop_pos = ctx.ref_field(AstProp::Property); + + let obj = serialize_expr(ctx, node.obj.as_ref(), pos); + + let mut computed = false; + + let prop = match &node.prop { + MemberProp::Ident(ident_name) => serialize_ident_name(ctx, ident_name, pos), + MemberProp::PrivateName(private_name) => { + serialize_private_name(ctx, private_name, pos) + } + MemberProp::Computed(computed_prop_name) => { + computed = true; + serialize_expr(ctx, computed_prop_name.expr.as_ref(), pos) + } + }; + + ctx.write_bool(opt_pos, optional); + ctx.write_bool(computed_pos, computed); + ctx.write_ref(obj_pos, obj); + ctx.write_ref(prop_pos, prop); + + pos +} + +fn serialize_class_member( + ctx: &mut TsEsTreeBuilder, + member: &ClassMember, + parent: NodeRef, +) -> NodeRef { + match member { + ClassMember::Constructor(constructor) => { + let member_id = + ctx.header(AstNode::MethodDefinition, parent, &constructor.span, 3); + let key_pos = ctx.ref_field(AstProp::Key); + let body_pos = ctx.ref_field(AstProp::Body); + let args_pos = + ctx.ref_vec_field(AstProp::Arguments, constructor.params.len()); + let acc_pos = if constructor.accessibility.is_some() { + NodePos::Str(ctx.str_field(AstProp::Accessibility)) + } else { + NodePos::Undef(ctx.undefined_field(AstProp::Accessibility)) + }; + + // FIXME flags + + let key = serialize_prop_name(ctx, &constructor.key, member_id); + let body = constructor + .body + .as_ref() + .map(|body| serialize_stmt(ctx, &Stmt::Block(body.clone()), member_id)); + + let params = 
constructor + .params + .iter() + .map(|param| match param { + ParamOrTsParamProp::TsParamProp(_) => { + todo!() + } + ParamOrTsParamProp::Param(param) => { + serialize_pat(ctx, ¶m.pat, member_id) + } + }) + .collect::>(); + + if let Some(acc) = constructor.accessibility { + if let NodePos::Str(str_pos) = acc_pos { + ctx.write_str(str_pos, &accessibility_to_str(acc)); + } + } + + ctx.write_ref(key_pos, key); + ctx.write_maybe_ref(body_pos, body); + // FIXME + ctx.write_refs(args_pos, params); + + member_id + } + ClassMember::Method(method) => { + let member_id = + ctx.header(AstNode::MethodDefinition, parent, &method.span, 0); + + // let mut flags = FlagValue::new(); + // flags.set(Flag::ClassMethod); + if method.function.is_async { + // FIXME + } + + // accessibility_to_flag(&mut flags, method.accessibility); + + let _key_id = serialize_prop_name(ctx, &method.key, member_id); + + let _body_id = + method.function.body.as_ref().map(|body| { + serialize_stmt(ctx, &Stmt::Block(body.clone()), member_id) + }); + + let _params = method + .function + .params + .iter() + .map(|param| serialize_pat(ctx, ¶m.pat, member_id)) + .collect::>(); + + // ctx.write_node(member_id, ); + // ctx.write_flags(&flags); + // ctx.write_id(key_id); + // ctx.write_id(body_id); + // ctx.write_ids(AstProp::Params, params); + + member_id + } + ClassMember::PrivateMethod(_) => todo!(), + ClassMember::ClassProp(_) => todo!(), + ClassMember::PrivateProp(_) => todo!(), + ClassMember::TsIndexSignature(member) => { + serialize_ts_index_sig(ctx, member, parent) + } + ClassMember::Empty(_) => unreachable!(), + ClassMember::StaticBlock(_) => todo!(), + ClassMember::AutoAccessor(_) => todo!(), + } +} + +fn serialize_expr_or_spread( + ctx: &mut TsEsTreeBuilder, + arg: &ExprOrSpread, + parent: NodeRef, +) -> NodeRef { + if let Some(spread) = &arg.spread { + serialize_spread(ctx, &arg.expr, spread, parent) + } else { + serialize_expr(ctx, arg.expr.as_ref(), parent) + } +} + +fn serialize_ident( + ctx: &mut TsEsTreeBuilder, + ident: &Ident, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::Identifier, parent, &ident.span, 1); + let name_pos = ctx.str_field(AstProp::Name); + ctx.write_str(name_pos, ident.sym.as_str()); + + pos +} + +fn serialize_module_exported_name( + ctx: &mut TsEsTreeBuilder, + name: &ModuleExportName, + parent: NodeRef, +) -> NodeRef { + match &name { + ModuleExportName::Ident(ident) => serialize_ident(ctx, ident, parent), + ModuleExportName::Str(lit) => { + serialize_lit(ctx, &Lit::Str(lit.clone()), parent) + } + } +} + +fn serialize_decl( + ctx: &mut TsEsTreeBuilder, + decl: &Decl, + parent: NodeRef, +) -> NodeRef { + match decl { + Decl::Class(node) => { + let id = + ctx.header(AstNode::ClassDeclaration, parent, &node.class.span, 8); + let declare_pos = ctx.bool_field(AstProp::Declare); + let abstract_pos = ctx.bool_field(AstProp::Abstract); + let id_pos = ctx.ref_field(AstProp::Id); + let body_pos = ctx.ref_field(AstProp::Body); + let type_params_pos = ctx.ref_field(AstProp::TypeParameters); + let super_pos = ctx.ref_field(AstProp::SuperClass); + let super_type_pos = ctx.ref_field(AstProp::SuperTypeArguments); + let impl_pos = + ctx.ref_vec_field(AstProp::Implements, node.class.implements.len()); + + let body_id = ctx.header(AstNode::ClassBody, id, &node.class.span, 1); + let body_body_pos = + ctx.ref_vec_field(AstProp::Body, node.class.body.len()); + + let ident = serialize_ident(ctx, &node.ident, id); + let type_params = + maybe_serialize_ts_type_param(ctx, &node.class.type_params, id); + + 
let super_class = node + .class + .super_class + .as_ref() + .map(|super_class| serialize_expr(ctx, super_class, id)); + + let super_type_params = node + .class + .super_type_params + .as_ref() + .map(|super_params| serialize_ts_param_inst(ctx, super_params, id)); + + let implement_ids = node + .class + .implements + .iter() + .map(|implements| { + let child_pos = + ctx.header(AstNode::TSClassImplements, id, &implements.span, 2); + + let expr_pos = ctx.ref_field(AstProp::Expression); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + + let type_args = implements + .type_args + .clone() + .map(|args| serialize_ts_param_inst(ctx, &args, child_pos)); + + let expr = serialize_expr(ctx, &implements.expr, child_pos); + + ctx.write_ref(expr_pos, expr); + ctx.write_maybe_ref(type_args_pos, type_args); + + child_pos + }) + .collect::>(); + + let member_ids = node + .class + .body + .iter() + .map(|member| serialize_class_member(ctx, member, parent)) + .collect::>(); + + ctx.write_ref(body_pos, body_id); + + ctx.write_bool(declare_pos, node.declare); + ctx.write_bool(abstract_pos, node.class.is_abstract); + ctx.write_ref(id_pos, ident); + ctx.write_maybe_ref(type_params_pos, type_params); + ctx.write_maybe_ref(super_pos, super_class); + ctx.write_maybe_ref(super_type_pos, super_type_params); + ctx.write_refs(impl_pos, implement_ids); + + // body + ctx.write_refs(body_body_pos, member_ids); + + id + } + Decl::Fn(node) => { + let pos = ctx.header( + AstNode::FunctionDeclaration, + parent, + &node.function.span, + 8, + ); + let declare_pos = ctx.bool_field(AstProp::Declare); + let async_pos = ctx.bool_field(AstProp::Async); + let gen_pos = ctx.bool_field(AstProp::Generator); + let id_pos = ctx.ref_field(AstProp::Id); + let type_params_pos = ctx.ref_field(AstProp::TypeParameters); + let return_pos = ctx.ref_field(AstProp::ReturnType); + let body_pos = ctx.ref_field(AstProp::Body); + let params_pos = + ctx.ref_vec_field(AstProp::Params, node.function.params.len()); + + let ident_id = serialize_ident(ctx, &node.ident, parent); + let type_param_id = + maybe_serialize_ts_type_param(ctx, &node.function.type_params, pos); + let return_type = + maybe_serialize_ts_type_ann(ctx, &node.function.return_type, pos); + + let body = node + .function + .body + .as_ref() + .map(|body| serialize_stmt(ctx, &Stmt::Block(body.clone()), pos)); + + let params = node + .function + .params + .iter() + .map(|param| serialize_pat(ctx, ¶m.pat, pos)) + .collect::>(); + + ctx.write_bool(declare_pos, node.declare); + ctx.write_bool(async_pos, node.function.is_async); + ctx.write_bool(gen_pos, node.function.is_generator); + ctx.write_ref(id_pos, ident_id); + ctx.write_maybe_ref(type_params_pos, type_param_id); + ctx.write_maybe_ref(return_pos, return_type); + ctx.write_maybe_ref(body_pos, body); + ctx.write_refs(params_pos, params); + + pos + } + Decl::Var(node) => { + let id = ctx.header(AstNode::VariableDeclaration, parent, &node.span, 3); + let declare_pos = ctx.bool_field(AstProp::Declare); + let kind_pos = ctx.str_field(AstProp::Kind); + let decls_pos = + ctx.ref_vec_field(AstProp::Declarations, node.decls.len()); + + let children = node + .decls + .iter() + .map(|decl| { + let child_id = + ctx.header(AstNode::VariableDeclarator, id, &decl.span, 2); + let id_pos = ctx.ref_field(AstProp::Id); + let init_pos = ctx.ref_field(AstProp::Init); + + // FIXME: Definite? 
+ + let ident = serialize_pat(ctx, &decl.name, child_id); + + let init = decl + .init + .as_ref() + .map(|init| serialize_expr(ctx, init.as_ref(), child_id)); + + ctx.write_ref(id_pos, ident); + ctx.write_maybe_ref(init_pos, init); + + child_id + }) + .collect::>(); + + ctx.write_bool(declare_pos, node.declare); + ctx.write_str( + kind_pos, + match node.kind { + VarDeclKind::Var => "var", + VarDeclKind::Let => "let", + VarDeclKind::Const => "const", + }, + ); + ctx.write_refs(decls_pos, children); + + id + } + Decl::Using(_) => { + todo!(); + } + Decl::TsInterface(node) => { + let pos = ctx.header(AstNode::TSInterface, parent, &node.span, 0); + let declare_pos = ctx.bool_field(AstProp::Declare); + let id_pos = ctx.ref_field(AstProp::Id); + let extends_pos = ctx.ref_vec_field(AstProp::Extends, node.extends.len()); + let type_param_pos = ctx.ref_field(AstProp::TypeParameters); + let body_pos = ctx.ref_field(AstProp::Body); + + let body_id = + ctx.header(AstNode::TSInterfaceBody, pos, &node.body.span, 0); + let body_body_pos = + ctx.ref_vec_field(AstProp::Body, node.body.body.len()); + + let ident_id = serialize_ident(ctx, &node.id, pos); + let type_param = + maybe_serialize_ts_type_param(ctx, &node.type_params, pos); + + let extend_ids = node + .extends + .iter() + .map(|item| { + let child_pos = + ctx.header(AstNode::TSInterfaceHeritage, pos, &item.span, 1); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + let expr_pos = ctx.ref_field(AstProp::Expression); + + let expr = serialize_expr(ctx, &item.expr, child_pos); + let type_args = item.type_args.clone().map(|params| { + serialize_ts_param_inst(ctx, params.as_ref(), child_pos) + }); + + ctx.write_ref(expr_pos, expr); + ctx.write_maybe_ref(type_args_pos, type_args); + + child_pos + }) + .collect::>(); + + let body_elem_ids = node + .body + .body + .iter() + .map(|item| match item { + TsTypeElement::TsCallSignatureDecl(ts_call) => { + let item_id = ctx.header( + AstNode::TsCallSignatureDeclaration, + pos, + &ts_call.span, + 3, + ); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + let params_pos = + ctx.ref_vec_field(AstProp::Params, ts_call.params.len()); + let return_pos = ctx.ref_field(AstProp::ReturnType); + + let type_param = + maybe_serialize_ts_type_param(ctx, &ts_call.type_params, pos); + let return_type = + maybe_serialize_ts_type_ann(ctx, &ts_call.type_ann, pos); + let params = ts_call + .params + .iter() + .map(|param| serialize_ts_fn_param(ctx, param, pos)) + .collect::>(); + + ctx.write_maybe_ref(type_ann_pos, type_param); + ctx.write_refs(params_pos, params); + ctx.write_maybe_ref(return_pos, return_type); + + item_id + } + TsTypeElement::TsConstructSignatureDecl(_) => todo!(), + TsTypeElement::TsPropertySignature(sig) => { + let item_pos = + ctx.header(AstNode::TSPropertySignature, pos, &sig.span, 6); + + let computed_pos = ctx.bool_field(AstProp::Computed); + let optional_pos = ctx.bool_field(AstProp::Optional); + let readonly_pos = ctx.bool_field(AstProp::Readonly); + // TODO: where is this coming from? 
+ let _static_pos = ctx.bool_field(AstProp::Static); + let key_pos = ctx.ref_field(AstProp::Key); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let key = serialize_expr(ctx, &sig.key, item_pos); + let type_ann = + maybe_serialize_ts_type_ann(ctx, &sig.type_ann, item_pos); + + ctx.write_bool(computed_pos, sig.computed); + ctx.write_bool(optional_pos, sig.optional); + ctx.write_bool(readonly_pos, sig.readonly); + ctx.write_ref(key_pos, key); + ctx.write_maybe_ref(type_ann_pos, type_ann); + + item_pos + } + TsTypeElement::TsGetterSignature(sig) => { + let item_pos = + ctx.header(AstNode::TSMethodSignature, pos, &sig.span, 6); + let computed_pos = ctx.bool_field(AstProp::Computed); + let optional_pos = ctx.bool_field(AstProp::Optional); + let readonly_pos = ctx.bool_field(AstProp::Readonly); + // TODO: where is this coming from? + let _static_pos = ctx.bool_field(AstProp::Static); + let kind_pos = ctx.str_field(AstProp::Kind); + let key_pos = ctx.ref_field(AstProp::Key); + let return_type_pos = ctx.ref_field(AstProp::ReturnType); + + let key = serialize_expr(ctx, sig.key.as_ref(), item_pos); + let return_type = + maybe_serialize_ts_type_ann(ctx, &sig.type_ann, item_pos); + + ctx.write_bool(computed_pos, false); + ctx.write_bool(optional_pos, false); + ctx.write_bool(readonly_pos, false); + ctx.write_str(kind_pos, "getter"); + ctx.write_maybe_ref(return_type_pos, return_type); + ctx.write_ref(key_pos, key); + + item_pos + } + TsTypeElement::TsSetterSignature(sig) => { + let item_pos = + ctx.header(AstNode::TSMethodSignature, pos, &sig.span, 6); + let computed_pos = ctx.bool_field(AstProp::Computed); + let optional_pos = ctx.bool_field(AstProp::Optional); + let readonly_pos = ctx.bool_field(AstProp::Readonly); + // TODO: where is this coming from? + let _static_pos = ctx.bool_field(AstProp::Static); + let kind_pos = ctx.str_field(AstProp::Kind); + let key_pos = ctx.ref_field(AstProp::Key); + let params_pos = ctx.ref_vec_field(AstProp::Params, 1); + + let key = serialize_expr(ctx, sig.key.as_ref(), item_pos); + let params = serialize_ts_fn_param(ctx, &sig.param, item_pos); + + ctx.write_bool(computed_pos, false); + ctx.write_bool(optional_pos, false); + ctx.write_bool(readonly_pos, false); + ctx.write_str(kind_pos, "setter"); + ctx.write_ref(key_pos, key); + ctx.write_refs(params_pos, vec![params]); + + item_pos + } + TsTypeElement::TsMethodSignature(sig) => { + let item_pos = + ctx.header(AstNode::TSMethodSignature, pos, &sig.span, 8); + let computed_pos = ctx.bool_field(AstProp::Computed); + let optional_pos = ctx.bool_field(AstProp::Optional); + let readonly_pos = ctx.bool_field(AstProp::Readonly); + // TODO: where is this coming from?
+ let _static_pos = ctx.bool_field(AstProp::Static); + let kind_pos = ctx.str_field(AstProp::Kind); + let key_pos = ctx.ref_field(AstProp::Key); + let params_pos = + ctx.ref_vec_field(AstProp::Params, sig.params.len()); + let return_type_pos = ctx.ref_field(AstProp::ReturnType); + + let key = serialize_expr(ctx, sig.key.as_ref(), item_pos); + let params = sig + .params + .iter() + .map(|param| serialize_ts_fn_param(ctx, param, item_pos)) + .collect::<Vec<_>>(); + let return_type = + maybe_serialize_ts_type_ann(ctx, &sig.type_ann, item_pos); + + ctx.write_bool(computed_pos, false); + ctx.write_bool(optional_pos, false); + ctx.write_bool(readonly_pos, false); + ctx.write_str(kind_pos, "method"); + ctx.write_ref(key_pos, key); + ctx.write_refs(params_pos, params); + ctx.write_maybe_ref(return_type_pos, return_type); + + item_pos + } + TsTypeElement::TsIndexSignature(sig) => { + serialize_ts_index_sig(ctx, sig, pos) + } + }) + .collect::<Vec<_>>(); + + ctx.write_bool(declare_pos, node.declare); + ctx.write_ref(id_pos, ident_id); + ctx.write_maybe_ref(type_param_pos, type_param); + ctx.write_refs(extends_pos, extend_ids); + ctx.write_ref(body_pos, body_id); + + // Body + ctx.write_refs(body_body_pos, body_elem_ids); + + pos + } + Decl::TsTypeAlias(node) => { + let pos = ctx.header(AstNode::TsTypeAlias, parent, &node.span, 4); + let declare_pos = ctx.bool_field(AstProp::Declare); + let id_pos = ctx.ref_field(AstProp::Id); + let type_params_pos = ctx.ref_field(AstProp::TypeParameters); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let ident = serialize_ident(ctx, &node.id, pos); + let type_ann = serialize_ts_type(ctx, &node.type_ann, pos); + let type_param = + maybe_serialize_ts_type_param(ctx, &node.type_params, pos); + + ctx.write_bool(declare_pos, node.declare); + ctx.write_ref(id_pos, ident); + ctx.write_maybe_ref(type_params_pos, type_param); + ctx.write_ref(type_ann_pos, type_ann); + + pos + } + Decl::TsEnum(node) => { + let pos = ctx.header(AstNode::TSEnumDeclaration, parent, &node.span, 3); + let declare_pos = ctx.bool_field(AstProp::Declare); + let const_pos = ctx.bool_field(AstProp::Const); + let id_pos = ctx.ref_field(AstProp::Id); + let body_pos = ctx.ref_field(AstProp::Body); + + let body = ctx.header(AstNode::TSEnumBody, pos, &node.span, 1); + let members_pos = ctx.ref_vec_field(AstProp::Members, node.members.len()); + + let ident_id = serialize_ident(ctx, &node.id, parent); + + let members = node + .members + .iter() + .map(|member| { + let member_id = + ctx.header(AstNode::TSEnumMember, body, &member.span, 2); + let id_pos = ctx.ref_field(AstProp::Id); + let init_pos = ctx.ref_field(AstProp::Initializer); + + let ident = match &member.id { + TsEnumMemberId::Ident(ident) => { + serialize_ident(ctx, ident, member_id) + } + TsEnumMemberId::Str(lit_str) => { + serialize_lit(ctx, &Lit::Str(lit_str.clone()), member_id) + } + }; + + let init = member + .init + .as_ref() + .map(|init| serialize_expr(ctx, init, member_id)); + + ctx.write_ref(id_pos, ident); + ctx.write_maybe_ref(init_pos, init); + + member_id + }) + .collect::<Vec<_>>(); + + ctx.write_refs(members_pos, members); + + ctx.write_bool(declare_pos, node.declare); + ctx.write_bool(const_pos, node.is_const); + ctx.write_ref(id_pos, ident_id); + ctx.write_ref(body_pos, body); + + pos + } + Decl::TsModule(ts_module_decl) => { + ctx.header(AstNode::TsModule, parent, &ts_module_decl.span, 0) + } + } +} + +fn serialize_ts_index_sig( + ctx: &mut TsEsTreeBuilder, + node: &TsIndexSignature, + parent: NodeRef, +) -> NodeRef { + let pos =
ctx.header(AstNode::TSIndexSignature, parent, &node.span, 4); + let readonly_pos = ctx.bool_field(AstProp::Readonly); + // TODO: where is this coming from? + let static_pos = ctx.bool_field(AstProp::Static); + let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len()); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos); + + let params = node + .params + .iter() + .map(|param| serialize_ts_fn_param(ctx, param, pos)) + .collect::<Vec<_>>(); + + ctx.write_bool(readonly_pos, false); + ctx.write_bool(static_pos, node.is_static); + ctx.write_refs(params_pos, params); + ctx.write_maybe_ref(type_ann_pos, type_ann); + + pos +} + +fn accessibility_to_str(accessibility: Accessibility) -> String { + match accessibility { + Accessibility::Public => "public".to_string(), + Accessibility::Protected => "protected".to_string(), + Accessibility::Private => "private".to_string(), + } +} + +fn serialize_private_name( + ctx: &mut TsEsTreeBuilder, + node: &PrivateName, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::PrivateIdentifier, parent, &node.span, 1); + let name_pos = ctx.str_field(AstProp::Name); + + ctx.write_str(name_pos, node.name.as_str()); + + pos +} + +fn serialize_jsx_element( + ctx: &mut TsEsTreeBuilder, + node: &JSXElement, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::JSXElement, parent, &node.span, 3); + let open_pos = ctx.ref_field(AstProp::OpeningElement); + let close_pos = ctx.ref_field(AstProp::ClosingElement); + let children_pos = ctx.ref_vec_field(AstProp::Children, node.children.len()); + + let open = serialize_jsx_opening_element(ctx, &node.opening, pos); + + let close = node.closing.as_ref().map(|closing| { + let closing_pos = + ctx.header(AstNode::JSXClosingElement, pos, &closing.span, 1); + let name_pos = ctx.ref_field(AstProp::Name); + + let name = serialize_jsx_element_name(ctx, &closing.name, closing_pos); + ctx.write_ref(name_pos, name); + + closing_pos + }); + + let children = serialize_jsx_children(ctx, &node.children, pos); + + ctx.write_ref(open_pos, open); + ctx.write_maybe_ref(close_pos, close); + ctx.write_refs(children_pos, children); + + pos +} + +fn serialize_jsx_fragment( + ctx: &mut TsEsTreeBuilder, + node: &JSXFragment, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::JSXFragment, parent, &node.span, 3); + + let opening_pos = ctx.ref_field(AstProp::OpeningFragment); + let closing_pos = ctx.ref_field(AstProp::ClosingFragment); + let children_pos = ctx.ref_vec_field(AstProp::Children, node.children.len()); + + let opening_id = + ctx.header(AstNode::JSXOpeningFragment, pos, &node.opening.span, 0); + let closing_id = + ctx.header(AstNode::JSXClosingFragment, pos, &node.closing.span, 0); + + let children = serialize_jsx_children(ctx, &node.children, pos); + + ctx.write_ref(opening_pos, opening_id); + ctx.write_ref(closing_pos, closing_id); + ctx.write_refs(children_pos, children); + + pos +} + +fn serialize_jsx_children( + ctx: &mut TsEsTreeBuilder, + children: &[JSXElementChild], + parent: NodeRef, +) -> Vec<NodeRef> { + children + .iter() + .map(|child| { + match child { + JSXElementChild::JSXText(text) => { + let pos = ctx.header(AstNode::JSXText, parent, &text.span, 2); + let raw_pos = ctx.str_field(AstProp::Raw); + let value_pos = ctx.str_field(AstProp::Value); + + ctx.write_str(raw_pos, &text.raw); + ctx.write_str(value_pos, &text.value); + + pos + } + JSXElementChild::JSXExprContainer(container) => {
serialize_jsx_container_expr(ctx, container, parent) + } + JSXElementChild::JSXElement(el) => { + serialize_jsx_element(ctx, el, parent) + } + JSXElementChild::JSXFragment(frag) => { + serialize_jsx_fragment(ctx, frag, parent) + } + // No parser supports this + JSXElementChild::JSXSpreadChild(_) => unreachable!(), + } + }) + .collect::<Vec<_>>() +} + +fn serialize_jsx_member_expr( + ctx: &mut TsEsTreeBuilder, + node: &JSXMemberExpr, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::JSXMemberExpression, parent, &node.span, 2); + let obj_ref = ctx.ref_field(AstProp::Object); + let prop_ref = ctx.ref_field(AstProp::Property); + + let obj = match &node.obj { + JSXObject::JSXMemberExpr(member) => { + serialize_jsx_member_expr(ctx, member, pos) + } + JSXObject::Ident(ident) => serialize_jsx_identifier(ctx, ident, parent), + }; + + let prop = serialize_ident_name_as_jsx_identifier(ctx, &node.prop, pos); + + ctx.write_ref(obj_ref, obj); + ctx.write_ref(prop_ref, prop); + + pos +} + +fn serialize_jsx_element_name( + ctx: &mut TsEsTreeBuilder, + node: &JSXElementName, + parent: NodeRef, +) -> NodeRef { + match &node { + JSXElementName::Ident(ident) => { + serialize_jsx_identifier(ctx, ident, parent) + } + JSXElementName::JSXMemberExpr(member) => { + serialize_jsx_member_expr(ctx, member, parent) + } + JSXElementName::JSXNamespacedName(ns) => { + serialize_jsx_namespaced_name(ctx, ns, parent) + } + } +} + +fn serialize_jsx_opening_element( + ctx: &mut TsEsTreeBuilder, + node: &JSXOpeningElement, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::JSXOpeningElement, parent, &node.span, 3); + let sclose_pos = ctx.bool_field(AstProp::SelfClosing); + let name_pos = ctx.ref_field(AstProp::Name); + let attrs_pos = ctx.ref_vec_field(AstProp::Attributes, node.attrs.len()); + + let name = serialize_jsx_element_name(ctx, &node.name, pos); + + // FIXME: type args + + let attrs = node + .attrs + .iter() + .map(|attr| match attr { + JSXAttrOrSpread::JSXAttr(attr) => { + let attr_pos = ctx.header(AstNode::JSXAttribute, pos, &attr.span, 2); + let name_pos = ctx.ref_field(AstProp::Name); + let value_pos = ctx.ref_field(AstProp::Value); + + let name = match &attr.name { + JSXAttrName::Ident(name) => { + serialize_ident_name_as_jsx_identifier(ctx, name, attr_pos) + } + JSXAttrName::JSXNamespacedName(node) => { + serialize_jsx_namespaced_name(ctx, node, attr_pos) + } + }; + + let value = attr.value.as_ref().map(|value| match value { + JSXAttrValue::Lit(lit) => serialize_lit(ctx, lit, attr_pos), + JSXAttrValue::JSXExprContainer(container) => { + serialize_jsx_container_expr(ctx, container, attr_pos) + } + JSXAttrValue::JSXElement(el) => { + serialize_jsx_element(ctx, el, attr_pos) + } + JSXAttrValue::JSXFragment(frag) => { + serialize_jsx_fragment(ctx, frag, attr_pos) + } + }); + + ctx.write_ref(name_pos, name); + ctx.write_maybe_ref(value_pos, value); + + attr_pos + } + JSXAttrOrSpread::SpreadElement(spread) => { + let attr_pos = + ctx.header(AstNode::JSXSpreadAttribute, pos, &spread.dot3_token, 1); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let arg = serialize_expr(ctx, &spread.expr, attr_pos); + + ctx.write_ref(arg_pos, arg); + + attr_pos + } + }) + .collect::<Vec<_>>(); + + ctx.write_bool(sclose_pos, node.self_closing); + ctx.write_ref(name_pos, name); + ctx.write_refs(attrs_pos, attrs); + + pos +} + +fn serialize_jsx_container_expr( + ctx: &mut TsEsTreeBuilder, + node: &JSXExprContainer, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::JSXExpressionContainer, parent,
&node.span, 1); + let expr_pos = ctx.ref_field(AstProp::Expression); + + let expr = match &node.expr { + JSXExpr::JSXEmptyExpr(expr) => serialize_jsx_empty_expr(ctx, expr, pos), + JSXExpr::Expr(expr) => serialize_expr(ctx, expr, pos), + }; + + ctx.write_ref(expr_pos, expr); + + pos +} + +fn serialize_jsx_empty_expr( + ctx: &mut TsEsTreeBuilder, + node: &JSXEmptyExpr, + parent: NodeRef, +) -> NodeRef { + ctx.header(AstNode::JSXEmptyExpression, parent, &node.span, 0) +} + +fn serialize_jsx_namespaced_name( + ctx: &mut TsEsTreeBuilder, + node: &JSXNamespacedName, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::JSXNamespacedName, parent, &node.span, 2); + let ns_pos = ctx.ref_field(AstProp::Namespace); + let name_pos = ctx.ref_field(AstProp::Name); + + let ns_id = serialize_ident_name_as_jsx_identifier(ctx, &node.ns, pos); + let name_id = serialize_ident_name_as_jsx_identifier(ctx, &node.name, pos); + + ctx.write_ref(ns_pos, ns_id); + ctx.write_ref(name_pos, name_id); + + pos +} + +fn serialize_ident_name_as_jsx_identifier( + ctx: &mut TsEsTreeBuilder, + node: &IdentName, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::JSXIdentifier, parent, &node.span, 1); + let name_pos = ctx.str_field(AstProp::Name); + + ctx.write_str(name_pos, &node.sym); + + pos +} + +fn serialize_jsx_identifier( + ctx: &mut TsEsTreeBuilder, + node: &Ident, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::JSXIdentifier, parent, &node.span, 1); + let name_pos = ctx.str_field(AstProp::Name); + + ctx.write_str(name_pos, &node.sym); + + pos +} + +fn serialize_pat( + ctx: &mut TsEsTreeBuilder, + pat: &Pat, + parent: NodeRef, +) -> NodeRef { + match pat { + Pat::Ident(node) => serialize_ident(ctx, &node.id, parent), + Pat::Array(node) => { + let pos = ctx.header(AstNode::ArrayPattern, parent, &node.span, 3); + let opt_pos = ctx.bool_field(AstProp::Optional); + let type_pos = ctx.ref_field(AstProp::TypeAnnotation); + let elems_pos = ctx.ref_vec_field(AstProp::Elements, node.elems.len()); + + let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos); + + let children = node + .elems + .iter() + .map(|pat| { + pat + .as_ref() + .map_or(NodeRef(0), |v| serialize_pat(ctx, v, pos)) + }) + .collect::<Vec<_>>(); + + ctx.write_bool(opt_pos, node.optional); + ctx.write_maybe_ref(type_pos, type_ann); + ctx.write_refs(elems_pos, children); + + pos + } + Pat::Rest(node) => { + let pos = ctx.header(AstNode::RestElement, parent, &node.span, 2); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos); + let arg = serialize_pat(ctx, &node.arg, parent); + + ctx.write_maybe_ref(type_ann_pos, type_ann); + ctx.write_ref(arg_pos, arg); + + pos + } + Pat::Object(node) => { + let pos = ctx.header(AstNode::ObjectPattern, parent, &node.span, 3); + let opt_pos = ctx.bool_field(AstProp::Optional); + let props_pos = ctx.ref_vec_field(AstProp::Properties, node.props.len()); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos); + + let children = node + .props + .iter() + .map(|prop| match prop { + ObjectPatProp::KeyValue(key_value_prop) => { + let child_pos = + ctx.header(AstNode::Property, pos, &key_value_prop.span(), 3); + let computed_pos = ctx.bool_field(AstProp::Computed); + let key_pos = ctx.ref_field(AstProp::Key); + let value_pos = ctx.ref_field(AstProp::Value); + + let computed =
matches!(key_value_prop.key, PropName::Computed(_)); + + let key = serialize_prop_name(ctx, &key_value_prop.key, child_pos); + let value = + serialize_pat(ctx, key_value_prop.value.as_ref(), child_pos); + + ctx.write_bool(computed_pos, computed); + ctx.write_ref(key_pos, key); + ctx.write_ref(value_pos, value); + + child_pos + } + ObjectPatProp::Assign(assign_pat_prop) => { + let child_pos = + ctx.header(AstNode::Property, pos, &assign_pat_prop.span, 3); + // TODO: Doesn't seem to be present in SWC ast + let _computed_pos = ctx.bool_field(AstProp::Computed); + let key_pos = ctx.ref_field(AstProp::Key); + let value_pos = ctx.ref_field(AstProp::Value); + + let ident = serialize_ident(ctx, &assign_pat_prop.key.id, parent); + + let value = assign_pat_prop + .value + .as_ref() + .map(|value| serialize_expr(ctx, value, child_pos)); + + ctx.write_ref(key_pos, ident); + ctx.write_maybe_ref(value_pos, value); + + child_pos + } + ObjectPatProp::Rest(rest_pat) => { + serialize_pat(ctx, &Pat::Rest(rest_pat.clone()), parent) + } + }) + .collect::<Vec<_>>(); + + ctx.write_bool(opt_pos, node.optional); + ctx.write_maybe_ref(type_ann_pos, type_ann); + ctx.write_refs(props_pos, children); + + pos + } + Pat::Assign(node) => { + let pos = ctx.header(AstNode::AssignmentPattern, parent, &node.span, 2); + let left_pos = ctx.ref_field(AstProp::Left); + let right_pos = ctx.ref_field(AstProp::Right); + + let left = serialize_pat(ctx, &node.left, pos); + let right = serialize_expr(ctx, &node.right, pos); + + ctx.write_ref(left_pos, left); + ctx.write_ref(right_pos, right); + + pos + } + Pat::Invalid(_) => unreachable!(), + Pat::Expr(node) => serialize_expr(ctx, node, parent), + } +} + +fn serialize_for_head( + ctx: &mut TsEsTreeBuilder, + for_head: &ForHead, + parent: NodeRef, +) -> NodeRef { + match for_head { + ForHead::VarDecl(var_decl) => { + serialize_decl(ctx, &Decl::Var(var_decl.clone()), parent) + } + ForHead::UsingDecl(using_decl) => { + serialize_decl(ctx, &Decl::Using(using_decl.clone()), parent) + } + ForHead::Pat(pat) => serialize_pat(ctx, pat, parent), + } +} + +fn serialize_spread( + ctx: &mut TsEsTreeBuilder, + expr: &Expr, + span: &Span, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::SpreadElement, parent, span, 1); + let arg_pos = ctx.ref_field(AstProp::Argument); + + let expr_pos = serialize_expr(ctx, expr, parent); + ctx.write_ref(arg_pos, expr_pos); + + pos +} + +fn serialize_ident_name( + ctx: &mut TsEsTreeBuilder, + ident_name: &IdentName, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::Identifier, parent, &ident_name.span, 1); + let name_pos = ctx.str_field(AstProp::Name); + ctx.write_str(name_pos, ident_name.sym.as_str()); + + pos +} + +fn serialize_prop_name( + ctx: &mut TsEsTreeBuilder, + prop_name: &PropName, + parent: NodeRef, +) -> NodeRef { + match prop_name { + PropName::Ident(ident_name) => { + serialize_ident_name(ctx, ident_name, parent) + } + PropName::Str(str_prop) => { + let child_pos = + ctx.header(AstNode::StringLiteral, parent, &str_prop.span, 1); + let value_pos = ctx.str_field(AstProp::Value); + ctx.write_str(value_pos, &str_prop.value); + + child_pos + } + PropName::Num(number) => { + serialize_lit(ctx, &Lit::Num(number.clone()), parent) + } + PropName::Computed(node) => serialize_expr(ctx, &node.expr, parent), + PropName::BigInt(big_int) => { + serialize_lit(ctx, &Lit::BigInt(big_int.clone()), parent) + } + } +} + +fn serialize_lit( + ctx: &mut TsEsTreeBuilder, + lit: &Lit, + parent: NodeRef, +) -> NodeRef { + match lit { + Lit::Str(node)
=> { + let pos = ctx.header(AstNode::StringLiteral, parent, &node.span, 1); + let value_pos = ctx.str_field(AstProp::Value); + + ctx.write_str(value_pos, &node.value); + + pos + } + Lit::Bool(lit_bool) => { + let pos = ctx.header(AstNode::Bool, parent, &lit_bool.span, 1); + let value_pos = ctx.bool_field(AstProp::Value); + + ctx.write_bool(value_pos, lit_bool.value); + + pos + } + Lit::Null(node) => ctx.header(AstNode::Null, parent, &node.span, 0), + Lit::Num(node) => { + let pos = ctx.header(AstNode::NumericLiteral, parent, &node.span, 1); + let value_pos = ctx.str_field(AstProp::Value); + + let value = node.raw.as_ref().unwrap(); + ctx.write_str(value_pos, value); + + pos + } + Lit::BigInt(node) => { + let pos = ctx.header(AstNode::BigIntLiteral, parent, &node.span, 1); + let value_pos = ctx.str_field(AstProp::Value); + + ctx.write_str(value_pos, &node.value.to_string()); + + pos + } + Lit::Regex(node) => { + let pos = ctx.header(AstNode::RegExpLiteral, parent, &node.span, 2); + let pattern_pos = ctx.str_field(AstProp::Pattern); + let flags_pos = ctx.str_field(AstProp::Flags); + + ctx.write_str(pattern_pos, node.exp.as_str()); + ctx.write_str(flags_pos, node.flags.as_str()); + + pos + } + Lit::JSXText(jsxtext) => { + ctx.header(AstNode::JSXText, parent, &jsxtext.span, 0) + } + } +} + +fn serialize_ts_param_inst( + ctx: &mut TsEsTreeBuilder, + node: &TsTypeParamInstantiation, + parent: NodeRef, +) -> NodeRef { + let pos = + ctx.header(AstNode::TSTypeParameterInstantiation, parent, &node.span, 1); + let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len()); + + let params = node + .params + .iter() + .map(|param| serialize_ts_type(ctx, param, pos)) + .collect::<Vec<_>>(); + + ctx.write_refs(params_pos, params); + + pos +} + +fn serialize_ts_type( + ctx: &mut TsEsTreeBuilder, + node: &TsType, + parent: NodeRef, +) -> NodeRef { + match node { + TsType::TsKeywordType(node) => { + let kind = match node.kind { + TsKeywordTypeKind::TsAnyKeyword => AstNode::TSAnyKeyword, + TsKeywordTypeKind::TsUnknownKeyword => AstNode::TSUnknownKeyword, + TsKeywordTypeKind::TsNumberKeyword => AstNode::TSNumberKeyword, + TsKeywordTypeKind::TsObjectKeyword => AstNode::TSObjectKeyword, + TsKeywordTypeKind::TsBooleanKeyword => AstNode::TSBooleanKeyword, + TsKeywordTypeKind::TsBigIntKeyword => AstNode::TSBigIntKeyword, + TsKeywordTypeKind::TsStringKeyword => AstNode::TSStringKeyword, + TsKeywordTypeKind::TsSymbolKeyword => AstNode::TSSymbolKeyword, + TsKeywordTypeKind::TsVoidKeyword => AstNode::TSVoidKeyword, + TsKeywordTypeKind::TsUndefinedKeyword => AstNode::TSUndefinedKeyword, + TsKeywordTypeKind::TsNullKeyword => AstNode::TSNullKeyword, + TsKeywordTypeKind::TsNeverKeyword => AstNode::TSNeverKeyword, + TsKeywordTypeKind::TsIntrinsicKeyword => AstNode::TSIntrinsicKeyword, + }; + + ctx.header(kind, parent, &node.span, 0) + } + TsType::TsThisType(node) => { + ctx.header(AstNode::TSThisType, parent, &node.span, 0) + } + TsType::TsFnOrConstructorType(node) => match node { + TsFnOrConstructorType::TsFnType(node) => { + let pos = ctx.header(AstNode::TSFunctionType, parent, &node.span, 1); + let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len()); + + let param_ids = node + .params + .iter() + .map(|param| serialize_ts_fn_param(ctx, param, pos)) + .collect::<Vec<_>>(); + + ctx.write_refs(params_pos, param_ids); + + pos + } + TsFnOrConstructorType::TsConstructorType(_) => { + todo!() + } + }, + TsType::TsTypeRef(node) => { + let pos = ctx.header(AstNode::TSTypeReference, parent, &node.span, 2); + let name_pos
= ctx.ref_field(AstProp::TypeName); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + + let name = serialize_ts_entity_name(ctx, &node.type_name, pos); + + let type_args = node + .type_params + .clone() + .map(|param| serialize_ts_param_inst(ctx, &param, pos)); + + ctx.write_ref(name_pos, name); + ctx.write_maybe_ref(type_args_pos, type_args); + + pos + } + TsType::TsTypeQuery(node) => { + let pos = ctx.header(AstNode::TSTypeQuery, parent, &node.span, 2); + let name_pos = ctx.ref_field(AstProp::ExprName); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + + let expr_name = match &node.expr_name { + TsTypeQueryExpr::TsEntityName(entity) => { + serialize_ts_entity_name(ctx, entity, pos) + } + TsTypeQueryExpr::Import(child) => { + serialize_ts_type(ctx, &TsType::TsImportType(child.clone()), pos) + } + }; + + let type_args = node + .type_args + .clone() + .map(|param| serialize_ts_param_inst(ctx, &param, pos)); + + ctx.write_ref(name_pos, expr_name); + ctx.write_maybe_ref(type_args_pos, type_args); + + pos + } + TsType::TsTypeLit(_) => { + // TODO: Not sure what this is + todo!() + } + TsType::TsArrayType(node) => { + let pos = ctx.header(AstNode::TSArrayType, parent, &node.span, 1); + let elem_pos = ctx.ref_field(AstProp::ElementType); + + let elem = serialize_ts_type(ctx, &node.elem_type, pos); + + ctx.write_ref(elem_pos, elem); + + pos + } + TsType::TsTupleType(node) => { + let pos = ctx.header(AstNode::TSTupleType, parent, &node.span, 1); + let children_pos = + ctx.ref_vec_field(AstProp::ElementTypes, node.elem_types.len()); + + let children = node + .elem_types + .iter() + .map(|elem| { + if let Some(label) = &elem.label { + let child_pos = + ctx.header(AstNode::TSNamedTupleMember, pos, &elem.span, 1); + let label_pos = ctx.ref_field(AstProp::Label); + let type_pos = ctx.ref_field(AstProp::ElementType); + + let label_id = serialize_pat(ctx, label, child_pos); + let type_id = serialize_ts_type(ctx, elem.ty.as_ref(), child_pos); + + ctx.write_ref(label_pos, label_id); + ctx.write_ref(type_pos, type_id); + + child_pos + } else { + serialize_ts_type(ctx, elem.ty.as_ref(), pos) + } + }) + .collect::<Vec<_>>(); + + ctx.write_refs(children_pos, children); + + pos + } + TsType::TsOptionalType(_) => todo!(), + TsType::TsRestType(node) => { + let pos = ctx.header(AstNode::TSRestType, parent, &node.span, 1); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let type_ann = serialize_ts_type(ctx, &node.type_ann, pos); + + ctx.write_ref(type_ann_pos, type_ann); + + pos + } + TsType::TsUnionOrIntersectionType(node) => match node { + TsUnionOrIntersectionType::TsUnionType(node) => { + let pos = ctx.header(AstNode::TSUnionType, parent, &node.span, 1); + let types_pos = ctx.ref_vec_field(AstProp::Types, node.types.len()); + + let children = node + .types + .iter() + .map(|item| serialize_ts_type(ctx, item, pos)) + .collect::<Vec<_>>(); + + ctx.write_refs(types_pos, children); + + pos + } + TsUnionOrIntersectionType::TsIntersectionType(node) => { + let pos = + ctx.header(AstNode::TSIntersectionType, parent, &node.span, 1); + let types_pos = ctx.ref_vec_field(AstProp::Types, node.types.len()); + + let children = node + .types + .iter() + .map(|item| serialize_ts_type(ctx, item, pos)) + .collect::<Vec<_>>(); + + ctx.write_refs(types_pos, children); + + pos + } + }, + TsType::TsConditionalType(node) => { + let pos = ctx.header(AstNode::TSConditionalType, parent, &node.span, 4); + let check_pos = ctx.ref_field(AstProp::CheckType); + let extends_pos = ctx.ref_field(AstProp::ExtendsType); + let
true_pos = ctx.ref_field(AstProp::TrueType); + let false_pos = ctx.ref_field(AstProp::FalseType); + + let check = serialize_ts_type(ctx, &node.check_type, pos); + let extends = serialize_ts_type(ctx, &node.extends_type, pos); + let v_true = serialize_ts_type(ctx, &node.true_type, pos); + let v_false = serialize_ts_type(ctx, &node.false_type, pos); + + ctx.write_ref(check_pos, check); + ctx.write_ref(extends_pos, extends); + ctx.write_ref(true_pos, v_true); + ctx.write_ref(false_pos, v_false); + + pos + } + TsType::TsInferType(node) => { + let pos = ctx.header(AstNode::TSInferType, parent, &node.span, 1); + let param_pos = ctx.ref_field(AstProp::TypeParameter); + + let param = serialize_ts_type_param(ctx, &node.type_param, parent); + + ctx.write_ref(param_pos, param); + + pos + } + TsType::TsParenthesizedType(_) => todo!(), + TsType::TsTypeOperator(node) => { + let pos = ctx.header(AstNode::TSTypeOperator, parent, &node.span, 2); + + let operator_pos = ctx.str_field(AstProp::Operator); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let type_ann = serialize_ts_type(ctx, &node.type_ann, pos); + + ctx.write_str( + operator_pos, + match node.op { + TsTypeOperatorOp::KeyOf => "keyof", + TsTypeOperatorOp::Unique => "unique", + TsTypeOperatorOp::ReadOnly => "readonly", + }, + ); + ctx.write_ref(type_ann_pos, type_ann); + + pos + } + TsType::TsIndexedAccessType(node) => { + let pos = ctx.header(AstNode::TSIndexedAccessType, parent, &node.span, 2); + + let index_type_pos = ctx.ref_field(AstProp::IndexType); + let obj_type_pos = ctx.ref_field(AstProp::ObjectType); + + let index = serialize_ts_type(ctx, &node.index_type, pos); + let obj = serialize_ts_type(ctx, &node.obj_type, pos); + + ctx.write_ref(index_type_pos, index); + ctx.write_ref(obj_type_pos, obj); + + pos + } + TsType::TsMappedType(node) => { + let pos = ctx.header(AstNode::TSMappedType, parent, &node.span, 5); + + let name_pos = ctx.ref_field(AstProp::NameType); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + let type_param_pos = ctx.ref_field(AstProp::TypeParameter); + + let opt_pos = + create_true_plus_minus_field(ctx, AstProp::Optional, node.optional); + let readonly_pos = + create_true_plus_minus_field(ctx, AstProp::Readonly, node.readonly); + + let name_id = maybe_serialize_ts_type(ctx, &node.name_type, pos); + let type_ann = maybe_serialize_ts_type(ctx, &node.type_ann, pos); + let type_param = serialize_ts_type_param(ctx, &node.type_param, pos); + + write_true_plus_minus(ctx, opt_pos, node.optional); + write_true_plus_minus(ctx, readonly_pos, node.readonly); + ctx.write_maybe_ref(name_pos, name_id); + ctx.write_maybe_ref(type_ann_pos, type_ann); + ctx.write_ref(type_param_pos, type_param); + + pos + } + TsType::TsLitType(node) => serialize_ts_lit_type(ctx, node, parent), + TsType::TsTypePredicate(node) => { + let pos = ctx.header(AstNode::TSTypePredicate, parent, &node.span, 3); + + let asserts_pos = ctx.bool_field(AstProp::Asserts); + let param_name_pos = ctx.ref_field(AstProp::ParameterName); + let type_ann_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let param_name = match &node.param_name { + TsThisTypeOrIdent::TsThisType(ts_this_type) => { + ctx.header(AstNode::TSThisType, pos, &ts_this_type.span, 0) + } + TsThisTypeOrIdent::Ident(ident) => serialize_ident(ctx, ident, pos), + }; + + let type_ann = maybe_serialize_ts_type_ann(ctx, &node.type_ann, pos); + + ctx.write_bool(asserts_pos, node.asserts); + ctx.write_ref(param_name_pos, param_name); + ctx.write_maybe_ref(type_ann_pos, type_ann); + 
+ pos + } + TsType::TsImportType(node) => { + let pos = ctx.header(AstNode::TSImportType, parent, &node.span, 3); + let arg_pos = ctx.ref_field(AstProp::Argument); + let type_args_pos = ctx.ref_field(AstProp::TypeArguments); + let qualifier_pos = ctx.ref_field(AstProp::Qualifier); + + let arg = serialize_ts_lit_type( + ctx, + &TsLitType { + lit: TsLit::Str(node.arg.clone()), + span: node.arg.span, + }, + pos, + ); + + let type_arg = node.type_args.clone().map(|param_node| { + serialize_ts_param_inst(ctx, param_node.as_ref(), pos) + }); + + let qualifier = node.qualifier.clone().map_or(NodeRef(0), |quali| { + serialize_ts_entity_name(ctx, &quali, pos) + }); + + ctx.write_ref(arg_pos, arg); + ctx.write_ref(qualifier_pos, qualifier); + ctx.write_maybe_ref(type_args_pos, type_arg); + + pos + } + } +} + +fn serialize_ts_lit_type( + ctx: &mut TsEsTreeBuilder, + node: &TsLitType, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::TSLiteralType, parent, &node.span, 1); + let lit_pos = ctx.ref_field(AstProp::Literal); + + let lit = match &node.lit { + TsLit::Number(lit) => serialize_lit(ctx, &Lit::Num(lit.clone()), pos), + TsLit::Str(lit) => serialize_lit(ctx, &Lit::Str(lit.clone()), pos), + TsLit::Bool(lit) => serialize_lit(ctx, &Lit::Bool(*lit), pos), + TsLit::BigInt(lit) => serialize_lit(ctx, &Lit::BigInt(lit.clone()), pos), + TsLit::Tpl(lit) => serialize_expr( + ctx, + &Expr::Tpl(Tpl { + span: lit.span, + exprs: vec![], + quasis: lit.quasis.clone(), + }), + pos, + ), + }; + + ctx.write_ref(lit_pos, lit); + + pos +} + +fn create_true_plus_minus_field( + ctx: &mut TsEsTreeBuilder, + prop: AstProp, + value: Option<TruePlusMinus>, +) -> NodePos { + if let Some(v) = value { + match v { + TruePlusMinus::True => NodePos::Bool(ctx.bool_field(prop)), + TruePlusMinus::Plus | TruePlusMinus::Minus => { + NodePos::Str(ctx.str_field(prop)) + } + } + } else { + NodePos::Undef(ctx.undefined_field(prop)) + } +} + +fn extract_pos(pos: NodePos) -> usize { + match pos { + NodePos::Bool(bool_pos) => bool_pos.0, + NodePos::Field(field_pos) => field_pos.0, + NodePos::FieldArr(field_arr_pos) => field_arr_pos.0, + NodePos::Str(str_pos) => str_pos.0, + NodePos::Undef(undef_pos) => undef_pos.0, + NodePos::Null(null_pos) => null_pos.0, + } +} + +fn write_true_plus_minus( + ctx: &mut TsEsTreeBuilder, + pos: NodePos, + value: Option<TruePlusMinus>, +) { + if let Some(v) = value { + match v { + TruePlusMinus::True => { + let bool_pos = BoolPos(extract_pos(pos)); + ctx.write_bool(bool_pos, true); + } + TruePlusMinus::Plus => { + let str_pos = StrPos(extract_pos(pos)); + ctx.write_str(str_pos, "+") + } + TruePlusMinus::Minus => { + let str_pos = StrPos(extract_pos(pos)); + ctx.write_str(str_pos, "-") + } + } + } +} + +fn serialize_ts_entity_name( + ctx: &mut TsEsTreeBuilder, + node: &TsEntityName, + parent: NodeRef, +) -> NodeRef { + match &node { + TsEntityName::TsQualifiedName(_) => todo!(), + TsEntityName::Ident(ident) => serialize_ident(ctx, ident, parent), + } +} + +fn maybe_serialize_ts_type_ann( + ctx: &mut TsEsTreeBuilder, + node: &Option<Box<TsTypeAnn>>, + parent: NodeRef, +) -> Option<NodeRef> { + node + .as_ref() + .map(|type_ann| serialize_ts_type_ann(ctx, type_ann, parent)) +} + +fn serialize_ts_type_ann( + ctx: &mut TsEsTreeBuilder, + node: &TsTypeAnn, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::TSTypeAnnotation, parent, &node.span, 1); + let type_pos = ctx.ref_field(AstProp::TypeAnnotation); + + let v_type = serialize_ts_type(ctx, &node.type_ann, pos); + + ctx.write_ref(type_pos, v_type); + + pos +} + +fn
maybe_serialize_ts_type( + ctx: &mut TsEsTreeBuilder, + node: &Option<Box<TsType>>, + parent: NodeRef, +) -> Option<NodeRef> { + node + .as_ref() + .map(|item| serialize_ts_type(ctx, item, parent)) +} + +fn serialize_ts_type_param( + ctx: &mut TsEsTreeBuilder, + node: &TsTypeParam, + parent: NodeRef, +) -> NodeRef { + let pos = ctx.header(AstNode::TSTypeParameter, parent, &node.span, 6); + let name_pos = ctx.ref_field(AstProp::Name); + let constraint_pos = ctx.ref_field(AstProp::Constraint); + let default_pos = ctx.ref_field(AstProp::Default); + let const_pos = ctx.bool_field(AstProp::Const); + let in_pos = ctx.bool_field(AstProp::In); + let out_pos = ctx.bool_field(AstProp::Out); + + let name = serialize_ident(ctx, &node.name, pos); + let constraint = maybe_serialize_ts_type(ctx, &node.constraint, pos); + let default = maybe_serialize_ts_type(ctx, &node.default, pos); + + ctx.write_bool(const_pos, node.is_const); + ctx.write_bool(in_pos, node.is_in); + ctx.write_bool(out_pos, node.is_out); + ctx.write_ref(name_pos, name); + ctx.write_maybe_ref(constraint_pos, constraint); + ctx.write_maybe_ref(default_pos, default); + + pos +} + +fn maybe_serialize_ts_type_param( + ctx: &mut TsEsTreeBuilder, + node: &Option<Box<TsTypeParamDecl>>, + parent: NodeRef, +) -> Option<NodeRef> { + node.as_ref().map(|node| { + let pos = + ctx.header(AstNode::TSTypeParameterDeclaration, parent, &node.span, 1); + let params_pos = ctx.ref_vec_field(AstProp::Params, node.params.len()); + + let params = node + .params + .iter() + .map(|param| serialize_ts_type_param(ctx, param, pos)) + .collect::<Vec<_>>(); + + ctx.write_refs(params_pos, params); + + pos + }) +} + +fn serialize_ts_fn_param( + ctx: &mut TsEsTreeBuilder, + node: &TsFnParam, + parent: NodeRef, +) -> NodeRef { + match node { + TsFnParam::Ident(ident) => serialize_ident(ctx, ident, parent), + TsFnParam::Array(pat) => { + serialize_pat(ctx, &Pat::Array(pat.clone()), parent) + } + TsFnParam::Rest(pat) => serialize_pat(ctx, &Pat::Rest(pat.clone()), parent), + TsFnParam::Object(pat) => { + serialize_pat(ctx, &Pat::Object(pat.clone()), parent) + } + } +} diff --git a/cli/tools/lint/ast_buffer/ts_estree.rs b/cli/tools/lint/ast_buffer/ts_estree.rs new file mode 100644 index 0000000000..64dbd82cde --- /dev/null +++ b/cli/tools/lint/ast_buffer/ts_estree.rs @@ -0,0 +1,516 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
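+// Node kind and property name tags for the serialized TSESTree AST buffer.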
+ +use std::fmt; +use std::fmt::Debug; +use std::fmt::Display; + +use deno_ast::swc::common::Span; + +use super::buffer::AstBufSerializer; +use super::buffer::BoolPos; +use super::buffer::FieldArrPos; +use super::buffer::FieldPos; +use super::buffer::NodeRef; +use super::buffer::NullPos; +use super::buffer::SerializeCtx; +use super::buffer::StrPos; +use super::buffer::UndefPos; + +#[derive(Debug, Clone, PartialEq)] +pub enum AstNode { + // First node must always be the empty/invalid node + Invalid, + // Typically the root node + Program, + + // Module declarations + ExportAllDeclaration, + ExportDefaultDeclaration, + ExportNamedDeclaration, + ImportDeclaration, + TsExportAssignment, + TsImportEquals, + TsNamespaceExport, + + // Decls + ClassDeclaration, + FunctionDeclaration, + TSEnumDeclaration, + TSInterface, + TsModule, + TsTypeAlias, + Using, + VariableDeclaration, + + // Statements + BlockStatement, + BreakStatement, + ContinueStatement, + DebuggerStatement, + DoWhileStatement, + EmptyStatement, + ExpressionStatement, + ForInStatement, + ForOfStatement, + ForStatement, + IfStatement, + LabeledStatement, + ReturnStatement, + SwitchCase, + SwitchStatement, + ThrowStatement, + TryStatement, + WhileStatement, + WithStatement, + + // Expressions + ArrayExpression, + ArrowFunctionExpression, + AssignmentExpression, + AwaitExpression, + BinaryExpression, + CallExpression, + ChainExpression, + ClassExpression, + ConditionalExpression, + FunctionExpression, + Identifier, + ImportExpression, + LogicalExpression, + MemberExpression, + MetaProp, + NewExpression, + ObjectExpression, + PrivateIdentifier, + SequenceExpression, + Super, + TaggedTemplateExpression, + TemplateLiteral, + ThisExpression, + TSAsExpression, + TsConstAssertion, + TsInstantiation, + TSNonNullExpression, + TSSatisfiesExpression, + TSTypeAssertion, + UnaryExpression, + UpdateExpression, + YieldExpression, + + // TODO: TSEsTree uses a single literal node + // Literals + StringLiteral, + Bool, + Null, + NumericLiteral, + BigIntLiteral, + RegExpLiteral, + + EmptyExpr, + SpreadElement, + Property, + VariableDeclarator, + CatchClause, + RestElement, + ExportSpecifier, + TemplateElement, + MethodDefinition, + ClassBody, + + // Patterns + ArrayPattern, + AssignmentPattern, + ObjectPattern, + + // JSX + JSXAttribute, + JSXClosingElement, + JSXClosingFragment, + JSXElement, + JSXEmptyExpression, + JSXExpressionContainer, + JSXFragment, + JSXIdentifier, + JSXMemberExpression, + JSXNamespacedName, + JSXOpeningElement, + JSXOpeningFragment, + JSXSpreadAttribute, + JSXSpreadChild, + JSXText, + + TSTypeAnnotation, + TSTypeParameterDeclaration, + TSTypeParameter, + TSTypeParameterInstantiation, + TSEnumMember, + TSInterfaceBody, + TSInterfaceHeritage, + TSTypeReference, + TSThisType, + TSLiteralType, + TSInferType, + TSConditionalType, + TSUnionType, + TSIntersectionType, + TSMappedType, + TSTypeQuery, + TSTupleType, + TSNamedTupleMember, + TSFunctionType, + TsCallSignatureDeclaration, + TSPropertySignature, + TSMethodSignature, + TSIndexSignature, + TSIndexedAccessType, + TSTypeOperator, + TSTypePredicate, + TSImportType, + TSRestType, + TSArrayType, + TSClassImplements, + + TSAnyKeyword, + TSBigIntKeyword, + TSBooleanKeyword, + TSIntrinsicKeyword, + TSNeverKeyword, + TSNullKeyword, + TSNumberKeyword, + TSObjectKeyword, + TSStringKeyword, + TSSymbolKeyword, + TSUndefinedKeyword, + TSUnknownKeyword, + TSVoidKeyword, + TSEnumBody, // Last value is used for max value +} + +impl Display for AstNode { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{ + Debug::fmt(self, f) + } +} + +impl From<AstNode> for u8 { + fn from(m: AstNode) -> u8 { + m as u8 + } +} + +#[derive(Debug, Clone)] +pub enum AstProp { + // Base, these must be in sync with JS in the same order. + Invalid, + Type, + Parent, + Range, + Length, // Not used in AST, but can be used in attr selectors + + // Starting from here the order doesn't matter. + // Following are all possible AST node properties. + Abstract, + Accessibility, + Alternate, + Argument, + Arguments, + Asserts, + Async, + Attributes, + Await, + Block, + Body, + Callee, + Cases, + Children, + CheckType, + ClosingElement, + ClosingFragment, + Computed, + Consequent, + Const, + Constraint, + Cooked, + Declaration, + Declarations, + Declare, + Default, + Definite, + Delegate, + Discriminant, + Elements, + ElementType, + ElementTypes, + ExprName, + Expression, + Expressions, + Exported, + Extends, + ExtendsType, + FalseType, + Finalizer, + Flags, + Generator, + Handler, + Id, + In, + IndexType, + Init, + Initializer, + Implements, + Key, + Kind, + Label, + Left, + Literal, + Local, + Members, + Meta, + Method, + Name, + Namespace, + NameType, + Object, + ObjectType, + OpeningElement, + OpeningFragment, + Operator, + Optional, + Out, + Param, + ParameterName, + Params, + Pattern, + Prefix, + Properties, + Property, + Qualifier, + Quasi, + Quasis, + Raw, + Readonly, + ReturnType, + Right, + SelfClosing, + Shorthand, + Source, + SourceType, + Specifiers, + Static, + SuperClass, + SuperTypeArguments, + Tag, + Tail, + Test, + TrueType, + TypeAnnotation, + TypeArguments, + TypeName, + TypeParameter, + TypeParameters, + Types, + Update, + Value, // Last value is used for max value +} + +// TODO: Feels like there should be an easier way to iterate over an +// enum in Rust and lowercase the first letter.
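+// One possible approach (hedged sketch, not part of this PR): the `strum`
+// crate can derive these camelCase names automatically, e.g.
+//
+//   #[derive(strum_macros::IntoStaticStr)]
+//   #[strum(serialize_all = "camelCase")]
+//   pub enum AstProp { TypeAnnotation, /* ... */ }
+//
+//   let s: &'static str = AstProp::TypeAnnotation.into(); // "typeAnnotation"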
+impl Display for AstProp { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let s = match self { + AstProp::Invalid => "__invalid__", // unused + AstProp::Parent => "parent", + AstProp::Range => "range", + AstProp::Type => "type", + AstProp::Length => "length", + AstProp::Abstract => "abstract", + AstProp::Accessibility => "accessibility", + AstProp::Alternate => "alternate", + AstProp::Argument => "argument", + AstProp::Arguments => "arguments", + AstProp::Asserts => "asserts", + AstProp::Async => "async", + AstProp::Attributes => "attributes", + AstProp::Await => "await", + AstProp::Block => "block", + AstProp::Body => "body", + AstProp::Callee => "callee", + AstProp::Cases => "cases", + AstProp::Children => "children", + AstProp::CheckType => "checkType", + AstProp::ClosingElement => "closingElement", + AstProp::ClosingFragment => "closingFragment", + AstProp::Computed => "computed", + AstProp::Consequent => "consequent", + AstProp::Const => "const", + AstProp::Constraint => "constraint", + AstProp::Cooked => "cooked", + AstProp::Declaration => "declaration", + AstProp::Declarations => "declarations", + AstProp::Declare => "declare", + AstProp::Default => "default", + AstProp::Definite => "definite", + AstProp::Delegate => "delegate", + AstProp::Discriminant => "discriminant", + AstProp::Elements => "elements", + AstProp::ElementType => "elementType", + AstProp::ElementTypes => "elementTypes", + AstProp::ExprName => "exprName", + AstProp::Expression => "expression", + AstProp::Expressions => "expressions", + AstProp::Exported => "exported", + AstProp::Extends => "extends", + AstProp::ExtendsType => "extendsType", + AstProp::FalseType => "falseType", + AstProp::Finalizer => "finalizer", + AstProp::Flags => "flags", + AstProp::Generator => "generator", + AstProp::Handler => "handler", + AstProp::Id => "id", + AstProp::In => "in", + AstProp::IndexType => "indexType", + AstProp::Init => "init", + AstProp::Initializer => "initializer", + AstProp::Implements => "implements", + AstProp::Key => "key", + AstProp::Kind => "kind", + AstProp::Label => "label", + AstProp::Left => "left", + AstProp::Literal => "literal", + AstProp::Local => "local", + AstProp::Members => "members", + AstProp::Meta => "meta", + AstProp::Method => "method", + AstProp::Name => "name", + AstProp::Namespace => "namespace", + AstProp::NameType => "nameType", + AstProp::Object => "object", + AstProp::ObjectType => "objectType", + AstProp::OpeningElement => "openingElement", + AstProp::OpeningFragment => "openingFragment", + AstProp::Operator => "operator", + AstProp::Optional => "optional", + AstProp::Out => "out", + AstProp::Param => "param", + AstProp::ParameterName => "parameterName", + AstProp::Params => "params", + AstProp::Pattern => "pattern", + AstProp::Prefix => "prefix", + AstProp::Properties => "properties", + AstProp::Property => "property", + AstProp::Qualifier => "qualifier", + AstProp::Quasi => "quasi", + AstProp::Quasis => "quasis", + AstProp::Raw => "raw", + AstProp::Readonly => "readonly", + AstProp::ReturnType => "returnType", + AstProp::Right => "right", + AstProp::SelfClosing => "selfClosing", + AstProp::Shorthand => "shorthand", + AstProp::Source => "source", + AstProp::SourceType => "sourceType", + AstProp::Specifiers => "specifiers", + AstProp::Static => "static", + AstProp::SuperClass => "superClass", + AstProp::SuperTypeArguments => "superTypeArguments", + AstProp::Tag => "tag", + AstProp::Tail => "tail", + AstProp::Test => "test", + AstProp::TrueType => "trueType", + 
AstProp::TypeAnnotation => "typeAnnotation", + AstProp::TypeArguments => "typeArguments", + AstProp::TypeName => "typeName", + AstProp::TypeParameter => "typeParameter", + AstProp::TypeParameters => "typeParameters", + AstProp::Types => "types", + AstProp::Update => "update", + AstProp::Value => "value", + }; + + write!(f, "{}", s) + } +} + +impl From<AstProp> for u8 { + fn from(m: AstProp) -> u8 { + m as u8 + } +} + +pub struct TsEsTreeBuilder { + ctx: SerializeCtx, +} + +// TODO: Add a builder API to make it easier to convert from different source +// ast formats. +impl TsEsTreeBuilder { + pub fn new() -> Self { + // Max values + // TODO: Maybe there is a rust macro to grab the last enum value? + let kind_count: u8 = AstNode::TSEnumBody.into(); + let prop_count: u8 = AstProp::Value.into(); + Self { + ctx: SerializeCtx::new(kind_count, prop_count), + } + } +} + +impl AstBufSerializer<AstNode, AstProp> for TsEsTreeBuilder { + fn header( + &mut self, + kind: AstNode, + parent: NodeRef, + span: &Span, + prop_count: usize, + ) -> NodeRef { + self.ctx.header(kind, parent, span, prop_count) + } + + fn ref_field(&mut self, prop: AstProp) -> FieldPos { + FieldPos(self.ctx.ref_field(prop)) + } + + fn ref_vec_field(&mut self, prop: AstProp, len: usize) -> FieldArrPos { + FieldArrPos(self.ctx.ref_vec_field(prop, len)) + } + + fn str_field(&mut self, prop: AstProp) -> StrPos { + StrPos(self.ctx.str_field(prop)) + } + + fn bool_field(&mut self, prop: AstProp) -> BoolPos { + BoolPos(self.ctx.bool_field(prop)) + } + + fn undefined_field(&mut self, prop: AstProp) -> UndefPos { + UndefPos(self.ctx.undefined_field(prop)) + } + + fn null_field(&mut self, prop: AstProp) -> NullPos { + NullPos(self.ctx.null_field(prop)) + } + + fn write_ref(&mut self, pos: FieldPos, value: NodeRef) { + self.ctx.write_ref(pos.0, value); + } + + fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>) { + self.ctx.write_maybe_ref(pos.0, value); + } + + fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>) { + self.ctx.write_refs(pos.0, value); + } + + fn write_str(&mut self, pos: StrPos, value: &str) { + self.ctx.write_str(pos.0, value); + } + + fn write_bool(&mut self, pos: BoolPos, value: bool) { + self.ctx.write_bool(pos.0, value); + } + + fn serialize(&mut self) -> Vec<u8> { + self.ctx.serialize() + } +} diff --git a/cli/tools/lint/linter.rs b/cli/tools/lint/linter.rs index 2c2bc43acb..a10ad6479e 100644 --- a/cli/tools/lint/linter.rs +++ b/cli/tools/lint/linter.rs @@ -15,8 +15,9 @@ use deno_lint::linter::LintConfig as DenoLintConfig; use deno_lint::linter::LintFileOptions; use deno_lint::linter::Linter as DenoLintLinter; use deno_lint::linter::LinterOptions; +use deno_path_util::fs::atomic_write_file_with_retries; +use deno_runtime::deno_fs::FsSysTraitsAdapter; -use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::specifier_from_file_path; use super::rules::FileOrPackageLintRule; @@ -176,8 +177,9 @@ impl CliLinter { if fix_iterations > 0 { // everything looks good and the file still parses, so write it out atomic_write_file_with_retries( + &FsSysTraitsAdapter::new_real(), file_path, - source.text().as_ref(), + source.text().as_bytes(), crate::cache::CACHE_PERM, ) .context("Failed writing fix to file.")?; diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs index c276c07759..cc7423c637 100644 --- a/cli/tools/lint/mod.rs +++ b/cli/tools/lint/mod.rs @@ -21,6 +21,7 @@ use deno_core::unsync::future::SharedLocal; use deno_graph::ModuleGraph; use deno_lint::diagnostic::LintDiagnostic; use deno_lint::linter::LintConfig as
DenoLintConfig; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use log::debug; use reporters::create_reporter; use reporters::LintReporter; @@ -51,10 +52,13 @@ use crate::util::fs::canonicalize_path; use crate::util::path::is_script_ext; use crate::util::sync::AtomicFlag; +mod ast_buffer; mod linter; mod reporters; mod rules; +// TODO(bartlomieju): remove once we wire plugins through the CLI linter +pub use ast_buffer::serialize_ast_to_buffer; pub use linter::CliLinter; pub use linter::CliLinterOptions; pub use rules::collect_no_slow_type_diagnostics; @@ -449,7 +453,7 @@ fn collect_lint_files( .ignore_node_modules() .use_gitignore() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files) + .collect_file_patterns(&FsSysTraitsAdapter::new_real(), files) } #[allow(clippy::print_stdout)] diff --git a/cli/tools/registry/paths.rs b/cli/tools/registry/paths.rs index 8b6c05fc01..b607c0924c 100644 --- a/cli/tools/registry/paths.rs +++ b/cli/tools/registry/paths.rs @@ -11,6 +11,7 @@ use deno_ast::ModuleSpecifier; use deno_config::glob::FileCollector; use deno_config::glob::FilePatterns; use deno_core::error::AnyError; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use thiserror::Error; use crate::args::CliOptions; @@ -323,11 +324,11 @@ fn collect_paths( file_patterns: FilePatterns, ) -> Result<Vec<PathBuf>, AnyError> { FileCollector::new(|e| { - if !e.metadata.is_file { + if !e.metadata.file_type().is_file() { if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) { diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType { specifier, - kind: if e.metadata.is_symlink { + kind: if e.metadata.file_type().is_symlink() { "symlink".to_string() } else { "Unknown".to_string() @@ -345,5 +346,5 @@ fn collect_paths( .ignore_node_modules() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .use_gitignore() - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, file_patterns) + .collect_file_patterns(&FsSysTraitsAdapter::new_real(), file_patterns) } diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs index 791e54c67c..afa9b0222c 100644 --- a/cli/tools/registry/pm.rs +++ b/cli/tools/registry/pm.rs @@ -15,6 +15,7 @@ use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use deno_semver::StackString; use deno_semver::Version; use deno_semver::VersionReq; use deps::KeyPath; @@ -283,7 +284,7 @@ fn package_json_dependency_entry( (npm_package.into(), selected.version_req) } else { ( - selected.import_name, + selected.import_name.into_string(), format!("npm:{}@{}", npm_package, selected.version_req), ) } @@ -292,7 +293,7 @@ fn package_json_dependency_entry( let scope_replaced = jsr_package.replace('/', "__"); let version_req = format!("npm:@jsr/{scope_replaced}@{}", selected.version_req); - (selected.import_name, version_req) + (selected.import_name.into_string(), version_req) } else { (selected.package_name, selected.version_req) } @@ -549,10 +550,10 @@ pub async fn add( } struct SelectedPackage { - import_name: String, + import_name: StackString, package_name: String, version_req: String, - selected_version: String, + selected_version: StackString, } enum NotFoundHelp { @@ -683,7 +684,7 @@ async fn find_package_and_select_version_for_req( import_name: add_package_req.alias, package_name: prefixed_name, version_req: format!("{}{}", range_symbol, &nv.version), -
selected_version: nv.version.to_string(), + selected_version: nv.version.to_custom_string::<StackString>(), })) } @@ -705,7 +706,7 @@ enum AddRmPackageReqValue { #[derive(Debug, PartialEq, Eq)] pub struct AddRmPackageReq { - alias: String, + alias: StackString, value: AddRmPackageReqValue, } @@ -753,7 +754,11 @@ impl AddRmPackageReq { return Ok(Err(PackageReq::from_str(entry_text)?)); } - (maybe_prefix.unwrap(), Some(alias.to_string()), entry_text) + ( + maybe_prefix.unwrap(), + Some(StackString::from(alias)), + entry_text, + ) } None => return Ok(Err(PackageReq::from_str(entry_text)?)), }, @@ -765,7 +770,7 @@ impl AddRmPackageReq { JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?; let package_req = req_ref.into_inner().req; Ok(Ok(AddRmPackageReq { - alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()), + alias: maybe_alias.unwrap_or_else(|| package_req.name.clone()), value: AddRmPackageReqValue::Jsr(package_req), })) } @@ -785,7 +790,7 @@ impl AddRmPackageReq { ); } Ok(Ok(AddRmPackageReq { - alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()), + alias: maybe_alias.unwrap_or_else(|| package_req.name.clone()), value: AddRmPackageReqValue::Npm(package_req), })) } @@ -878,14 +883,14 @@ mod test { assert_eq!( AddRmPackageReq::parse("jsr:foo").unwrap().unwrap(), AddRmPackageReq { - alias: "foo".to_string(), + alias: "foo".into(), value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); assert_eq!( AddRmPackageReq::parse("alias@jsr:foo").unwrap().unwrap(), AddRmPackageReq { - alias: "alias".to_string(), + alias: "alias".into(), value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); @@ -894,7 +899,7 @@ mod test { .unwrap() .unwrap(), AddRmPackageReq { - alias: "@alias/pkg".to_string(), + alias: "@alias/pkg".into(), value: AddRmPackageReqValue::Npm( PackageReq::from_str("foo@latest").unwrap() ) @@ -905,7 +910,7 @@ mod test { .unwrap() .unwrap(), AddRmPackageReq { - alias: "@alias/pkg".to_string(), + alias: "@alias/pkg".into(), value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); @@ -914,7 +919,7 @@ mod test { .unwrap() .unwrap(), AddRmPackageReq { - alias: "alias".to_string(), + alias: "alias".into(), value: AddRmPackageReqValue::Jsr( PackageReq::from_str("foo@^1.5.0").unwrap() ) diff --git a/cli/tools/registry/pm/deps.rs b/cli/tools/registry/pm/deps.rs index bb03e97f2d..ffa53417e9 100644 --- a/cli/tools/registry/pm/deps.rs +++ b/cli/tools/registry/pm/deps.rs @@ -27,6 +27,7 @@ use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; +use deno_semver::StackString; use deno_semver::Version; use deno_semver::VersionReq; use import_map::ImportMap; @@ -139,13 +140,7 @@ pub enum KeyPart { Scopes, Dependencies, DevDependencies, - String(String), -} - -impl From<String> for KeyPart { - fn from(value: String) -> Self { - KeyPart::String(value) - } + String(StackString), } impl From<PackageJsonDepKind> for KeyPart { @@ -164,7 +159,7 @@ impl KeyPart { KeyPart::Scopes => "scopes", KeyPart::Dependencies => "dependencies", KeyPart::DevDependencies => "devDependencies", - KeyPart::String(s) => s, + KeyPart::String(s) => s.as_str(), } } } @@ -217,12 +212,12 @@ fn import_map_entries( .chain(import_map.scopes().flat_map(|scope| { let path = KeyPath::from_parts([ KeyPart::Scopes, - scope.raw_key.to_string().into(), + KeyPart::String(scope.raw_key.into()), ]); scope.imports.entries().map(move |entry| { let mut full_path = path.clone(); -
full_path.push(KeyPart::String(entry.raw_key.to_string())); + full_path.push(KeyPart::String(entry.raw_key.into())); (full_path, entry) }) })) @@ -338,7 +333,7 @@ fn add_deps_from_package_json( package_json: &PackageJsonRc, mut filter: impl DepFilter, package_dep_kind: PackageJsonDepKind, - package_json_deps: PackageJsonDepsMap, + package_json_deps: &PackageJsonDepsMap, deps: &mut Vec<Dep>, ) { for (k, v) in package_json_deps { @@ -353,7 +348,7 @@ fn add_deps_from_package_json( deno_package_json::PackageJsonDepValue::Req(req) => { let alias = k.as_str(); let alias = (alias != req.name).then(|| alias.to_string()); - if !filter.should_include(alias.as_deref(), &req, DepKind::Npm) { + if !filter.should_include(alias.as_deref(), req, DepKind::Npm) { continue; } let id = DepId(deps.len()); @@ -362,9 +357,12 @@ fn add_deps_from_package_json( kind: DepKind::Npm, location: DepLocation::PackageJson( package_json.clone(), - KeyPath::from_parts([package_dep_kind.into(), k.into()]), + KeyPath::from_parts([ + package_dep_kind.into(), + KeyPart::String(k.clone()), + ]), ), - req, + req: req.clone(), alias, }) } @@ -377,14 +375,14 @@ fn add_deps_from_package_json( package_json, filter, PackageJsonDepKind::Normal, - package_json_deps.dependencies, + &package_json_deps.dependencies, deps, ); iterate( package_json, filter, PackageJsonDepKind::Dev, - package_json_deps.dev_dependencies, + &package_json_deps.dev_dependencies, deps, ); } diff --git a/cli/tools/registry/pm/outdated.rs b/cli/tools/registry/pm/outdated.rs index f767eb1522..20a6043f26 100644 --- a/cli/tools/registry/pm/outdated.rs +++ b/cli/tools/registry/pm/outdated.rs @@ -8,6 +8,7 @@ use deno_core::anyhow::bail; use deno_core::error::AnyError; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use deno_semver::StackString; use deno_semver::VersionReq; use deno_terminal::colors; @@ -31,7 +32,7 @@ struct OutdatedPackage { latest: String, semver_compatible: String, current: String, - name: String, + name: StackString, } #[allow(clippy::print_stdout)] diff --git a/cli/tools/registry/unfurl.rs b/cli/tools/registry/unfurl.rs index bf6aaaf50d..ca50775717 100644 --- a/cli/tools/registry/unfurl.rs +++ b/cli/tools/registry/unfurl.rs @@ -663,6 +663,7 @@ mod tests { use deno_config::workspace::ResolverWorkspaceJsrPackage; use deno_core::serde_json::json; use deno_core::url::Url; + use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_fs::RealFs; use deno_runtime::deno_node::PackageJson; use deno_semver::Version; @@ -722,10 +723,9 @@ mod tests { vec![Arc::new(package_json)], deno_config::workspace::PackageJsonDepResolution::Enabled, ); - let fs = Arc::new(RealFs); let unfurler = SpecifierUnfurler::new( Some(Arc::new(CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new(fs), + SloppyImportsCachedFs::new(FsSysTraitsAdapter::new_real()), ))), Arc::new(workspace_resolver), true, @@ -863,7 +863,7 @@ const warn2 = await import(`${expr}`); ], deno_config::workspace::PackageJsonDepResolution::Enabled, ); - let fs = Arc::new(RealFs); + let fs = FsSysTraitsAdapter(Arc::new(RealFs)); let unfurler = SpecifierUnfurler::new( Some(Arc::new(CliSloppyImportsResolver::new( SloppyImportsCachedFs::new(fs), diff --git a/cli/tools/task.rs b/cli/tools/task.rs index ec9b238847..740b0ce843 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -231,7 +231,7 @@ pub async fn execute_script( &Url::from_directory_path(cli_options.initial_cwd()).unwrap(), "", &TaskDefinition { - command: task_flags.task.as_ref().unwrap().to_string(), + command:
Some(task_flags.task.as_ref().unwrap().to_string()), dependencies: vec![], description: None, }, @@ -448,6 +448,16 @@ impl<'a> TaskRunner<'a> { kill_signal: KillSignal, argv: &'a [String], ) -> Result { + let Some(command) = &definition.command else { + log::info!( + "{} {} {}", + colors::green("Task"), + colors::cyan(task_name), + colors::gray("(no command)") + ); + return Ok(0); + }; + if let Some(npm_resolver) = self.npm_resolver.as_managed() { npm_resolver.ensure_top_level_package_json_install().await?; npm_resolver @@ -469,7 +479,7 @@ impl<'a> TaskRunner<'a> { self .run_single(RunSingleOptions { task_name, - script: &definition.command, + script: command, cwd: &cwd, custom_commands, kill_signal, @@ -837,7 +847,7 @@ fn print_available_tasks( is_deno: false, name: name.to_string(), task: deno_config::deno_json::TaskDefinition { - command: script.to_string(), + command: Some(script.to_string()), dependencies: vec![], description: None, }, @@ -873,11 +883,13 @@ fn print_available_tasks( )?; } } - writeln!( - writer, - " {}", - strip_ansi_codes_and_escape_control_chars(&desc.task.command) - )?; + if let Some(command) = &desc.task.command { + writeln!( + writer, + " {}", + strip_ansi_codes_and_escape_control_chars(command) + )?; + }; if !desc.task.dependencies.is_empty() { let dependencies = desc .task diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs index 3ad3724bc7..8d8aaa659b 100644 --- a/cli/tools/test/mod.rs +++ b/cli/tools/test/mod.rs @@ -589,7 +589,10 @@ async fn configure_main_worker( WorkerExecutionMode::Test, specifier.clone(), permissions_container, - vec![ops::testing::deno_test::init_ops(worker_sender.sender)], + vec![ + ops::testing::deno_test::init_ops(worker_sender.sender), + ops::lint::deno_lint::init_ops(), + ], Stdio { stdin: StdioPipe::inherit(), stdout: StdioPipe::file(worker_sender.stdout), diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index cb85859f7a..b3d7618be9 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -21,6 +21,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::unsync::spawn; use deno_core::url::Url; +use deno_semver::SmallStackString; use deno_semver::Version; use once_cell::sync::Lazy; use std::borrow::Cow; @@ -255,7 +256,7 @@ async fn print_release_notes( let is_deno_2_rc = new_semver.major == 2 && new_semver.minor == 0 && new_semver.patch == 0 - && new_semver.pre.first() == Some(&"rc".to_string()); + && new_semver.pre.first().map(|s| s.as_str()) == Some("rc"); if is_deno_2_rc || is_switching_from_deno1_to_deno2 { log::info!( @@ -674,7 +675,7 @@ impl RequestedVersion { ); }; - if semver.pre.contains(&"rc".to_string()) { + if semver.pre.contains(&SmallStackString::from_static("rc")) { (ReleaseChannel::Rc, passed_version) } else { (ReleaseChannel::Stable, passed_version) diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index 7e8a407cf9..f7862c95e4 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -41,6 +41,13 @@ delete Object.prototype.__proto__; "listen", "listenDatagram", "openKv", + "connectQuic", + "listenQuic", + "QuicBidirectionalStream", + "QuicConn", + "QuicListener", + "QuicReceiveStream", + "QuicSendStream", ]); const unstableMsgSuggestion = "If not, try changing the 'lib' compiler option to include 'deno.unstable' " + diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 4c18d1a2b0..26cf385734 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -4,7 +4,6 @@ use crate::args::TsConfig; use crate::args::TypeCheckMode; use 
crate::cache::FastInsecureHasher; use crate::cache::ModuleInfoCache; -use crate::node; use crate::npm::CliNpmResolver; use crate::resolver::CjsTracker; use crate::util::checksum; @@ -35,12 +34,13 @@ use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; use deno_resolver::npm::ResolvePkgFolderFromDenoReqError; -use deno_runtime::deno_fs; +use deno_runtime::deno_fs::FsSysTraitsAdapter; use deno_runtime::deno_node::NodeResolver; use deno_semver::npm::NpmPackageReqReference; use node_resolver::errors::NodeJsErrorCode; use node_resolver::errors::NodeJsErrorCoded; use node_resolver::errors::PackageSubpathResolveError; +use node_resolver::resolve_specifier_into_node_modules; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; use once_cell::sync::Lazy; @@ -660,9 +660,9 @@ fn op_load_inner( None } else { // means it's Deno code importing an npm module - let specifier = node::resolve_specifier_into_node_modules( + let specifier = resolve_specifier_into_node_modules( + &FsSysTraitsAdapter::new_real(), &module.specifier, - &deno_fs::RealFs, ); Some(Cow::Owned(load_from_node_modules( &specifier, @@ -924,9 +924,9 @@ fn resolve_graph_specifier_types( Some(Module::External(module)) => { // we currently only use "External" for when the module is in an npm package Ok(state.maybe_npm.as_ref().map(|_| { - let specifier = node::resolve_specifier_into_node_modules( + let specifier = resolve_specifier_into_node_modules( + &FsSysTraitsAdapter::new_real(), &module.specifier, - &deno_fs::RealFs, ); into_specifier_and_media_type(Some(specifier)) })) diff --git a/cli/util/fs.rs b/cli/util/fs.rs index ba84a0e8f3..58b5fc72c6 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -1,9 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use std::fs::OpenOptions; use std::io::Error; use std::io::ErrorKind; -use std::io::Write; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; @@ -19,185 +17,12 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::unsync::spawn_blocking; use deno_core::ModuleSpecifier; -use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsSysTraitsAdapter; -use crate::util::path::get_atomic_file_path; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressMessagePrompt; -/// Writes the file to the file system at a temporary path, then -/// renames it to the destination in a single sys call in order -/// to never leave the file system in a corrupted state. -/// -/// This also handles creating the directory if a NotFound error -/// occurs. 
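// The doc comment above describes the classic temp-write-then-rename trick: the
// data is first written to a sibling temp file, then moved over the destination
// in a single rename syscall so readers never observe a half-written file. A
// minimal self-contained sketch of the pattern (illustrative only; this helper
// name and signature are not part of this codebase):
//
//   fn atomic_write(path: &std::path::Path, data: &[u8]) -> std::io::Result<()> {
//     let tmp = path.with_extension("tmp"); // temp sibling on the same filesystem
//     std::fs::write(&tmp, data)?;          // write the full contents first
//     std::fs::rename(&tmp, path)           // then swap into place atomically
//   }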
-pub fn atomic_write_file_with_retries>( - file_path: &Path, - data: T, - mode: u32, -) -> std::io::Result<()> { - struct RealAtomicWriteFileFs { - mode: u32, - } - - impl AtomicWriteFileFs for RealAtomicWriteFileFs { - fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()> { - write_file(path, bytes, self.mode) - } - fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()> { - std::fs::rename(from, to) - } - fn remove_file(&self, path: &Path) -> std::io::Result<()> { - std::fs::remove_file(path) - } - fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()> { - std::fs::create_dir_all(dir_path) - } - fn path_exists(&self, path: &Path) -> bool { - path.exists() - } - } - - atomic_write_file_with_retries_and_fs( - &RealAtomicWriteFileFs { mode }, - file_path, - data.as_ref(), - ) -} - -pub trait AtomicWriteFileFs { - fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()>; - fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()>; - fn remove_file(&self, path: &Path) -> std::io::Result<()>; - fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()>; - fn path_exists(&self, path: &Path) -> bool; -} - -pub struct AtomicWriteFileFsAdapter<'a> { - pub fs: &'a dyn FileSystem, - pub write_mode: u32, -} - -impl<'a> AtomicWriteFileFs for AtomicWriteFileFsAdapter<'a> { - fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()> { - self - .fs - .write_file_sync( - path, - deno_runtime::deno_fs::OpenOptions::write( - true, - false, - false, - Some(self.write_mode), - ), - None, - bytes, - ) - .map_err(|e| e.into_io_error()) - } - - fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()> { - self.fs.rename_sync(from, to).map_err(|e| e.into_io_error()) - } - - fn remove_file(&self, path: &Path) -> std::io::Result<()> { - self - .fs - .remove_sync(path, false) - .map_err(|e| e.into_io_error()) - } - - fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()> { - self - .fs - .mkdir_sync(dir_path, /* recursive */ true, None) - .map_err(|e| e.into_io_error()) - } - - fn path_exists(&self, path: &Path) -> bool { - self.fs.exists_sync(path) - } -} - -pub fn atomic_write_file_with_retries_and_fs>( - fs: &impl AtomicWriteFileFs, - file_path: &Path, - data: T, -) -> std::io::Result<()> { - let mut count = 0; - loop { - match atomic_write_file(fs, file_path, data.as_ref()) { - Ok(()) => return Ok(()), - Err(err) => { - if count >= 5 { - // too many retries, return the error - return Err(err); - } - count += 1; - let sleep_ms = std::cmp::min(50, 10 * count); - std::thread::sleep(std::time::Duration::from_millis(sleep_ms)); - } - } - } -} - -/// Writes the file to the file system at a temporary path, then -/// renames it to the destination in a single sys call in order -/// to never leave the file system in a corrupted state. -/// -/// This also handles creating the directory if a NotFound error -/// occurs. 
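// Note on the retry wrapper deleted above (`atomic_write_file_with_retries_and_fs`):
// it re-attempts the whole temp-write-and-rename on any error, sleeping
// `min(50, 10 * attempt)` ms between tries, i.e. 10, 20, 30, 40, 50 ms across the
// five retries before surfacing the error.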
-fn atomic_write_file( - fs: &impl AtomicWriteFileFs, - file_path: &Path, - data: &[u8], -) -> std::io::Result<()> { - fn atomic_write_file_raw( - fs: &impl AtomicWriteFileFs, - temp_file_path: &Path, - file_path: &Path, - data: &[u8], - ) -> std::io::Result<()> { - fs.write_file(temp_file_path, data)?; - fs.rename_file(temp_file_path, file_path) - .inspect_err(|_err| { - // clean up the created temp file on error - let _ = fs.remove_file(temp_file_path); - }) - } - - let temp_file_path = get_atomic_file_path(file_path); - - if let Err(write_err) = - atomic_write_file_raw(fs, &temp_file_path, file_path, data) - { - if write_err.kind() == ErrorKind::NotFound { - let parent_dir_path = file_path.parent().unwrap(); - match fs.create_dir_all(parent_dir_path) { - Ok(()) => { - return atomic_write_file_raw(fs, &temp_file_path, file_path, data) - .map_err(|err| add_file_context_to_err(file_path, err)); - } - Err(create_err) => { - if !fs.path_exists(parent_dir_path) { - return Err(Error::new( - create_err.kind(), - format!( - "{:#} (for '{}')\nCheck the permission of the directory.", - create_err, - parent_dir_path.display() - ), - )); - } - } - } - } - return Err(add_file_context_to_err(file_path, write_err)); - } - Ok(()) -} - /// Creates a std::fs::File handling if the parent does not exist. pub fn create_file(file_path: &Path) -> std::io::Result { match std::fs::File::create(file_path) { @@ -236,45 +61,6 @@ fn add_file_context_to_err(file_path: &Path, err: Error) -> Error { ) } -pub fn write_file>( - filename: &Path, - data: T, - mode: u32, -) -> std::io::Result<()> { - write_file_2(filename, data, true, mode, true, false) -} - -pub fn write_file_2>( - filename: &Path, - data: T, - update_mode: bool, - mode: u32, - is_create: bool, - is_append: bool, -) -> std::io::Result<()> { - let mut file = OpenOptions::new() - .read(false) - .write(true) - .append(is_append) - .truncate(!is_append) - .create(is_create) - .open(filename)?; - - if update_mode { - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let mode = mode & 0o777; - let permissions = PermissionsExt::from_mode(mode); - file.set_permissions(permissions)?; - } - #[cfg(not(unix))] - let _ = mode; - } - - file.write_all(data.as_ref()) -} - /// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. pub fn canonicalize_path(path: &Path) -> Result { Ok(deno_path_util::strip_unc_prefix(path.canonicalize()?)) @@ -289,16 +75,10 @@ pub fn canonicalize_path(path: &Path) -> Result { pub fn canonicalize_path_maybe_not_exists( path: &Path, ) -> Result { - deno_path_util::canonicalize_path_maybe_not_exists(path, &canonicalize_path) -} - -pub fn canonicalize_path_maybe_not_exists_with_fs( - path: &Path, - fs: &dyn FileSystem, -) -> Result { - deno_path_util::canonicalize_path_maybe_not_exists(path, &|path| { - fs.realpath_sync(path).map_err(|err| err.into_io_error()) - }) + deno_path_util::fs::canonicalize_path_maybe_not_exists( + &FsSysTraitsAdapter::new_real(), + path, + ) } /// Collects module specifiers that satisfy the given predicate as a file path, by recursively walking `include`. 
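// With this change, path canonicalization delegates to the sys_traits-based
// helper in deno_path_util instead of a locally supplied closure. A hedged
// sketch of the new call pattern, using only signatures visible in this diff
// (`some_path` is a placeholder):
//
//   use deno_runtime::deno_fs::FsSysTraitsAdapter;
//
//   let sys = FsSysTraitsAdapter::new_real();
//   let resolved =
//     deno_path_util::fs::canonicalize_path_maybe_not_exists(&sys, some_path)?;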
@@ -346,7 +126,7 @@ pub fn collect_specifiers( .ignore_git_folder() .ignore_node_modules() .set_vendor_folder(vendor_folder) - .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)?; + .collect_file_patterns(&FsSysTraitsAdapter::new_real(), files)?; let mut collected_files_as_urls = collected_files .iter() .map(|f| specifier_from_file_path(f).unwrap()) @@ -418,7 +198,13 @@ mod clone_dir_imp { from: &std::path::Path, to: &std::path::Path, ) -> Result<(), deno_core::error::AnyError> { - if let Err(e) = super::hard_link_dir_recursive(from, to) { + use deno_runtime::deno_fs::FsSysTraitsAdapter; + + if let Err(e) = deno_npm_cache::hard_link_dir_recursive( + &FsSysTraitsAdapter::new_real(), + from, + to, + ) { log::debug!("Failed to hard link dir {:?} to {:?}: {}", from, to, e); super::copy_dir_recursive(from, to)?; } @@ -465,84 +251,6 @@ pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> { Ok(()) } -/// Hardlinks the files in one directory to another directory. -/// -/// Note: Does not handle symlinks. -pub fn hard_link_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> { - std::fs::create_dir_all(to) - .with_context(|| format!("Creating {}", to.display()))?; - let read_dir = std::fs::read_dir(from) - .with_context(|| format!("Reading {}", from.display()))?; - - for entry in read_dir { - let entry = entry?; - let file_type = entry.file_type()?; - let new_from = from.join(entry.file_name()); - let new_to = to.join(entry.file_name()); - - if file_type.is_dir() { - hard_link_dir_recursive(&new_from, &new_to).with_context(|| { - format!("Dir {} to {}", new_from.display(), new_to.display()) - })?; - } else if file_type.is_file() { - // note: chance for race conditions here between attempting to create, - // then removing, then attempting to create. There doesn't seem to be - // a way to hard link with overwriting in Rust, but maybe there is some - // way with platform specific code. The workaround here is to handle - // scenarios where something else might create or remove files. - if let Err(err) = std::fs::hard_link(&new_from, &new_to) { - if err.kind() == ErrorKind::AlreadyExists { - if let Err(err) = std::fs::remove_file(&new_to) { - if err.kind() == ErrorKind::NotFound { - // Assume another process/thread created this hard link to the file we are wanting - // to remove then sleep a little bit to let the other process/thread move ahead - // faster to reduce contention. - std::thread::sleep(Duration::from_millis(10)); - } else { - return Err(err).with_context(|| { - format!( - "Removing file to hard link {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - - // Always attempt to recreate the hardlink. In contention scenarios, the other process - // might have been killed or exited after removing the file, but before creating the hardlink - if let Err(err) = std::fs::hard_link(&new_from, &new_to) { - // Assume another process/thread created this hard link to the file we are wanting - // to now create then sleep a little bit to let the other process/thread move ahead - // faster to reduce contention. 
- if err.kind() == ErrorKind::AlreadyExists { - std::thread::sleep(Duration::from_millis(10)); - } else { - return Err(err).with_context(|| { - format!( - "Hard linking {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - } else { - return Err(err).with_context(|| { - format!( - "Hard linking {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - } - } - - Ok(()) -} - pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> { let err_mapper = |err: Error, kind: Option| { Error::new( diff --git a/cli/util/path.rs b/cli/util/path.rs index de72843406..539e1235a8 100644 --- a/cli/util/path.rs +++ b/cli/util/path.rs @@ -1,7 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use std::borrow::Cow; -use std::fmt::Write; use std::path::Path; use std::path::PathBuf; @@ -52,19 +51,6 @@ pub fn get_extension(file_path: &Path) -> Option { .map(|e| e.to_lowercase()); } -pub fn get_atomic_file_path(file_path: &Path) -> PathBuf { - let rand = gen_rand_path_component(); - let extension = format!("{rand}.tmp"); - file_path.with_extension(extension) -} - -fn gen_rand_path_component() -> String { - (0..4).fold(String::with_capacity(8), |mut output, _| { - write!(&mut output, "{:02x}", rand::random::()).unwrap(); - output - }) -} - /// TypeScript figures out the type of file based on the extension, but we take /// other factors into account like the file headers. The hack here is to map the /// specifier passed to TypeScript to a new specifier with the file extension. diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs index 06b311e150..107b78a213 100644 --- a/cli/util/text_encoding.rs +++ b/cli/util/text_encoding.rs @@ -140,23 +140,23 @@ mod tests { #[test] fn test_source_map_from_code() { let to_string = - |bytes: Vec| -> String { String::from_utf8(bytes).unwrap() }; + |bytes: Vec| -> String { String::from_utf8(bytes.to_vec()).unwrap() }; assert_eq!( source_map_from_code( - b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=", + b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=" ).map(to_string), Some("testingtesting".to_string()) ); assert_eq!( source_map_from_code( - b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n \n", + b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n \n" ).map(to_string), Some("testingtesting".to_string()) ); assert_eq!( source_map_from_code( - b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n test\n", - ), + b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n test\n" + ).map(to_string), None ); assert_eq!( @@ -164,7 +164,7 @@ mod tests { b"\"use strict\"; throw new Error(\"Hello world!\"); -//# sourceMappingURL=data:application/json;base64,{", +//# sourceMappingURL=data:application/json;base64,{" ), None ); diff --git a/cli/worker.rs b/cli/worker.rs index 0bbc27b29f..7653e72b75 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -612,6 +612,7 @@ impl CliMainWorkerFactory { serve_port: shared.options.serve_port, serve_host: shared.options.serve_host.clone(), otel_config: shared.otel_config.clone(), + close_on_idle: true, }, extensions: custom_extensions, startup_snapshot: crate::js::deno_isolate_init(), @@ -655,7 +656,10 @@ impl CliMainWorkerFactory { "40_test_common.js", "40_test.js", "40_bench.js", - "40_jupyter.js" + "40_jupyter.js", + // TODO(bartlomieju): probably shouldn't include these files here? 
+ "40_lint_selector.js", + "40_lint.js" ); } @@ -812,6 +816,7 @@ fn create_web_worker_callback( serve_port: shared.options.serve_port, serve_host: shared.options.serve_host.clone(), otel_config: shared.otel_config.clone(), + close_on_idle: args.close_on_idle, }, extensions: vec![], startup_snapshot: crate::js::deno_isolate_init(), diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 714f230cd2..4dea8f21e1 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.177.0" +version = "0.178.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 7c05996498..96aec27576 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.115.0" +version = "0.116.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/canvas/Cargo.toml b/ext/canvas/Cargo.toml index ac9b236a95..7c7cc49b7c 100644 --- a/ext/canvas/Cargo.toml +++ b/ext/canvas/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_canvas" -version = "0.52.0" +version = "0.53.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index df67b14a86..f68dd7d198 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.183.0" +version = "0.184.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cron/Cargo.toml b/ext/cron/Cargo.toml index c5408e450b..022a8418cf 100644 --- a/ext/cron/Cargo.toml +++ b/ext/cron/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cron" -version = "0.63.0" +version = "0.64.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 86d984a421..c283cc9277 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.197.0" +version = "0.198.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js index a34758d19a..bb2bee77e2 100644 --- a/ext/fetch/22_body.js +++ b/ext/fetch/22_body.js @@ -13,6 +13,7 @@ import { core, primordials } from "ext:core/mod.js"; const { + BadResourcePrototype, isAnyArrayBuffer, isArrayBuffer, isStringObject, @@ -26,6 +27,7 @@ const { JSONParse, ObjectDefineProperties, ObjectPrototypeIsPrototypeOf, + PromisePrototypeCatch, TypedArrayPrototypeGetBuffer, TypedArrayPrototypeGetByteLength, TypedArrayPrototypeGetByteOffset, @@ -160,7 +162,18 @@ class InnerBody { ) ) { readableStreamThrowIfErrored(this.stream); - return readableStreamCollectIntoUint8Array(this.stream); + return PromisePrototypeCatch( + readableStreamCollectIntoUint8Array(this.stream), + (e) => { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, e)) { + // TODO(kt3k): We probably like to pass e as `cause` if BadResource supports it. 
+ throw new e.constructor( + "Cannot read body as underlying resource unavailable", + ); + } + throw e; + }, + ); } else { this.streamOrStatic.consumed = true; return this.streamOrStatic.body; diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index fee21808e7..e6e4ded4af 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.207.0" +version = "0.208.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index afcbf7b4e6..9cd5c77013 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.170.0" +version = "0.171.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/30_fs.js b/ext/fs/30_fs.js index fc2b18be13..4e71acb1b2 100644 --- a/ext/fs/30_fs.js +++ b/ext/fs/30_fs.js @@ -77,6 +77,7 @@ const { Error, Function, MathTrunc, + Number, ObjectEntries, ObjectDefineProperty, ObjectPrototypeIsPrototypeOf, @@ -373,12 +374,12 @@ function parseFileInfo(response) { isDirectory: response.isDirectory, isSymlink: response.isSymlink, size: response.size, - mtime: response.mtimeSet === true ? new Date(response.mtime) : null, - atime: response.atimeSet === true ? new Date(response.atime) : null, + mtime: response.mtimeSet === true ? new Date(Number(response.mtime)) : null, + atime: response.atimeSet === true ? new Date(Number(response.atime)) : null, birthtime: response.birthtimeSet === true ? new Date(response.birthtime) : null, - ctime: response.ctimeSet === true ? new Date(response.ctime) : null, + ctime: response.ctimeSet === true ? new Date(Number(response.ctime)) : null, dev: response.dev, mode: response.mode, ino: unix ? response.ino : null, diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index 608554607c..8692f04a73 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.93.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -25,10 +25,12 @@ deno_io.workspace = true deno_path_util.workspace = true deno_permissions.workspace = true filetime.workspace = true +getrandom = "0.2" libc.workspace = true rand.workspace = true rayon = "1.8.0" serde.workspace = true +sys_traits.workspace = true thiserror.workspace = true [target.'cfg(unix)'.dependencies] diff --git a/ext/fs/interface.rs b/ext/fs/interface.rs index 28a49c5d9b..304c263614 100644 --- a/ext/fs/interface.rs +++ b/ext/fs/interface.rs @@ -5,6 +5,8 @@ use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +use std::time::Duration; +use std::time::SystemTime; use serde::Deserialize; use serde::Serialize; @@ -12,6 +14,8 @@ use serde::Serialize; use deno_io::fs::File; use deno_io::fs::FsResult; use deno_io::fs::FsStat; +use sys_traits::FsFile; +use sys_traits::FsFileSetPermissions; use crate::sync::MaybeSend; use crate::sync::MaybeSync; @@ -71,7 +75,7 @@ pub enum FsFileType { } /// WARNING: This is part of the public JS Deno API. 
-#[derive(Serialize)] +#[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub struct FsDirEntry { pub name: String, @@ -100,6 +104,56 @@ impl AccessCheckFn for T where { } +#[derive(Debug)] +pub struct FsStatSlim { + file_type: sys_traits::FileType, + modified: Result, +} + +impl FsStatSlim { + pub fn from_std(metadata: &std::fs::Metadata) -> Self { + Self { + file_type: metadata.file_type().into(), + modified: metadata.modified(), + } + } + + pub fn from_deno_fs_stat(data: &FsStat) -> Self { + FsStatSlim { + file_type: if data.is_file { + sys_traits::FileType::File + } else if data.is_directory { + sys_traits::FileType::Dir + } else if data.is_symlink { + sys_traits::FileType::Symlink + } else { + sys_traits::FileType::Unknown + }, + modified: data + .mtime + .map(|ms| SystemTime::UNIX_EPOCH + Duration::from_millis(ms)) + .ok_or_else(|| { + std::io::Error::new(std::io::ErrorKind::InvalidData, "No mtime") + }), + } + } +} + +impl sys_traits::FsMetadataValue for FsStatSlim { + #[inline] + fn file_type(&self) -> sys_traits::FileType { + self.file_type + } + + fn modified(&self) -> Result { + self + .modified + .as_ref() + .copied() + .map_err(|err| std::io::Error::new(err.kind(), err.to_string())) + } +} + pub type AccessCheckCb<'a> = &'a mut (dyn AccessCheckFn + 'a); #[async_trait::async_trait(?Send)] @@ -361,3 +415,289 @@ fn string_from_utf8_lossy(buf: Vec) -> String { Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(buf) }, } } + +// todo(dsherret): this is temporary. Instead of using the `FileSystem` trait implementation +// in the CLI, the CLI should instead create it's own file system using `sys_traits` traits +// then that can implement the `FileSystem` trait. Then this `FileSystem` trait can stay here +// for use only for `ext/fs` and not the entire CLI. 
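// A rough usage sketch for the adapter defined below (hedged; both construction
// forms appear elsewhere in this diff):
//
//   use std::sync::Arc;
//   use deno_runtime::deno_fs::{FsSysTraitsAdapter, RealFs};
//
//   let sys = FsSysTraitsAdapter::new_real();       // shorthand for the real FS
//   let sys = FsSysTraitsAdapter(Arc::new(RealFs)); // or wrap an explicit FileSystemRc
//
// Anything generic over the sys_traits traits (read, stat, open, rename, random,
// time, env) can then take `&sys` instead of threading a `&dyn FileSystem` around.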
+#[derive(Debug, Clone)] +pub struct FsSysTraitsAdapter(pub FileSystemRc); + +impl FsSysTraitsAdapter { + pub fn new_real() -> Self { + Self(crate::sync::new_rc(crate::RealFs)) + } +} + +impl sys_traits::BaseFsHardLink for FsSysTraitsAdapter { + #[inline] + fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> { + self + .0 + .link_sync(src, dst) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsRead for FsSysTraitsAdapter { + #[inline] + fn base_fs_read(&self, path: &Path) -> std::io::Result> { + self + .0 + .read_file_sync(path, None) + .map_err(|err| err.into_io_error()) + } +} + +#[derive(Debug)] +pub struct FsSysTraitsAdapterReadDirEntry { + path: PathBuf, + entry: FsDirEntry, +} + +impl sys_traits::FsDirEntry for FsSysTraitsAdapterReadDirEntry { + type Metadata = FsStatSlim; + + fn file_name(&self) -> Cow { + Cow::Borrowed(self.entry.name.as_ref()) + } + + fn file_type(&self) -> std::io::Result { + if self.entry.is_file { + Ok(sys_traits::FileType::File) + } else if self.entry.is_directory { + Ok(sys_traits::FileType::Dir) + } else if self.entry.is_symlink { + Ok(sys_traits::FileType::Symlink) + } else { + Ok(sys_traits::FileType::Unknown) + } + } + + fn metadata(&self) -> std::io::Result { + Ok(FsStatSlim { + file_type: self.file_type().unwrap(), + modified: Err(std::io::Error::new( + std::io::ErrorKind::Other, + "not supported", + )), + }) + } + + fn path(&self) -> Cow { + Cow::Borrowed(&self.path) + } +} + +impl sys_traits::BaseFsReadDir for FsSysTraitsAdapter { + type ReadDirEntry = FsSysTraitsAdapterReadDirEntry; + + fn base_fs_read_dir( + &self, + path: &Path, + ) -> std::io::Result< + Box>>, + > { + // todo(dsherret): needs to actually be iterable and not allocate a vector + let entries = self + .0 + .read_dir_sync(path) + .map_err(|err| err.into_io_error())?; + let parent_dir = path.to_path_buf(); + Ok(Box::new(entries.into_iter().map(move |entry| { + Ok(FsSysTraitsAdapterReadDirEntry { + path: parent_dir.join(&entry.name), + entry, + }) + }))) + } +} + +impl sys_traits::BaseFsCanonicalize for FsSysTraitsAdapter { + #[inline] + fn base_fs_canonicalize(&self, path: &Path) -> std::io::Result { + self + .0 + .realpath_sync(path) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsMetadata for FsSysTraitsAdapter { + type Metadata = FsStatSlim; + + #[inline] + fn base_fs_metadata(&self, path: &Path) -> std::io::Result { + self + .0 + .stat_sync(path) + .map(|data| FsStatSlim::from_deno_fs_stat(&data)) + .map_err(|err| err.into_io_error()) + } + + #[inline] + fn base_fs_symlink_metadata( + &self, + path: &Path, + ) -> std::io::Result { + self + .0 + .lstat_sync(path) + .map(|data| FsStatSlim::from_deno_fs_stat(&data)) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsCreateDir for FsSysTraitsAdapter { + #[inline] + fn base_fs_create_dir( + &self, + path: &Path, + options: &sys_traits::CreateDirOptions, + ) -> std::io::Result<()> { + self + .0 + .mkdir_sync(path, options.recursive, options.mode) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsRemoveFile for FsSysTraitsAdapter { + #[inline] + fn base_fs_remove_file(&self, path: &Path) -> std::io::Result<()> { + self + .0 + .remove_sync(path, false) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsRename for FsSysTraitsAdapter { + #[inline] + fn base_fs_rename(&self, from: &Path, to: &Path) -> std::io::Result<()> { + self + .0 + .rename_sync(from, to) + .map_err(|err| err.into_io_error()) + } +} + +pub struct 
FsFileAdapter(pub Rc); + +impl FsFile for FsFileAdapter {} + +impl FsFileSetPermissions for FsFileAdapter { + #[inline] + fn fs_file_set_permissions(&mut self, mode: u32) -> std::io::Result<()> { + if cfg!(windows) { + Ok(()) // ignore + } else { + self + .0 + .clone() + .chmod_sync(mode) + .map_err(|err| err.into_io_error()) + } + } +} + +impl std::io::Read for FsFileAdapter { + #[inline] + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self + .0 + .clone() + .read_sync(buf) + .map_err(|err| err.into_io_error()) + } +} + +impl std::io::Seek for FsFileAdapter { + fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result { + self + .0 + .clone() + .seek_sync(pos) + .map_err(|err| err.into_io_error()) + } +} + +impl std::io::Write for FsFileAdapter { + #[inline] + fn write(&mut self, buf: &[u8]) -> std::io::Result { + self + .0 + .clone() + .write_sync(buf) + .map_err(|err| err.into_io_error()) + } + + #[inline] + fn flush(&mut self) -> std::io::Result<()> { + self + .0 + .clone() + .sync_sync() + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsOpen for FsSysTraitsAdapter { + type File = FsFileAdapter; + + fn base_fs_open( + &self, + path: &Path, + options: &sys_traits::OpenOptions, + ) -> std::io::Result { + self + .0 + .open_sync( + path, + OpenOptions { + read: options.read, + write: options.write, + create: options.create, + truncate: options.truncate, + append: options.append, + create_new: options.create_new, + mode: options.mode, + }, + None, + ) + .map(FsFileAdapter) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::SystemRandom for FsSysTraitsAdapter { + #[inline] + fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> { + getrandom::getrandom(buf).map_err(|err| { + std::io::Error::new(std::io::ErrorKind::Other, err.to_string()) + }) + } +} + +impl sys_traits::SystemTimeNow for FsSysTraitsAdapter { + #[inline] + fn sys_time_now(&self) -> SystemTime { + SystemTime::now() + } +} + +impl sys_traits::ThreadSleep for FsSysTraitsAdapter { + #[inline] + fn thread_sleep(&self, dur: Duration) { + std::thread::sleep(dur); + } +} + +impl sys_traits::BaseEnvVar for FsSysTraitsAdapter { + fn base_env_var_os( + &self, + key: &std::ffi::OsStr, + ) -> Option { + std::env::var_os(key) + } +} diff --git a/ext/fs/lib.rs b/ext/fs/lib.rs index 26fac1e79f..cfcf249783 100644 --- a/ext/fs/lib.rs +++ b/ext/fs/lib.rs @@ -13,6 +13,8 @@ pub use crate::interface::FileSystem; pub use crate::interface::FileSystemRc; pub use crate::interface::FsDirEntry; pub use crate::interface::FsFileType; +pub use crate::interface::FsStatSlim; +pub use crate::interface::FsSysTraitsAdapter; pub use crate::interface::OpenOptions; pub use crate::ops::FsOpsError; pub use crate::ops::FsOpsErrorKind; diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index 86ad213160..76d37e430c 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -723,30 +723,34 @@ fn cp(from: &Path, to: &Path) -> FsResult<()> { } } - match (fs::metadata(to), fs::symlink_metadata(to)) { - (Ok(m), _) if m.is_dir() => cp_( - source_meta, - from, - &to.join(from.file_name().ok_or_else(|| { - io::Error::new( - io::ErrorKind::InvalidInput, - "the source path is not a valid file", - ) - })?), - )?, - (_, Ok(m)) if is_identical(&source_meta, &m) => { + if let Ok(m) = fs::metadata(to) { + if m.is_dir() { + return cp_( + source_meta, + from, + &to.join(from.file_name().ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + "the source path is not a valid file", + ) + })?), + ); + } + } + + if let Ok(m) = 
fs::symlink_metadata(to) { + if is_identical(&source_meta, &m) { return Err( io::Error::new( io::ErrorKind::InvalidInput, "the source and destination are the same file", ) .into(), - ) + ); } - _ => cp_(source_meta, from, to)?, } - Ok(()) + cp_(source_meta, from, to) } #[cfg(not(windows))] diff --git a/ext/fs/sync.rs b/ext/fs/sync.rs index 6a913f658a..06694f1dc4 100644 --- a/ext/fs/sync.rs +++ b/ext/fs/sync.rs @@ -21,3 +21,9 @@ mod inner { pub trait MaybeSend {} impl MaybeSend for T where T: ?Sized {} } + +#[allow(clippy::disallowed_types)] +#[inline] +pub fn new_rc(value: T) -> MaybeArc { + MaybeArc::new(value) +} diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index dfb53559d6..e7aaad2fc0 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.181.0" +version = "0.182.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index 7a464ecde9..9298c654c1 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.93.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index e65880942b..c97aa75552 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.91.0" +version = "0.92.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index 5a9eb7441f..5d726b3e31 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.114.0" +version = "0.115.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/sym/Cargo.toml b/ext/napi/sym/Cargo.toml index b07dadd634..22228bd2f6 100644 --- a/ext/napi/sym/Cargo.toml +++ b/ext/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.113.0" +version = "0.114.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/03_quic.js b/ext/net/03_quic.js new file mode 100644 index 0000000000..e100e7bd64 --- /dev/null +++ b/ext/net/03_quic.js @@ -0,0 +1,367 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
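// Rough end-to-end usage sketch for the API this module implements (hedged;
// adapted from the examples in lib.deno_net.d.ts later in this diff, with
// placeholder values):
//
//   const listener = await Deno.listenQuic({
//     port: 4443,
//     cert: "...", // PEM certificate chain
//     key: "...",  // PEM private key
//     alpnProtocols: ["h3"], // QUIC requires ALPN
//   });
//   const serverConn = await listener.accept();
//
//   const conn = await Deno.connectQuic({
//     hostname: "example.com",
//     port: 4443,
//     alpnProtocols: ["h3"],
//   });
//   const bidi = await conn.createBidirectionalStream();
//   const writer = bidi.writable.getWriter();
//   await writer.write(new TextEncoder().encode("hello"));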
+import { core, primordials } from "ext:core/mod.js"; +import { + op_quic_accept, + op_quic_accept_bi, + op_quic_accept_incoming, + op_quic_accept_uni, + op_quic_close_connection, + op_quic_close_endpoint, + op_quic_connect, + op_quic_connection_closed, + op_quic_connection_get_protocol, + op_quic_connection_get_remote_addr, + op_quic_endpoint_get_addr, + op_quic_get_send_stream_priority, + op_quic_incoming_accept, + op_quic_incoming_ignore, + op_quic_incoming_local_ip, + op_quic_incoming_refuse, + op_quic_incoming_remote_addr, + op_quic_incoming_remote_addr_validated, + op_quic_listen, + op_quic_max_datagram_size, + op_quic_open_bi, + op_quic_open_uni, + op_quic_read_datagram, + op_quic_send_datagram, + op_quic_set_send_stream_priority, +} from "ext:core/ops"; +import { + getWritableStreamResourceBacking, + ReadableStream, + readableStreamForRid, + WritableStream, + writableStreamForRid, +} from "ext:deno_web/06_streams.js"; +import { loadTlsKeyPair } from "ext:deno_net/02_tls.js"; +const { + BadResourcePrototype, +} = core; +const { + Uint8Array, + TypedArrayPrototypeSubarray, + SymbolAsyncIterator, + SafePromisePrototypeFinally, + ObjectPrototypeIsPrototypeOf, +} = primordials; + +class QuicSendStream extends WritableStream { + get sendOrder() { + return op_quic_get_send_stream_priority( + getWritableStreamResourceBacking(this).rid, + ); + } + + set sendOrder(p) { + op_quic_set_send_stream_priority( + getWritableStreamResourceBacking(this).rid, + p, + ); + } +} + +class QuicReceiveStream extends ReadableStream {} + +function readableStream(rid, closed) { + // stream can be indirectly closed by closing connection. + SafePromisePrototypeFinally(closed, () => { + core.tryClose(rid); + }); + return readableStreamForRid(rid, true, QuicReceiveStream); +} + +function writableStream(rid, closed) { + // stream can be indirectly closed by closing connection. + SafePromisePrototypeFinally(closed, () => { + core.tryClose(rid); + }); + return writableStreamForRid(rid, true, QuicSendStream); +} + +class QuicBidirectionalStream { + #readable; + #writable; + + constructor(txRid, rxRid, closed) { + this.#readable = readableStream(rxRid, closed); + this.#writable = writableStream(txRid, closed); + } + + get readable() { + return this.#readable; + } + + get writable() { + return this.#writable; + } +} + +async function* bidiStream(conn, closed) { + try { + while (true) { + const r = await op_quic_accept_bi(conn); + yield new QuicBidirectionalStream(r[0], r[1], closed); + } + } catch (error) { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { + return; + } + throw error; + } +} + +async function* uniStream(conn, closed) { + try { + while (true) { + const uniRid = await op_quic_accept_uni(conn); + yield readableStream(uniRid, closed); + } + } catch (error) { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { + return; + } + throw error; + } +} + +class QuicConn { + #resource; + #bidiStream = null; + #uniStream = null; + #closed; + + constructor(resource) { + this.#resource = resource; + + this.#closed = op_quic_connection_closed(this.#resource); + core.unrefOpPromise(this.#closed); + } + + get protocol() { + return op_quic_connection_get_protocol(this.#resource); + } + + get remoteAddr() { + return op_quic_connection_get_remote_addr(this.#resource); + } + + async createBidirectionalStream( + { sendOrder, waitUntilAvailable } = { __proto__: null }, + ) { + const { 0: txRid, 1: rxRid } = await op_quic_open_bi( + this.#resource, + waitUntilAvailable ?? 
false, + ); + if (sendOrder !== null && sendOrder !== undefined) { + op_quic_set_send_stream_priority(txRid, sendOrder); + } + return new QuicBidirectionalStream(txRid, rxRid, this.#closed); + } + + async createUnidirectionalStream( + { sendOrder, waitUntilAvailable } = { __proto__: null }, + ) { + const rid = await op_quic_open_uni( + this.#resource, + waitUntilAvailable ?? false, + ); + if (sendOrder !== null && sendOrder !== undefined) { + op_quic_set_send_stream_priority(rid, sendOrder); + } + return writableStream(rid, this.#closed); + } + + get incomingBidirectionalStreams() { + if (this.#bidiStream === null) { + this.#bidiStream = ReadableStream.from( + bidiStream(this.#resource, this.#closed), + ); + } + return this.#bidiStream; + } + + get incomingUnidirectionalStreams() { + if (this.#uniStream === null) { + this.#uniStream = ReadableStream.from( + uniStream(this.#resource, this.#closed), + ); + } + return this.#uniStream; + } + + get maxDatagramSize() { + return op_quic_max_datagram_size(this.#resource); + } + + async readDatagram(p) { + const view = p || new Uint8Array(this.maxDatagramSize); + const nread = await op_quic_read_datagram(this.#resource, view); + return TypedArrayPrototypeSubarray(view, 0, nread); + } + + async sendDatagram(data) { + await op_quic_send_datagram(this.#resource, data); + } + + get closed() { + core.refOpPromise(this.#closed); + return this.#closed; + } + + close({ closeCode, reason }) { + op_quic_close_connection(this.#resource, closeCode, reason); + } +} + +class QuicIncoming { + #incoming; + + constructor(incoming) { + this.#incoming = incoming; + } + + get localIp() { + return op_quic_incoming_local_ip(this.#incoming); + } + + get remoteAddr() { + return op_quic_incoming_remote_addr(this.#incoming); + } + + get remoteAddressValidated() { + return op_quic_incoming_remote_addr_validated(this.#incoming); + } + + async accept() { + const conn = await op_quic_incoming_accept(this.#incoming); + return new QuicConn(conn); + } + + refuse() { + op_quic_incoming_refuse(this.#incoming); + } + + ignore() { + op_quic_incoming_ignore(this.#incoming); + } +} + +class QuicListener { + #endpoint; + + constructor(endpoint) { + this.#endpoint = endpoint; + } + + get addr() { + return op_quic_endpoint_get_addr(this.#endpoint); + } + + async accept() { + const conn = await op_quic_accept(this.#endpoint); + return new QuicConn(conn); + } + + async incoming() { + const incoming = await op_quic_accept_incoming(this.#endpoint); + return new QuicIncoming(incoming); + } + + async next() { + let conn; + try { + conn = await this.accept(); + } catch (error) { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { + return { value: undefined, done: true }; + } + throw error; + } + return { value: conn, done: false }; + } + + [SymbolAsyncIterator]() { + return this; + } + + close({ closeCode, reason }) { + op_quic_close_endpoint(this.#endpoint, closeCode, reason); + } +} + +async function listenQuic( + { + hostname, + port, + cert, + key, + alpnProtocols, + keepAliveInterval, + maxIdleTimeout, + maxConcurrentBidirectionalStreams, + maxConcurrentUnidirectionalStreams, + }, +) { + hostname = hostname || "0.0.0.0"; + const keyPair = loadTlsKeyPair("Deno.listenQuic", { cert, key }); + const endpoint = await op_quic_listen( + { hostname, port }, + { alpnProtocols }, + { + keepAliveInterval, + maxIdleTimeout, + maxConcurrentBidirectionalStreams, + maxConcurrentUnidirectionalStreams, + }, + keyPair, + ); + return new QuicListener(endpoint); +} + +async function connectQuic( 
+ { + hostname, + port, + serverName, + caCerts, + cert, + key, + alpnProtocols, + keepAliveInterval, + maxIdleTimeout, + maxConcurrentBidirectionalStreams, + maxConcurrentUnidirectionalStreams, + congestionControl, + }, +) { + const keyPair = loadTlsKeyPair("Deno.connectQuic", { cert, key }); + const conn = await op_quic_connect( + { hostname, port }, + { + caCerts, + alpnProtocols, + serverName, + }, + { + keepAliveInterval, + maxIdleTimeout, + maxConcurrentBidirectionalStreams, + maxConcurrentUnidirectionalStreams, + congestionControl, + }, + keyPair, + ); + return new QuicConn(conn); +} + +export { + connectQuic, + listenQuic, + QuicBidirectionalStream, + QuicConn, + QuicIncoming, + QuicListener, + QuicReceiveStream, + QuicSendStream, +}; diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index 546152bd4b..eaee7bfb4b 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.175.0" +version = "0.176.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -20,6 +20,7 @@ deno_tls.workspace = true hickory-proto = "0.25.0-alpha.4" hickory-resolver.workspace = true pin-project.workspace = true +quinn = { version = "0.11.6", default-features = false, features = ["runtime-tokio", "rustls", "ring"] } rustls-tokio-stream.workspace = true serde.workspace = true socket2.workspace = true diff --git a/ext/net/lib.deno_net.d.ts b/ext/net/lib.deno_net.d.ts index 827081f2a4..958474cbbd 100644 --- a/ext/net/lib.deno_net.d.ts +++ b/ext/net/lib.deno_net.d.ts @@ -450,5 +450,293 @@ declare namespace Deno { options?: StartTlsOptions, ): Promise; + /** + * **UNSTABLE**: New API, yet to be vetted. + * @experimental + * @category Network + */ + export interface QuicTransportOptions { + /** Period of inactivity before sending a keep-alive packet. Keep-alive + * packets prevent an inactive but otherwise healthy connection from timing + * out. Only one side of any given connection needs keep-alive enabled for + * the connection to be preserved. + * @default {undefined} + */ + keepAliveInterval?: number; + /** Maximum duration of inactivity to accept before timing out the + * connection. The true idle timeout is the minimum of this and the peer’s + * own max idle timeout. + * @default {undefined} + */ + maxIdleTimeout?: number; + /** Maximum number of incoming bidirectional streams that may be open + * concurrently. + * @default {100} + */ + maxConcurrentBidirectionalStreams?: number; + /** Maximum number of incoming unidirectional streams that may be open + * concurrently. + * @default {100} + */ + maxConcurrentUnidirectionalStreams?: number; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * @experimental + * @category Network + */ + export interface ListenQuicOptions extends QuicTransportOptions { + /** The port to connect to. */ + port: number; + /** + * A literal IP address or host name that can be resolved to an IP address. + * @default {"0.0.0.0"} + */ + hostname?: string; + /** Server private key in PEM format */ + key: string; + /** Cert chain in PEM format */ + cert: string; + /** Application-Layer Protocol Negotiation (ALPN) protocols to announce to + * the client. QUIC requires the use of ALPN. + */ + alpnProtocols: string[]; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * Listen announces on the local transport address over QUIC. + * + * ```ts + * const lstnr = await Deno.listenQuic({ port: 443, cert: "...", key: "...", alpnProtocols: ["h3"] }); + * ``` + * + * Requires `allow-net` permission. 
+ * + * @experimental + * @tags allow-net + * @category Network + */ + export function listenQuic(options: ListenQuicOptions): Promise; + + /** + * **UNSTABLE**: New API, yet to be vetted. + * @experimental + * @category Network + */ + export interface ConnectQuicOptions extends QuicTransportOptions { + /** The port to connect to. */ + port: number; + /** A literal IP address or host name that can be resolved to an IP address. */ + hostname: string; + /** The name used for validating the certificate provided by the server. If + * not provided, defaults to `hostname`. */ + serverName?: string | undefined; + /** Application-Layer Protocol Negotiation (ALPN) protocols supported by + * the client. QUIC requires the use of ALPN. + */ + alpnProtocols: string[]; + /** A list of root certificates that will be used in addition to the + * default root certificates to verify the peer's certificate. + * + * Must be in PEM format. */ + caCerts?: string[]; + /** + * The congestion control algorithm used when sending data over this connection. + */ + congestionControl?: "throughput" | "low-latency"; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * Establishes a secure connection over QUIC using a hostname and port. The + * cert file is optional and if not included Mozilla's root certificates will + * be used. See also https://github.com/ctz/webpki-roots for specifics. + * + * ```ts + * const caCert = await Deno.readTextFile("./certs/my_custom_root_CA.pem"); + * const conn1 = await Deno.connectQuic({ hostname: "example.com", port: 443, alpnProtocols: ["h3"] }); + * const conn2 = await Deno.connectQuic({ caCerts: [caCert], hostname: "example.com", port: 443, alpnProtocols: ["h3"] }); + * ``` + * + * Requires `allow-net` permission. + * + * @experimental + * @tags allow-net + * @category Network + */ + export function connectQuic(options: ConnectQuicOptions): Promise; + + /** + * **UNSTABLE**: New API, yet to be vetted. + * @experimental + * @category Network + */ + export interface QuicCloseInfo { + /** A number representing the error code for the error. */ + closeCode: number; + /** A string representing the reason for closing the connection. */ + reason: string; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * An incoming connection for which the server has not yet begun its part of the handshake. + * + * @experimental + * @category Network + */ + export interface QuicIncoming { + /** + * The local IP address which was used when the peer established the connection. + */ + readonly localIp: string; + + /** + * The peer’s UDP address. + */ + readonly remoteAddr: NetAddr; + + /** + * Whether the socket address that is initiating this connection has proven that they can receive traffic. + */ + readonly remoteAddressValidated: boolean; + + /** + * Accept this incoming connection. + */ + accept(): Promise; + + /** + * Refuse this incoming connection. + */ + refuse(): void; + + /** + * Ignore this incoming connection attempt, not sending any packet in response. + */ + ignore(): void; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * Specialized listener that accepts QUIC connections. + * + * @experimental + * @category Network + */ + export interface QuicListener extends AsyncIterable { + /** Return the address of the `QuicListener`. */ + readonly addr: NetAddr; + + /** Waits for and resolves to the next connection to the `QuicListener`. */ + accept(): Promise; + + /** Waits for and resolves to the next incoming request to the `QuicListener`. 
*/ + incoming(): Promise; + + /** Close closes the listener. Any pending accept promises will be rejected + * with errors. */ + close(info: QuicCloseInfo): void; + + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * @experimental + * @category Network + */ + export interface QuicSendStreamOptions { + /** Indicates the send priority of this stream relative to other streams for + * which the value has been set. + * @default {undefined} + */ + sendOrder?: number; + /** Wait until there is sufficient flow credit to create the stream. + * @default {false} + */ + waitUntilAvailable?: boolean; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * @experimental + * @category Network + */ + export interface QuicConn { + /** Close closes the listener. Any pending accept promises will be rejected + * with errors. */ + close(info: QuicCloseInfo): void; + /** Opens and returns a bidirectional stream. */ + createBidirectionalStream( + options?: QuicSendStreamOptions, + ): Promise; + /** Opens and returns a unidirectional stream. */ + createUnidirectionalStream( + options?: QuicSendStreamOptions, + ): Promise; + /** Send a datagram. The provided data cannot be larger than + * `maxDatagramSize`. */ + sendDatagram(data: Uint8Array): Promise; + /** Receive a datagram. If no buffer is provider, one will be allocated. + * The size of the provided buffer should be at least `maxDatagramSize`. */ + readDatagram(buffer?: Uint8Array): Promise; + + /** Return the remote address for the connection. Clients may change + * addresses at will, for example when switching to a cellular internet + * connection. + */ + readonly remoteAddr: NetAddr; + /** The negotiated ALPN protocol, if provided. */ + readonly protocol: string | undefined; + /** Returns a promise that resolves when the connection is closed. */ + readonly closed: Promise; + /** A stream of bidirectional streams opened by the peer. */ + readonly incomingBidirectionalStreams: ReadableStream< + QuicBidirectionalStream + >; + /** A stream of unidirectional streams opened by the peer. */ + readonly incomingUnidirectionalStreams: ReadableStream; + /** Returns the datagram stream for sending and receiving datagrams. */ + readonly maxDatagramSize: number; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * @experimental + * @category Network + */ + export interface QuicBidirectionalStream { + /** Returns a QuicReceiveStream instance that can be used to read incoming data. */ + readonly readable: QuicReceiveStream; + /** Returns a QuicSendStream instance that can be used to write outgoing data. */ + readonly writable: QuicSendStream; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * @experimental + * @category Network + */ + export interface QuicSendStream extends WritableStream { + /** Indicates the send priority of this stream relative to other streams for + * which the value has been set. */ + sendOrder: number; + } + + /** + * **UNSTABLE**: New API, yet to be vetted. 
+ * + * @experimental + * @category Network + */ + export interface QuicReceiveStream extends ReadableStream {} + export {}; // only export exports } diff --git a/ext/net/lib.rs b/ext/net/lib.rs index f482750b38..04b3f80010 100644 --- a/ext/net/lib.rs +++ b/ext/net/lib.rs @@ -5,6 +5,7 @@ pub mod ops; pub mod ops_tls; #[cfg(unix)] pub mod ops_unix; +mod quic; pub mod raw; pub mod resolve_addr; pub mod tcp; @@ -158,8 +159,34 @@ deno_core::extension!(deno_net, ops_unix::op_node_unstable_net_listen_unixpacket
<P>, ops_unix::op_net_recv_unixpacket, ops_unix::op_net_send_unixpacket<P>, + + quic::op_quic_accept, + quic::op_quic_accept_bi, + quic::op_quic_accept_incoming, + quic::op_quic_accept_uni, + quic::op_quic_close_connection, + quic::op_quic_close_endpoint, + quic::op_quic_connection_closed, + quic::op_quic_connection_get_protocol, + quic::op_quic_connection_get_remote_addr, + quic::op_quic_connect<P>, + quic::op_quic_endpoint_get_addr, + quic::op_quic_get_send_stream_priority, + quic::op_quic_incoming_accept, + quic::op_quic_incoming_refuse, + quic::op_quic_incoming_ignore, + quic::op_quic_incoming_local_ip, + quic::op_quic_incoming_remote_addr, + quic::op_quic_incoming_remote_addr_validated, + quic::op_quic_listen<P>
,
+    quic::op_quic_max_datagram_size,
+    quic::op_quic_open_bi,
+    quic::op_quic_open_uni,
+    quic::op_quic_read_datagram,
+    quic::op_quic_send_datagram,
+    quic::op_quic_set_send_stream_priority,
   ],
-  esm = [ "01_net.js", "02_tls.js" ],
+  esm = [ "01_net.js", "02_tls.js", "03_quic.js" ],
   options = {
     root_cert_store_provider: Option<Arc<dyn RootCertStoreProvider>>,
     unsafely_ignore_certificate_errors: Option<Vec<String>>,
diff --git a/ext/net/quic.rs b/ext/net/quic.rs
new file mode 100644
index 0000000000..16f68364be
--- /dev/null
+++ b/ext/net/quic.rs
@@ -0,0 +1,660 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use crate::resolve_addr::resolve_addr;
+use crate::DefaultTlsOptions;
+use crate::NetPermissions;
+use crate::UnsafelyIgnoreCertificateErrors;
+use deno_core::error::bad_resource;
+use deno_core::error::generic_error;
+use deno_core::error::AnyError;
+use deno_core::futures::task::noop_waker_ref;
+use deno_core::op2;
+use deno_core::AsyncRefCell;
+use deno_core::AsyncResult;
+use deno_core::BufView;
+use deno_core::GarbageCollected;
+use deno_core::JsBuffer;
+use deno_core::OpState;
+use deno_core::RcRef;
+use deno_core::Resource;
+use deno_core::ResourceId;
+use deno_core::WriteOutcome;
+use deno_tls::create_client_config;
+use deno_tls::SocketUse;
+use deno_tls::TlsKeys;
+use deno_tls::TlsKeysHolder;
+use quinn::crypto::rustls::QuicClientConfig;
+use quinn::crypto::rustls::QuicServerConfig;
+use serde::Deserialize;
+use serde::Serialize;
+use std::borrow::Cow;
+use std::cell::RefCell;
+use std::future::Future;
+use std::net::IpAddr;
+use std::net::Ipv4Addr;
+use std::net::Ipv6Addr;
+use std::net::SocketAddrV4;
+use std::net::SocketAddrV6;
+use std::pin::pin;
+use std::rc::Rc;
+use std::sync::Arc;
+use std::task::Context;
+use std::task::Poll;
+use std::time::Duration;
+
+#[derive(Debug, Deserialize, Serialize)]
+struct Addr {
+  hostname: String,
+  port: u16,
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ListenArgs {
+  alpn_protocols: Option<Vec<String>>,
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct TransportConfig {
+  keep_alive_interval: Option<u64>,
+  max_idle_timeout: Option<u64>,
+  max_concurrent_bidirectional_streams: Option<u32>,
+  max_concurrent_unidirectional_streams: Option<u32>,
+  preferred_address_v4: Option<SocketAddrV4>,
+  preferred_address_v6: Option<SocketAddrV6>,
+  congestion_control: Option<String>,
+}
+
+impl TryInto<quinn::TransportConfig> for TransportConfig {
+  type Error = AnyError;
+
+  fn try_into(self) -> Result<quinn::TransportConfig, AnyError> {
+    let mut cfg = quinn::TransportConfig::default();
+
+    if let Some(interval) = self.keep_alive_interval {
+      cfg.keep_alive_interval(Some(Duration::from_millis(interval)));
+    }
+
+    if let Some(timeout) = self.max_idle_timeout {
+      cfg.max_idle_timeout(Some(Duration::from_millis(timeout).try_into()?));
+    }
+
+    if let Some(max) = self.max_concurrent_bidirectional_streams {
+      cfg.max_concurrent_bidi_streams(max.into());
+    }
+
+    if let Some(max) = self.max_concurrent_unidirectional_streams {
+      cfg.max_concurrent_uni_streams(max.into());
+    }
+
+    if let Some(v) = self.congestion_control {
+      let controller: Option<
+        Arc<dyn quinn::congestion::ControllerFactory + Send + Sync + 'static>,
+      > = match v.as_str() {
+        "low-latency" => {
+          Some(Arc::new(quinn::congestion::BbrConfig::default()))
+        }
+        "throughput" => {
+          Some(Arc::new(quinn::congestion::CubicConfig::default()))
+        }
+        _ => None,
+      };
+      if let Some(controller) = controller {
+        cfg.congestion_controller_factory(controller);
+      }
+    }
+
+    Ok(cfg)
+  }
+}
+
+struct EndpointResource(quinn::Endpoint, Arc<QuicServerConfig>);
+
+impl GarbageCollected for EndpointResource {}
+
+#[op2(async)]
+#[cppgc]
+pub(crate) async fn op_quic_listen<NP>(
+  state: Rc<RefCell<OpState>>,
+  #[serde] addr: Addr,
+  #[serde] args: ListenArgs,
+  #[serde] transport_config: TransportConfig,
+  #[cppgc] keys: &TlsKeysHolder,
+) -> Result<EndpointResource, AnyError>
+where
+  NP: NetPermissions + 'static,
+{
+  state
+    .borrow_mut()
+    .borrow_mut::<NP>()
+    .check_net(&(&addr.hostname, Some(addr.port)), "Deno.listenQuic()")?;
+
+  let addr = resolve_addr(&addr.hostname, addr.port)
+    .await?
+    .next()
+    .ok_or_else(|| generic_error("No resolved address found"))?;
+
+  let TlsKeys::Static(deno_tls::TlsKey(cert, key)) = keys.take() else {
+    unreachable!()
+  };
+
+  let mut crypto =
+    quinn::rustls::ServerConfig::builder_with_protocol_versions(&[
+      &quinn::rustls::version::TLS13,
+    ])
+    .with_no_client_auth()
+    .with_single_cert(cert.clone(), key.clone_key())?;
+
+  if let Some(alpn_protocols) = args.alpn_protocols {
+    crypto.alpn_protocols = alpn_protocols
+      .into_iter()
+      .map(|alpn| alpn.into_bytes())
+      .collect();
+  }
+
+  let server_config = Arc::new(QuicServerConfig::try_from(crypto)?);
+  let mut config = quinn::ServerConfig::with_crypto(server_config.clone());
+  config.preferred_address_v4(transport_config.preferred_address_v4);
+  config.preferred_address_v6(transport_config.preferred_address_v6);
+  config.transport_config(Arc::new(transport_config.try_into()?));
+  let endpoint = quinn::Endpoint::server(config, addr)?;
+
+  Ok(EndpointResource(endpoint, server_config))
+}
+
+#[op2]
+#[serde]
+pub(crate) fn op_quic_endpoint_get_addr(
+  #[cppgc] endpoint: &EndpointResource,
+) -> Result<Addr, AnyError> {
+  let addr = endpoint.0.local_addr()?;
+  let addr = Addr {
+    hostname: format!("{}", addr.ip()),
+    port: addr.port(),
+  };
+  Ok(addr)
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct CloseInfo {
+  close_code: u64,
+  reason: String,
+}
+
+#[op2(fast)]
+pub(crate) fn op_quic_close_endpoint(
+  #[cppgc] endpoint: &EndpointResource,
+  #[bigint] close_code: u64,
+  #[string] reason: String,
+) -> Result<(), AnyError> {
+  endpoint
+    .0
+    .close(quinn::VarInt::from_u64(close_code)?, reason.as_bytes());
+  Ok(())
+}
+
+struct ConnectionResource(quinn::Connection);
+
+impl GarbageCollected for ConnectionResource {}
+
+#[op2(async)]
+#[cppgc]
+pub(crate) async fn op_quic_accept(
+  #[cppgc] endpoint: &EndpointResource,
+) -> Result<ConnectionResource, AnyError> {
+  match endpoint.0.accept().await {
+    Some(incoming) => {
+      let conn = incoming.accept()?.await?;
+      Ok(ConnectionResource(conn))
+    }
+    None => Err(bad_resource("QuicListener is closed")),
+  }
+}
+
+struct IncomingResource(
+  RefCell<Option<quinn::Incoming>>,
+  Arc<QuicServerConfig>,
+);
+
+impl GarbageCollected for IncomingResource {}
+
+#[op2(async)]
+#[cppgc]
+pub(crate) async fn op_quic_accept_incoming(
+  #[cppgc] endpoint: &EndpointResource,
+) -> Result<IncomingResource, AnyError> {
+  match endpoint.0.accept().await {
+    Some(incoming) => Ok(IncomingResource(
+      RefCell::new(Some(incoming)),
+      endpoint.1.clone(),
+    )),
+    None => Err(bad_resource("QuicListener is closed")),
+  }
+}
+
+#[op2]
+#[string]
+pub(crate) fn op_quic_incoming_local_ip(
+  #[cppgc] incoming_resource: &IncomingResource,
+) -> Result<Option<String>, AnyError> {
+  let Some(incoming) = incoming_resource.0.borrow_mut().take() else {
+    return Err(bad_resource("QuicIncoming already used"));
+  };
+  Ok(incoming.local_ip().map(|ip| ip.to_string()))
+}
+
+#[op2]
+#[serde]
+pub(crate) fn op_quic_incoming_remote_addr(
+  #[cppgc] incoming_resource: &IncomingResource,
+) -> Result<Addr, AnyError> {
+  let Some(incoming) = incoming_resource.0.borrow_mut().take() else {
+    return Err(bad_resource("QuicIncoming already used"));
+  };
+  let addr = incoming.remote_address();
+  Ok(Addr {
+    hostname: format!("{}", addr.ip()),
+    port: addr.port(),
+  })
+}
+
+#[op2(fast)]
+pub(crate) fn op_quic_incoming_remote_addr_validated(
+  #[cppgc] incoming_resource: &IncomingResource,
+) -> Result<bool, AnyError> {
+  let Some(incoming) = incoming_resource.0.borrow_mut().take() else {
+    return Err(bad_resource("QuicIncoming already used"));
+  };
+  Ok(incoming.remote_address_validated())
+}
+
+#[op2(async)]
+#[cppgc]
+pub(crate) async fn op_quic_incoming_accept(
+  #[cppgc] incoming_resource: &IncomingResource,
+  #[serde] transport_config: Option<TransportConfig>,
+) -> Result<ConnectionResource, AnyError> {
+  let Some(incoming) = incoming_resource.0.borrow_mut().take() else {
+    return Err(bad_resource("QuicIncoming already used"));
+  };
+  let conn = match transport_config {
+    Some(transport_config) => {
+      let mut config =
+        quinn::ServerConfig::with_crypto(incoming_resource.1.clone());
+      config.preferred_address_v4(transport_config.preferred_address_v4);
+      config.preferred_address_v6(transport_config.preferred_address_v6);
+      config.transport_config(Arc::new(transport_config.try_into()?));
+      incoming.accept_with(Arc::new(config))?.await?
+    }
+    None => incoming.accept()?.await?,
+  };
+  Ok(ConnectionResource(conn))
+}
+
+#[op2]
+#[serde]
+pub(crate) fn op_quic_incoming_refuse(
+  #[cppgc] incoming: &IncomingResource,
+) -> Result<(), AnyError> {
+  let Some(incoming) = incoming.0.borrow_mut().take() else {
+    return Err(bad_resource("QuicIncoming already used"));
+  };
+  incoming.refuse();
+  Ok(())
+}
+
+#[op2]
+#[serde]
+pub(crate) fn op_quic_incoming_ignore(
+  #[cppgc] incoming: &IncomingResource,
+) -> Result<(), AnyError> {
+  let Some(incoming) = incoming.0.borrow_mut().take() else {
+    return Err(bad_resource("QuicIncoming already used"));
+  };
+  incoming.ignore();
+  Ok(())
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ConnectArgs {
+  ca_certs: Option<Vec<String>>,
+  alpn_protocols: Option<Vec<String>>,
+  server_name: Option<String>,
+}
+
+#[op2(async)]
+#[cppgc]
+pub(crate) async fn op_quic_connect<NP>(
+  state: Rc<RefCell<OpState>>,
+  #[serde] addr: Addr,
+  #[serde] args: ConnectArgs,
+  #[serde] transport_config: TransportConfig,
+  #[cppgc] key_pair: &TlsKeysHolder,
+) -> Result<ConnectionResource, AnyError>
+where
+  NP: NetPermissions + 'static,
+{
+  state
+    .borrow_mut()
+    .borrow_mut::<NP>()
+    .check_net(&(&addr.hostname, Some(addr.port)), "Deno.connectQuic()")?;
+
+  let sock_addr = resolve_addr(&addr.hostname, addr.port)
+    .await?
+    .next()
+    .ok_or_else(|| generic_error("No resolved address found"))?;
+
+  let root_cert_store = state
+    .borrow()
+    .borrow::<DefaultTlsOptions>()
+    .root_cert_store()?;
+
+  let unsafely_ignore_certificate_errors = state
+    .borrow()
+    .try_borrow::<UnsafelyIgnoreCertificateErrors>()
+    .and_then(|it| it.0.clone());
+
+  let ca_certs = args
+    .ca_certs
+    .unwrap_or_default()
+    .into_iter()
+    .map(|s| s.into_bytes())
+    .collect::<Vec<_>>();
+
+  let mut tls_config = create_client_config(
+    root_cert_store,
+    ca_certs,
+    unsafely_ignore_certificate_errors,
+    key_pair.take(),
+    SocketUse::GeneralSsl,
+  )?;
+
+  if let Some(alpn_protocols) = args.alpn_protocols {
+    tls_config.alpn_protocols =
+      alpn_protocols.into_iter().map(|s| s.into_bytes()).collect();
+  }
+
+  let client_config = QuicClientConfig::try_from(tls_config)?;
+  let mut client_config = quinn::ClientConfig::new(Arc::new(client_config));
+  client_config.transport_config(Arc::new(transport_config.try_into()?));
+
+  let local_addr = match sock_addr.ip() {
+    IpAddr::V4(_) => IpAddr::from(Ipv4Addr::new(0, 0, 0, 0)),
+    IpAddr::V6(_) => IpAddr::from(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)),
+  };
+
+  let conn = quinn::Endpoint::client((local_addr, 0).into())?
+    .connect_with(
+      client_config,
+      sock_addr,
+      &args.server_name.unwrap_or(addr.hostname),
+    )?
+    .await?;
+
+  Ok(ConnectionResource(conn))
+}
+
+#[op2]
+#[string]
+pub(crate) fn op_quic_connection_get_protocol(
+  #[cppgc] connection: &ConnectionResource,
+) -> Option<String> {
+  connection
+    .0
+    .handshake_data()
+    .and_then(|h| h.downcast::<quinn::crypto::rustls::HandshakeData>().ok())
+    .and_then(|h| h.protocol)
+    .map(|p| String::from_utf8_lossy(&p).into_owned())
+}
+
+#[op2]
+#[serde]
+pub(crate) fn op_quic_connection_get_remote_addr(
+  #[cppgc] connection: &ConnectionResource,
+) -> Result<Addr, AnyError> {
+  let addr = connection.0.remote_address();
+  Ok(Addr {
+    hostname: format!("{}", addr.ip()),
+    port: addr.port(),
+  })
+}
+
+#[op2(fast)]
+pub(crate) fn op_quic_close_connection(
+  #[cppgc] connection: &ConnectionResource,
+  #[bigint] close_code: u64,
+  #[string] reason: String,
+) -> Result<(), AnyError> {
+  connection
+    .0
+    .close(quinn::VarInt::from_u64(close_code)?, reason.as_bytes());
+  Ok(())
+}
+
+#[op2(async)]
+#[serde]
+pub(crate) async fn op_quic_connection_closed(
+  #[cppgc] connection: &ConnectionResource,
+) -> Result<CloseInfo, AnyError> {
+  let e = connection.0.closed().await;
+  match e {
+    quinn::ConnectionError::LocallyClosed => Ok(CloseInfo {
+      close_code: 0,
+      reason: "".into(),
+    }),
+    quinn::ConnectionError::ApplicationClosed(i) => Ok(CloseInfo {
+      close_code: i.error_code.into(),
+      reason: String::from_utf8_lossy(&i.reason).into_owned(),
+    }),
+    e => Err(e.into()),
+  }
+}
+
+struct SendStreamResource(AsyncRefCell<quinn::SendStream>);
+
+impl SendStreamResource {
+  fn new(stream: quinn::SendStream) -> Self {
+    Self(AsyncRefCell::new(stream))
+  }
+}
+
+impl Resource for SendStreamResource {
+  fn name(&self) -> Cow<str> {
+    "quicSendStream".into()
+  }
+
+  fn write(self: Rc<Self>, view: BufView) -> AsyncResult<WriteOutcome> {
+    Box::pin(async move {
+      let mut r = RcRef::map(self, |r| &r.0).borrow_mut().await;
+      let nwritten = r.write(&view).await?;
+      Ok(WriteOutcome::Partial { nwritten, view })
+    })
+  }
+}
+
+struct RecvStreamResource(AsyncRefCell<quinn::RecvStream>);
+
+impl RecvStreamResource {
+  fn new(stream: quinn::RecvStream) -> Self {
+    Self(AsyncRefCell::new(stream))
+  }
+}
+
+impl Resource for RecvStreamResource {
+  fn name(&self) -> Cow<str> {
+    "quicReceiveStream".into()
+  }
+
+  fn read(self: Rc<Self>, limit: usize) -> AsyncResult<BufView> {
+    Box::pin(async move {
+      let mut r = RcRef::map(self, |r| &r.0).borrow_mut().await;
+      let mut data = vec![0; limit];
+      let nread = r.read(&mut data).await?.unwrap_or(0);
+      data.truncate(nread);
+      Ok(BufView::from(data))
+    })
+  }
+}
+
+#[op2(async)]
+#[serde]
+pub(crate) async fn op_quic_accept_bi(
+  #[cppgc] connection: &ConnectionResource,
+  state: Rc<RefCell<OpState>>,
+) -> Result<(ResourceId, ResourceId), AnyError> {
+  match connection.0.accept_bi().await {
+    Ok((tx, rx)) => {
+      let mut state = state.borrow_mut();
+      let tx_rid = state.resource_table.add(SendStreamResource::new(tx));
+      let rx_rid = state.resource_table.add(RecvStreamResource::new(rx));
+      Ok((tx_rid, rx_rid))
+    }
+    Err(e) => match e {
+      quinn::ConnectionError::LocallyClosed
+      | quinn::ConnectionError::ApplicationClosed(..) => {
+        Err(bad_resource("QuicConn is closed"))
+      }
+      _ => Err(e.into()),
+    },
+  }
+}
+
+#[op2(async)]
+#[serde]
+pub(crate) async fn op_quic_open_bi(
+  #[cppgc] connection: &ConnectionResource,
+  state: Rc<RefCell<OpState>>,
+  wait_for_available: bool,
+) -> Result<(ResourceId, ResourceId), AnyError> {
+  let (tx, rx) = if wait_for_available {
+    connection.0.open_bi().await?
+  } else {
+    let waker = noop_waker_ref();
+    let mut cx = Context::from_waker(waker);
+    match pin!(connection.0.open_bi()).poll(&mut cx) {
+      Poll::Ready(r) => r?,
+      Poll::Pending => {
+        return Err(generic_error("Connection has reached the maximum number of outgoing concurrent bidirectional streams"));
+      }
+    }
+  };
+  let mut state = state.borrow_mut();
+  let tx_rid = state.resource_table.add(SendStreamResource::new(tx));
+  let rx_rid = state.resource_table.add(RecvStreamResource::new(rx));
+  Ok((tx_rid, rx_rid))
+}
+
+#[op2(async)]
+#[serde]
+pub(crate) async fn op_quic_accept_uni(
+  #[cppgc] connection: &ConnectionResource,
+  state: Rc<RefCell<OpState>>,
+) -> Result<ResourceId, AnyError> {
+  match connection.0.accept_uni().await {
+    Ok(rx) => {
+      let rid = state
+        .borrow_mut()
+        .resource_table
+        .add(RecvStreamResource::new(rx));
+      Ok(rid)
+    }
+    Err(e) => match e {
+      quinn::ConnectionError::LocallyClosed
+      | quinn::ConnectionError::ApplicationClosed(..) => {
+        Err(bad_resource("QuicConn is closed"))
+      }
+      _ => Err(e.into()),
+    },
+  }
+}
+
+#[op2(async)]
+#[serde]
+pub(crate) async fn op_quic_open_uni(
+  #[cppgc] connection: &ConnectionResource,
+  state: Rc<RefCell<OpState>>,
+  wait_for_available: bool,
+) -> Result<ResourceId, AnyError> {
+  let tx = if wait_for_available {
+    connection.0.open_uni().await?
+  } else {
+    let waker = noop_waker_ref();
+    let mut cx = Context::from_waker(waker);
+    match pin!(connection.0.open_uni()).poll(&mut cx) {
+      Poll::Ready(r) => r?,
+      Poll::Pending => {
+        return Err(generic_error("Connection has reached the maximum number of outgoing concurrent unidirectional streams"));
+      }
+    }
+  };
+  let rid = state
+    .borrow_mut()
+    .resource_table
+    .add(SendStreamResource::new(tx));
+  Ok(rid)
+}
+
+#[op2(async)]
+pub(crate) async fn op_quic_send_datagram(
+  #[cppgc] connection: &ConnectionResource,
+  #[buffer] buf: JsBuffer,
+) -> Result<(), AnyError> {
+  connection.0.send_datagram_wait(buf.to_vec().into()).await?;
+  Ok(())
+}
+
+#[op2(async)]
+pub(crate) async fn op_quic_read_datagram(
+  #[cppgc] connection: &ConnectionResource,
+  #[buffer] mut buf: JsBuffer,
+) -> Result<u32, AnyError> {
+  let data = connection.0.read_datagram().await?;
+  buf[0..data.len()].copy_from_slice(&data);
+  Ok(data.len() as _)
+}
+
+#[op2(fast)]
+pub(crate) fn op_quic_max_datagram_size(
+  #[cppgc] connection: &ConnectionResource,
+) -> Result<u32, AnyError> {
+  Ok(connection.0.max_datagram_size().unwrap_or(0) as _)
+}
+
+#[op2(fast)]
+pub(crate) fn op_quic_get_send_stream_priority(
+  state: Rc<RefCell<OpState>>,
+  #[smi] rid: ResourceId,
+) -> Result<i32, AnyError> {
+  let resource = state
+    .borrow()
+    .resource_table
+    .get::<SendStreamResource>(rid)?;
+  let r = RcRef::map(resource, |r| &r.0).try_borrow();
+  match r {
+    Some(s) => Ok(s.priority()?),
+    None => Err(generic_error("Unable to get priority")),
+  }
+}
+
+#[op2(fast)]
+pub(crate) fn op_quic_set_send_stream_priority(
+  state: Rc<RefCell<OpState>>,
+  #[smi] rid: ResourceId,
+  priority: i32,
+) -> Result<(), AnyError> {
+  let resource = state
+    .borrow()
+    .resource_table
+    .get::<SendStreamResource>(rid)?;
+  let r = RcRef::map(resource, |r| &r.0).try_borrow();
+  match r {
+    Some(s) => {
+      s.set_priority(priority)?;
+      Ok(())
+    }
+    None => Err(generic_error("Unable to set priority")),
+  }
+}
+aead-gcm-stream = "0.4" aes.workspace = true async-trait.workspace = true base64.workspace = true diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 1e6c920c9e..b9b459efc1 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -14,7 +14,9 @@ use deno_core::url::Url; #[allow(unused_imports)] use deno_core::v8; use deno_core::v8::ExternalReference; +use deno_fs::FsSysTraitsAdapter; use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::IsBuiltInNodeModuleChecker; use node_resolver::NpmPackageFolderResolverRc; use once_cell::sync::Lazy; @@ -807,92 +809,28 @@ deno_core::extension!(deno_node, }, ); -pub type NodeResolver = node_resolver::NodeResolver; -#[allow(clippy::disallowed_types)] -pub type NodeResolverRc = - deno_fs::sync::MaybeArc>; -pub type PackageJsonResolver = - node_resolver::PackageJsonResolver; -#[allow(clippy::disallowed_types)] -pub type PackageJsonResolverRc = deno_fs::sync::MaybeArc< - node_resolver::PackageJsonResolver, ->; - #[derive(Debug)] -pub struct DenoFsNodeResolverEnv { - fs: deno_fs::FileSystemRc, -} +pub struct RealIsBuiltInNodeModuleChecker; -impl DenoFsNodeResolverEnv { - pub fn new(fs: deno_fs::FileSystemRc) -> Self { - Self { fs } - } -} - -impl node_resolver::env::NodeResolverEnv for DenoFsNodeResolverEnv { +impl IsBuiltInNodeModuleChecker for RealIsBuiltInNodeModuleChecker { + #[inline] fn is_builtin_node_module(&self, specifier: &str) -> bool { is_builtin_node_module(specifier) } - - fn realpath_sync( - &self, - path: &std::path::Path, - ) -> std::io::Result { - self - .fs - .realpath_sync(path) - .map_err(|err| err.into_io_error()) - } - - fn stat_sync( - &self, - path: &std::path::Path, - ) -> std::io::Result { - self - .fs - .stat_sync(path) - .map(|stat| node_resolver::env::NodeResolverFsStat { - is_file: stat.is_file, - is_dir: stat.is_directory, - is_symlink: stat.is_symlink, - }) - .map_err(|err| err.into_io_error()) - } - - fn exists_sync(&self, path: &std::path::Path) -> bool { - self.fs.exists_sync(path) - } - - fn pkg_json_fs(&self) -> &dyn deno_package_json::fs::DenoPkgJsonFs { - self - } } -impl deno_package_json::fs::DenoPkgJsonFs for DenoFsNodeResolverEnv { - fn read_to_string_lossy( - &self, - path: &std::path::Path, - ) -> Result, std::io::Error> { - self - .fs - .read_text_file_lossy_sync(path, None) - .map_err(|err| err.into_io_error()) - } -} - -pub struct DenoPkgJsonFsAdapter<'a>(pub &'a dyn deno_fs::FileSystem); - -impl<'a> deno_package_json::fs::DenoPkgJsonFs for DenoPkgJsonFsAdapter<'a> { - fn read_to_string_lossy( - &self, - path: &Path, - ) -> Result, std::io::Error> { - self - .0 - .read_text_file_lossy_sync(path, None) - .map_err(|err| err.into_io_error()) - } -} +pub type NodeResolver = node_resolver::NodeResolver< + RealIsBuiltInNodeModuleChecker, + FsSysTraitsAdapter, +>; +#[allow(clippy::disallowed_types)] +pub type NodeResolverRc = deno_fs::sync::MaybeArc; +pub type PackageJsonResolver = + node_resolver::PackageJsonResolver; +#[allow(clippy::disallowed_types)] +pub type PackageJsonResolverRc = deno_fs::sync::MaybeArc< + node_resolver::PackageJsonResolver, +>; pub fn create_host_defined_options<'s>( scope: &mut v8::HandleScope<'s>, diff --git a/ext/node/ops/crypto/cipher.rs b/ext/node/ops/crypto/cipher.rs index ec45146b49..7f5b108a04 100644 --- a/ext/node/ops/crypto/cipher.rs +++ b/ext/node/ops/crypto/cipher.rs @@ -172,27 +172,19 @@ impl Cipher { ) -> Result { use Cipher::*; Ok(match algorithm_name { - "aes-128-cbc" => { + "aes128" | "aes-128-cbc" => { Aes128Cbc(Box::new(cbc::Encryptor::new(key.into(), 
iv.into()))) } "aes-128-ecb" => Aes128Ecb(Box::new(ecb::Encryptor::new(key.into()))), "aes-192-ecb" => Aes192Ecb(Box::new(ecb::Encryptor::new(key.into()))), "aes-256-ecb" => Aes256Ecb(Box::new(ecb::Encryptor::new(key.into()))), "aes-128-gcm" => { - if iv.len() != 12 { - return Err(CipherError::InvalidIvLength); - } - let cipher = aead_gcm_stream::AesGcm::::new(key.into(), iv); Aes128Gcm(Box::new(cipher)) } "aes-256-gcm" => { - if iv.len() != 12 { - return Err(CipherError::InvalidIvLength); - } - let cipher = aead_gcm_stream::AesGcm::::new(key.into(), iv); @@ -395,20 +387,12 @@ impl Decipher { "aes-192-ecb" => Aes192Ecb(Box::new(ecb::Decryptor::new(key.into()))), "aes-256-ecb" => Aes256Ecb(Box::new(ecb::Decryptor::new(key.into()))), "aes-128-gcm" => { - if iv.len() != 12 { - return Err(DecipherError::InvalidIvLength); - } - let decipher = aead_gcm_stream::AesGcm::::new(key.into(), iv); Aes128Gcm(Box::new(decipher)) } "aes-256-gcm" => { - if iv.len() != 12 { - return Err(DecipherError::InvalidIvLength); - } - let decipher = aead_gcm_stream::AesGcm::::new(key.into(), iv); diff --git a/ext/node/polyfills/_brotli.js b/ext/node/polyfills/_brotli.js index ebd0351561..108e5319a9 100644 --- a/ext/node/polyfills/_brotli.js +++ b/ext/node/polyfills/_brotli.js @@ -10,9 +10,12 @@ const { ArrayPrototypeMap, TypedArrayPrototypeSlice, TypedArrayPrototypeSubarray, - TypedArrayPrototypeGetByteLength, - DataViewPrototypeGetBuffer, TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, + DataViewPrototypeGetBuffer, + DataViewPrototypeGetByteLength, + DataViewPrototypeGetByteOffset, } = primordials; const { isTypedArray, isDataView, close } = core; import { @@ -40,9 +43,17 @@ const toU8 = (input) => { } if (isTypedArray(input)) { - return new Uint8Array(TypedArrayPrototypeGetBuffer(input)); + return new Uint8Array( + TypedArrayPrototypeGetBuffer(input), + TypedArrayPrototypeGetByteOffset(input), + TypedArrayPrototypeGetByteLength(input), + ); } else if (isDataView(input)) { - return new Uint8Array(DataViewPrototypeGetBuffer(input)); + return new Uint8Array( + DataViewPrototypeGetBuffer(input), + DataViewPrototypeGetByteOffset(input), + DataViewPrototypeGetByteLength(input), + ); } return input; diff --git a/ext/node/polyfills/_fs/_fs_ftruncate.ts b/ext/node/polyfills/_fs/_fs_ftruncate.ts index 92af46f521..79320137f9 100644 --- a/ext/node/polyfills/_fs/_fs_ftruncate.ts +++ b/ext/node/polyfills/_fs/_fs_ftruncate.ts @@ -16,16 +16,24 @@ export function ftruncate( : undefined; const callback: CallbackWithError = typeof lenOrCallback === "function" ? 
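[Reviewer note: the `toU8` fix above matters whenever the caller passes a view into a larger buffer. A minimal illustration of the bug class in plain TypeScript, independent of the polyfill:]

```ts
const backing = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]);
const view = backing.subarray(4, 6); // byteOffset 4, byteLength 2

// Old behavior: wrapping only the buffer loses the view's bounds,
// so brotli would (de)compress all 8 bytes instead of 2.
const wrong = new Uint8Array(view.buffer);
console.log(wrong.length); // 8

// New behavior: preserve byteOffset and byteLength explicitly.
const right = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
console.log(right.length); // 2
```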
diff --git a/ext/node/polyfills/_fs/_fs_ftruncate.ts b/ext/node/polyfills/_fs/_fs_ftruncate.ts
index 92af46f521..79320137f9 100644
--- a/ext/node/polyfills/_fs/_fs_ftruncate.ts
+++ b/ext/node/polyfills/_fs/_fs_ftruncate.ts
@@ -16,16 +16,24 @@ export function ftruncate(
     : undefined;
   const callback: CallbackWithError = typeof lenOrCallback === "function"
     ? lenOrCallback
-    : maybeCallback as CallbackWithError;
+    : (maybeCallback as CallbackWithError);
 
   if (!callback) throw new Error("No callback function supplied");
 
-  new FsFile(fd, Symbol.for("Deno.internal.FsFile")).truncate(len).then(
-    () => callback(null),
-    callback,
-  );
+  new FsFile(fd, Symbol.for("Deno.internal.FsFile"))
+    .truncate(len)
+    .then(() => callback(null), callback);
 }
 
 export function ftruncateSync(fd: number, len?: number) {
   new FsFile(fd, Symbol.for("Deno.internal.FsFile")).truncateSync(len);
 }
+
+export function ftruncatePromise(fd: number, len?: number): Promise<void> {
+  return new Promise((resolve, reject) => {
+    ftruncate(fd, len, (err) => {
+      if (err) reject(err);
+      else resolve();
+    });
+  });
+}
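[Reviewer note: `ftruncatePromise` exists to back `FileHandle.prototype.truncate`, which is wired up in the `internal/fs/handle.ts` hunk below. A small usage sketch under Node compat; the file path is a placeholder:]

```ts
import { open } from "node:fs/promises";

// FileHandle.truncate() now resolves through ftruncatePromise,
// which wraps the callback-style ftruncate in a Promise.
const handle = await open("./scratch.txt", "r+");
await handle.truncate(4); // keep only the first 4 bytes
await handle.close();
```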
diff --git a/ext/node/polyfills/internal/crypto/util.ts b/ext/node/polyfills/internal/crypto/util.ts
index a39b031ee3..6c925f6577 100644
--- a/ext/node/polyfills/internal/crypto/util.ts
+++ b/ext/node/polyfills/internal/crypto/util.ts
@@ -67,22 +67,16 @@ export const ellipticCurves: Array<EllipticCurve> = [
   },  // NIST P-224 EC
 ];
 
-// deno-fmt-ignore
 const supportedCiphers = [
-  "aes-128-ecb",  "aes-192-ecb",
-  "aes-256-ecb",  "aes-128-cbc",
-  "aes-192-cbc",  "aes-256-cbc",
-  "aes128",       "aes192",
-  "aes256",       "aes-128-cfb",
-  "aes-192-cfb",  "aes-256-cfb",
-  "aes-128-cfb8", "aes-192-cfb8",
-  "aes-256-cfb8", "aes-128-cfb1",
-  "aes-192-cfb1", "aes-256-cfb1",
-  "aes-128-ofb",  "aes-192-ofb",
-  "aes-256-ofb",  "aes-128-ctr",
-  "aes-192-ctr",  "aes-256-ctr",
-  "aes-128-gcm",  "aes-192-gcm",
-  "aes-256-gcm"
+  "aes-128-ecb",
+  "aes-192-ecb",
+  "aes-256-ecb",
+  "aes-128-cbc",
+  "aes-256-cbc",
+  "aes128",
+  "aes256",
+  "aes-128-gcm",
+  "aes-256-gcm",
 ];
 
 export function getCiphers(): string[] {
diff --git a/ext/node/polyfills/internal/errors.ts b/ext/node/polyfills/internal/errors.ts
index 61b53fa968..d79232aed7 100644
--- a/ext/node/polyfills/internal/errors.ts
+++ b/ext/node/polyfills/internal/errors.ts
@@ -624,6 +624,15 @@ function createInvalidArgType(
   return msg;
 }
 
+export class ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH extends NodeRangeError {
+  constructor() {
+    super(
+      "ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH",
+      "Input buffers must have the same length",
+    );
+  }
+}
+
 export class ERR_INVALID_ARG_TYPE_RANGE extends NodeRangeError {
   constructor(name: string, expected: string | string[], actual: unknown) {
     const msg = createInvalidArgType(name, expected);
@@ -2842,6 +2851,7 @@ export default {
   ERR_INVALID_ADDRESS_FAMILY,
   ERR_INVALID_ARG_TYPE,
   ERR_INVALID_ARG_TYPE_RANGE,
+  ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH,
   ERR_INVALID_ARG_VALUE,
   ERR_INVALID_ARG_VALUE_RANGE,
   ERR_INVALID_ASYNC_ID,
diff --git a/ext/node/polyfills/internal/fs/handle.ts b/ext/node/polyfills/internal/fs/handle.ts
index 9ec0fc97e2..ee035f2f5c 100644
--- a/ext/node/polyfills/internal/fs/handle.ts
+++ b/ext/node/polyfills/internal/fs/handle.ts
@@ -13,6 +13,7 @@ import {
   ReadOptions,
   TextOptionsArgument,
 } from "ext:deno_node/_fs/_fs_common.ts";
+import { ftruncatePromise } from "ext:deno_node/_fs/_fs_ftruncate.ts";
 import { core } from "ext:core/mod.js";
 
 interface WriteResult {
@@ -73,6 +74,10 @@ export class FileHandle extends EventEmitter {
     }
   }
 
+  truncate(len?: number): Promise<void> {
+    return fsCall(ftruncatePromise, this, len);
+  }
+
   readFile(
     opt?: TextOptionsArgument | BinaryOptionsArgument | FileOptionsArgument,
   ): Promise<string | Buffer> {
@@ -85,11 +90,7 @@ export class FileHandle extends EventEmitter {
     length: number,
     position: number,
   ): Promise<WriteResult>;
-  write(
-    str: string,
-    position: number,
-    encoding: string,
-  ): Promise<WriteResult>;
+  write(str: string, position: number, encoding: string): Promise<WriteResult>;
   write(
     bufferOrStr: Uint8Array | string,
     offsetOrPosition: number,
@@ -120,16 +121,10 @@ export class FileHandle extends EventEmitter {
       const encoding = lengthOrEncoding;
 
       return new Promise((resolve, reject) => {
-        write(
-          this.fd,
-          str,
-          position,
-          encoding,
-          (err, bytesWritten, buffer) => {
-            if (err) reject(err);
-            else resolve({ buffer, bytesWritten });
-          },
-        );
+        write(this.fd, str, position, encoding, (err, bytesWritten, buffer) => {
+          if (err) reject(err);
+          else resolve({ buffer, bytesWritten });
+        });
       });
     }
   }
 }
diff --git a/ext/node/polyfills/internal_binding/_timingSafeEqual.ts b/ext/node/polyfills/internal_binding/_timingSafeEqual.ts
index 559b7685b8..d9811c5505 100644
--- a/ext/node/polyfills/internal_binding/_timingSafeEqual.ts
+++ b/ext/node/polyfills/internal_binding/_timingSafeEqual.ts
@@ -4,6 +4,7 @@
 // deno-lint-ignore-file prefer-primordials
 
 import { Buffer } from "node:buffer";
+import { ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH } from "ext:deno_node/internal/errors.ts";
 
 function toDataView(ab: ArrayBufferLike | ArrayBufferView): DataView {
   if (ArrayBuffer.isView(ab)) {
@@ -19,7 +20,7 @@ function stdTimingSafeEqual(
   b: ArrayBufferView | ArrayBufferLike | DataView,
 ): boolean {
   if (a.byteLength !== b.byteLength) {
-    return false;
+    throw new ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH();
   }
   if (!(a instanceof DataView)) {
     a = toDataView(a);
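[Reviewer note: the `_timingSafeEqual.ts` change is behavioral, not cosmetic. Callers that relied on `timingSafeEqual` returning `false` for different-length inputs now need to handle a thrown `RangeError`, matching Node's own `ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH`:]

```ts
import { Buffer } from "node:buffer";
import { timingSafeEqual } from "node:crypto";

const a = Buffer.from("abcd");
const b = Buffer.from("abc");

try {
  timingSafeEqual(a, b);
} catch {
  // Previously this returned false; it now throws
  // ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH, as Node does.
}
```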
diff --git a/ext/node/polyfills/worker_threads.ts b/ext/node/polyfills/worker_threads.ts
index 1b175fb1dd..dc844169c5 100644
--- a/ext/node/polyfills/worker_threads.ts
+++ b/ext/node/polyfills/worker_threads.ts
@@ -21,7 +21,7 @@ import {
   nodeWorkerThreadCloseCb,
   refMessagePort,
   serializeJsMessageData,
-  unrefPollForMessages,
+  unrefParentPort,
 } from "ext:deno_web/13_message_port.js";
 import * as webidl from "ext:deno_webidl/00_webidl.js";
 import { notImplemented } from "ext:deno_node/_utils.ts";
@@ -451,10 +451,10 @@ internals.__initWorkerThreads = (
       parentPort.emit("close");
     });
     parentPort.unref = () => {
-      parentPort[unrefPollForMessages] = true;
+      parentPort[unrefParentPort] = true;
    };
     parentPort.ref = () => {
-      parentPort[unrefPollForMessages] = false;
+      parentPort[unrefParentPort] = false;
    };
 
     if (isWorkerThread) {
diff --git a/ext/telemetry/Cargo.toml b/ext/telemetry/Cargo.toml
index d0bdc6be1b..fedaed6656 100644
--- a/ext/telemetry/Cargo.toml
+++ b/ext/telemetry/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_telemetry"
-version = "0.5.0"
+version = "0.6.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
diff --git a/ext/telemetry/lib.rs b/ext/telemetry/lib.rs
index 816e838743..8018843dc4 100644
--- a/ext/telemetry/lib.rs
+++ b/ext/telemetry/lib.rs
@@ -6,16 +6,22 @@ use deno_core::futures::channel::mpsc;
 use deno_core::futures::channel::mpsc::UnboundedSender;
 use deno_core::futures::future::BoxFuture;
 use deno_core::futures::stream;
+use deno_core::futures::FutureExt;
 use deno_core::futures::Stream;
 use deno_core::futures::StreamExt;
 use deno_core::op2;
 use deno_core::v8;
+use deno_core::GarbageCollected;
 use deno_core::OpState;
 use once_cell::sync::Lazy;
 use once_cell::sync::OnceCell;
 use opentelemetry::logs::AnyValue;
 use opentelemetry::logs::LogRecord as LogRecordTrait;
 use opentelemetry::logs::Severity;
+use opentelemetry::metrics::AsyncInstrumentBuilder;
+use opentelemetry::metrics::InstrumentBuilder;
+use opentelemetry::metrics::MeterProvider;
+use opentelemetry::otel_debug;
 use opentelemetry::otel_error;
 use opentelemetry::trace::SpanContext;
 use opentelemetry::trace::SpanId;
@@ -28,7 +34,6 @@ use opentelemetry::KeyValue;
 use opentelemetry::StringValue;
 use opentelemetry::Value;
 use opentelemetry_otlp::HttpExporterBuilder;
-use opentelemetry_otlp::MetricExporter;
 use opentelemetry_otlp::Protocol;
 use opentelemetry_otlp::WithExportConfig;
 use opentelemetry_otlp::WithHttpConfig;
@@ -36,10 +41,11 @@ use opentelemetry_sdk::export::trace::SpanData;
 use opentelemetry_sdk::logs::BatchLogProcessor;
 use opentelemetry_sdk::logs::LogProcessor;
 use opentelemetry_sdk::logs::LogRecord;
-use opentelemetry_sdk::metrics::data::Metric;
-use opentelemetry_sdk::metrics::data::ResourceMetrics;
-use opentelemetry_sdk::metrics::data::ScopeMetrics;
 use opentelemetry_sdk::metrics::exporter::PushMetricExporter;
+use opentelemetry_sdk::metrics::reader::MetricReader;
+use opentelemetry_sdk::metrics::ManualReader;
+use opentelemetry_sdk::metrics::MetricResult;
+use opentelemetry_sdk::metrics::SdkMeterProvider;
 use opentelemetry_sdk::metrics::Temporality;
 use opentelemetry_sdk::trace::BatchSpanProcessor;
 use opentelemetry_sdk::trace::SpanProcessor;
@@ -52,14 +58,21 @@ use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_VERSION;
 use serde::Deserialize;
 use serde::Serialize;
 use std::borrow::Cow;
+use std::cell::RefCell;
+use std::collections::HashMap;
 use std::env;
 use std::fmt::Debug;
 use std::pin::Pin;
+use std::rc::Rc;
+use std::sync::Arc;
+use std::sync::Mutex;
 use std::task::Context;
 use std::task::Poll;
 use std::thread;
 use std::time::Duration;
 use std::time::SystemTime;
+use tokio::sync::oneshot;
+use tokio::task::JoinSet;
 
 deno_core::extension!(
   deno_telemetry,
@@ -75,23 +88,24 @@ deno_core::extension!(
     op_otel_span_attribute3,
     op_otel_span_set_dropped,
     op_otel_span_flush,
-    op_otel_metrics_resource_attribute,
-    op_otel_metrics_resource_attribute2,
-    op_otel_metrics_resource_attribute3,
-    op_otel_metrics_scope,
-    op_otel_metrics_sum,
-    op_otel_metrics_gauge,
-    op_otel_metrics_sum_or_gauge_data_point,
-    op_otel_metrics_histogram,
-    op_otel_metrics_histogram_data_point,
-    op_otel_metrics_histogram_data_point_entry_final,
-    op_otel_metrics_histogram_data_point_entry1,
-    op_otel_metrics_histogram_data_point_entry2,
-    op_otel_metrics_histogram_data_point_entry3,
-    op_otel_metrics_data_point_attribute,
-    op_otel_metrics_data_point_attribute2,
-    op_otel_metrics_data_point_attribute3,
-    op_otel_metrics_submit,
+    op_otel_metric_create_counter,
+    op_otel_metric_create_up_down_counter,
+    op_otel_metric_create_gauge,
+    op_otel_metric_create_histogram,
+    op_otel_metric_create_observable_counter,
+    op_otel_metric_create_observable_gauge,
+    op_otel_metric_create_observable_up_down_counter,
+    op_otel_metric_attribute3,
+    op_otel_metric_record0,
+    op_otel_metric_record1,
+    op_otel_metric_record2,
+    op_otel_metric_record3,
+    op_otel_metric_observable_record0,
+    op_otel_metric_observable_record1,
+    op_otel_metric_observable_record2,
+    op_otel_metric_observable_record3,
+    op_otel_metric_wait_to_observe,
+    op_otel_metric_observation_done,
   ],
   esm = ["telemetry.ts", "util.ts"],
 );
@@ -105,6 +119,7 @@ pub struct OtelRuntimeConfig {
 #[derive(Default, Debug, Clone, Serialize, Deserialize)]
 pub struct OtelConfig {
   pub tracing_enabled: bool,
+  pub metrics_enabled: bool,
   pub console: OtelConsoleConfig,
   pub deterministic: bool,
 }
@@ -113,6 +128,7 @@ impl OtelConfig {
   pub fn as_v8(&self) -> Box<[u8]> {
     Box::new([
       self.tracing_enabled as u8,
+      self.metrics_enabled as u8,
      self.console as u8,
       self.deterministic as u8,
     ])
@@ -137,6 +153,10 @@ static OTEL_SHARED_RUNTIME_SPAWN_TASK_TX: Lazy<
   UnboundedSender<BoxFuture<'static, ()>>,
 > = Lazy::new(otel_create_shared_runtime);
 
+static OTEL_PRE_COLLECT_CALLBACKS: Lazy<
+  Mutex<Vec<oneshot::Sender<oneshot::Sender<()>>>>,
+> = Lazy::new(Default::default);
+
 fn otel_create_shared_runtime() -> UnboundedSender<BoxFuture<'static, ()>> {
   let (spawn_task_tx, mut spawn_task_rx) =
     mpsc::unbounded::<BoxFuture<'static, ()>>();
@@ -273,6 +293,181 @@ impl Stream for BatchMessageChannelReceiver {
   }
 }
 
+enum DenoPeriodicReaderMessage {
+  Register(std::sync::Weak<opentelemetry_sdk::metrics::Pipeline>),
+  Export,
+  ForceFlush(oneshot::Sender<MetricResult<()>>),
+  Shutdown(oneshot::Sender<MetricResult<()>>),
+}
+
+#[derive(Debug)]
+struct DenoPeriodicReader {
+  tx: tokio::sync::mpsc::Sender<DenoPeriodicReaderMessage>,
+  temporality: Temporality,
+}
+
+impl MetricReader for DenoPeriodicReader {
+  fn register_pipeline(
+    &self,
+    pipeline: std::sync::Weak<opentelemetry_sdk::metrics::Pipeline>,
+  ) {
+    let _ = self
+      .tx
+      .try_send(DenoPeriodicReaderMessage::Register(pipeline));
+  }
+
+  fn collect(
+    &self,
+    _rm: &mut opentelemetry_sdk::metrics::data::ResourceMetrics,
+  ) -> opentelemetry_sdk::metrics::MetricResult<()> {
+    unreachable!("collect should not be called on DenoPeriodicReader");
+  }
+
+  fn force_flush(&self) -> opentelemetry_sdk::metrics::MetricResult<()> {
+    let (tx, rx) = oneshot::channel();
+    let _ = self.tx.try_send(DenoPeriodicReaderMessage::ForceFlush(tx));
+    deno_core::futures::executor::block_on(rx).unwrap()?;
+    Ok(())
+  }
+
+  fn shutdown(&self) -> opentelemetry_sdk::metrics::MetricResult<()> {
+    let (tx, rx) = oneshot::channel();
+    let _ = self.tx.try_send(DenoPeriodicReaderMessage::Shutdown(tx));
+    deno_core::futures::executor::block_on(rx).unwrap()?;
+    Ok(())
+  }
+
+  fn temporality(
+    &self,
+    _kind: opentelemetry_sdk::metrics::InstrumentKind,
+  ) -> Temporality {
+    self.temporality
+  }
+}
+
+const METRIC_EXPORT_INTERVAL_NAME: &str = "OTEL_METRIC_EXPORT_INTERVAL";
+const DEFAULT_INTERVAL: Duration = Duration::from_secs(60);
+
+impl DenoPeriodicReader {
+  fn new(exporter: opentelemetry_otlp::MetricExporter) -> Self {
+    let interval = env::var(METRIC_EXPORT_INTERVAL_NAME)
+      .ok()
+      .and_then(|v| v.parse().map(Duration::from_millis).ok())
+      .unwrap_or(DEFAULT_INTERVAL);
+
+    let (tx, mut rx) = tokio::sync::mpsc::channel(256);
+
+    let temporality = PushMetricExporter::temporality(&exporter);
+
+    let worker = async move {
+      let inner = ManualReader::builder()
+        .with_temporality(PushMetricExporter::temporality(&exporter))
+        .build();
+
+      let collect_and_export = |collect_observed: bool| {
+        let inner = &inner;
+        let exporter = &exporter;
+        async move {
+          let mut resource_metrics =
+            opentelemetry_sdk::metrics::data::ResourceMetrics {
+              resource: Default::default(),
+              scope_metrics: Default::default(),
+            };
+          if collect_observed {
+            let callbacks = {
+              let mut callbacks = OTEL_PRE_COLLECT_CALLBACKS.lock().unwrap();
+              std::mem::take(&mut *callbacks)
+            };
+            let mut futures = JoinSet::new();
+            for callback in callbacks {
+              let (tx, rx) = oneshot::channel();
+              if let Ok(()) = callback.send(tx) {
+                futures.spawn(rx);
+              }
+            }
+            while futures.join_next().await.is_some() {}
+          }
+          inner.collect(&mut resource_metrics)?;
+          if resource_metrics.scope_metrics.is_empty() {
+            return Ok(());
+          }
+          exporter.export(&mut resource_metrics).await?;
+          Ok(())
+        }
+      };
+
+      let mut ticker = tokio::time::interval(interval);
+      ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Delay);
+      ticker.tick().await;
+
+      loop {
+        let message = tokio::select! {
+          _ = ticker.tick() => DenoPeriodicReaderMessage::Export,
+          message = rx.recv() => if let Some(message) = message {
+            message
+          } else {
+            break;
+          },
+        };
+
+        match message {
+          DenoPeriodicReaderMessage::Register(new_pipeline) => {
+            inner.register_pipeline(new_pipeline);
+          }
+          DenoPeriodicReaderMessage::Export => {
+            otel_debug!(
+              name: "DenoPeriodicReader.ExportTriggered",
+              message = "Export message received.",
+            );
+            if let Err(err) = collect_and_export(true).await {
+              otel_error!(
+                name: "DenoPeriodicReader.ExportFailed",
+                message = "Failed to export metrics",
+                reason = format!("{}", err));
+            }
+          }
+          DenoPeriodicReaderMessage::ForceFlush(sender) => {
+            otel_debug!(
+              name: "DenoPeriodicReader.ForceFlushCalled",
+              message = "Flush message received.",
+            );
+            let res = collect_and_export(false).await;
+            if let Err(send_error) = sender.send(res) {
+              otel_debug!(
+                name: "DenoPeriodicReader.Flush.SendResultError",
+                message = "Failed to send flush result.",
+                reason = format!("{:?}", send_error),
+              );
+            }
+          }
+          DenoPeriodicReaderMessage::Shutdown(sender) => {
+            otel_debug!(
+              name: "DenoPeriodicReader.ShutdownCalled",
+              message = "Shutdown message received",
+            );
+            let res = collect_and_export(false).await;
+            let _ = exporter.shutdown();
+            if let Err(send_error) = sender.send(res) {
+              otel_debug!(
+                name: "DenoPeriodicReader.Shutdown.SendResultError",
+                message = "Failed to send shutdown result",
+                reason = format!("{:?}", send_error),
+              );
+            }
+            break;
+          }
+        }
+      }
+    };
+
+    (*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX)
+      .unbounded_send(worker.boxed())
+      .expect("failed to send task to shared OpenTelemetry runtime");
+
+    DenoPeriodicReader { tx, temporality }
+  }
+}
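[Reviewer note: with the old push-style `MetricProcessor` replaced by this reader, instruments are now collected on the `OTEL_METRIC_EXPORT_INTERVAL` tick (default 60s), with observable callbacks drained via `OTEL_PRE_COLLECT_CALLBACKS` first. A sketch of what the JS side drives; the flag, env vars, and npm specifier reflect my understanding of Deno's OTEL integration and should be double-checked:]

```ts
// Run with: OTEL_DENO=true OTEL_METRIC_EXPORT_INTERVAL=5000 \
//   deno run -A --unstable-otel main.ts
import { metrics } from "npm:@opentelemetry/api@1";

const meter = metrics.getMeter("example");
const requests = meter.createCounter("requests");          // op_otel_metric_create_counter
const queueDepth = meter.createObservableGauge("queue_depth"); // op_otel_metric_create_observable_gauge

queueDepth.addCallback((result) => {
  // Invoked via the pre-collect callbacks before each periodic export.
  result.observe(42);
});

requests.add(1, { route: "/" }); // op_otel_metric_record1, one attribute
```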
 
 mod hyper_client {
   use http_body_util::BodyExt;
   use http_body_util::Full;
@@ -353,66 +548,10 @@ mod hyper_client {
   }
 }
 
-enum MetricProcessorMessage {
-  ResourceMetrics(ResourceMetrics),
-  Flush(tokio::sync::oneshot::Sender<()>),
-}
-
-struct MetricProcessor {
-  tx: tokio::sync::mpsc::Sender<MetricProcessorMessage>,
-}
-
-impl MetricProcessor {
-  fn new(exporter: MetricExporter) -> Self {
-    let (tx, mut rx) = tokio::sync::mpsc::channel(2048);
-    let future = async move {
-      while let Some(message) = rx.recv().await {
-        match message {
-          MetricProcessorMessage::ResourceMetrics(mut rm) => {
-            if let Err(err) = exporter.export(&mut rm).await {
-              otel_error!(
-                name: "MetricProcessor.Export.Error",
-                error = format!("{}", err)
-              );
-            }
-          }
-          MetricProcessorMessage::Flush(tx) => {
-            if let Err(()) = tx.send(()) {
-              otel_error!(
-                name: "MetricProcessor.Flush.SendResultError",
-                error = "()",
-              );
-            }
-          }
-        }
-      }
-    };
-
-    (*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX)
-      .unbounded_send(Box::pin(future))
-      .expect("failed to send task to shared OpenTelemetry runtime");
-
-    Self { tx }
-  }
-
-  fn submit(&self, rm: ResourceMetrics) {
-    let _ = self
-      .tx
-      .try_send(MetricProcessorMessage::ResourceMetrics(rm));
-  }
-
-  fn force_flush(&self) -> Result<(), anyhow::Error> {
-    let (tx, rx) = tokio::sync::oneshot::channel();
-    self.tx.try_send(MetricProcessorMessage::Flush(tx))?;
-    deno_core::futures::executor::block_on(rx)?;
-    Ok(())
-  }
-}
-
 struct Processors {
   spans: BatchSpanProcessor<OtelSharedRuntime>,
   logs: BatchLogProcessor<OtelSharedRuntime>,
-  metrics: MetricProcessor,
+  meter_provider: SdkMeterProvider,
 }
 
 static OTEL_PROCESSORS: OnceCell<Processors> = OnceCell::new();
@@ -421,7 +560,7 @@ static BUILT_IN_INSTRUMENTATION_SCOPE: OnceCell<
   opentelemetry::InstrumentationScope,
 > = OnceCell::new();
 
-pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> {
+pub fn init(rt_config: OtelRuntimeConfig) -> anyhow::Result<()> {
   // Parse the `OTEL_EXPORTER_OTLP_PROTOCOL` variable. The opentelemetry_*
   // crates don't do this automatically.
   // TODO(piscisaureus): enable GRPC support.
@@ -454,8 +593,8 @@ pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> {
   // Add the runtime name and version to the resource attributes. Also override
   // the `telemetry.sdk` attributes to include the Deno runtime.
   resource = resource.merge(&Resource::new(vec![
-    KeyValue::new(PROCESS_RUNTIME_NAME, config.runtime_name),
-    KeyValue::new(PROCESS_RUNTIME_VERSION, config.runtime_version.clone()),
+    KeyValue::new(PROCESS_RUNTIME_NAME, rt_config.runtime_name),
+    KeyValue::new(PROCESS_RUNTIME_VERSION, rt_config.runtime_version.clone()),
     KeyValue::new(
       TELEMETRY_SDK_LANGUAGE,
       format!(
@@ -474,7 +613,7 @@ pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> {
       TELEMETRY_SDK_VERSION,
       format!(
         "{}-{}",
-        config.runtime_version,
+        rt_config.runtime_version,
         resource.get(Key::new(TELEMETRY_SDK_VERSION)).unwrap()
       ),
     ),
@@ -494,11 +633,30 @@ pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> {
     BatchSpanProcessor::builder(span_exporter, OtelSharedRuntime).build();
   span_processor.set_resource(&resource);
 
+  let temporality_preference =
+    env::var("OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE")
+      .ok()
+      .map(|s| s.to_lowercase());
+  let temporality = match temporality_preference.as_deref() {
+    None | Some("cumulative") => Temporality::Cumulative,
+    Some("delta") => Temporality::Delta,
+    Some("lowmemory") => Temporality::LowMemory,
+    Some(other) => {
+      return Err(anyhow!(
+        "Invalid value for OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: {}",
+        other
+      ));
+    }
+  };
   let metric_exporter = HttpExporterBuilder::default()
     .with_http_client(client.clone())
     .with_protocol(protocol)
-    .build_metrics_exporter(Temporality::Cumulative)?;
-  let metric_processor = MetricProcessor::new(metric_exporter);
+    .build_metrics_exporter(temporality)?;
+  let metric_reader = DenoPeriodicReader::new(metric_exporter);
+  let meter_provider = SdkMeterProvider::builder()
+    .with_reader(metric_reader)
+    .with_resource(resource.clone())
+    .build();
 
   let log_exporter = HttpExporterBuilder::default()
     .with_http_client(client)
@@ -512,13 +670,13 @@ pub fn init(config: OtelRuntimeConfig) -> anyhow::Result<()> {
     .set(Processors {
       spans: span_processor,
       logs: log_processor,
-      metrics: metric_processor,
+      meter_provider,
     })
     .map_err(|_| anyhow!("failed to init otel"))?;
 
   let builtin_instrumentation_scope =
     opentelemetry::InstrumentationScope::builder("deno")
-      .with_version(config.runtime_version.clone())
+      .with_version(rt_config.runtime_version.clone())
       .build();
   BUILT_IN_INSTRUMENTATION_SCOPE
     .set(builtin_instrumentation_scope)
@@ -534,12 +692,12 @@ pub fn flush() {
   if let Some(Processors {
     spans,
     logs,
-    metrics,
+    meter_provider,
   }) = OTEL_PROCESSORS.get()
   {
     let _ = spans.force_flush();
     let _ = logs.force_flush();
-    let _ = metrics.force_flush();
+    let _ = meter_provider.force_flush();
   }
 }
 
@@ -659,8 +817,8 @@ fn parse_span_id(
   }
 }
 
-macro_rules! attr {
-  ($scope:ident, $attributes:expr $(=> $dropped_attributes_count:expr)?, $name:expr, $value:expr) => {
+macro_rules! attr_raw {
+  ($scope:ident, $name:expr, $value:expr) => {{
     let name = if let Ok(name) = $name.try_cast() {
       let view = v8::ValueView::new($scope, name);
       match view.data() {
@@ -695,7 +853,18 @@ macro_rules! attr {
       None
     };
     if let (Some(name), Some(value)) = (name, value) {
-      $attributes.push(KeyValue::new(name, value));
+      Some(KeyValue::new(name, value))
+    } else {
+      None
+    }
+  }};
+}
+
+macro_rules! attr {
+  ($scope:ident, $attributes:expr $(=> $dropped_attributes_count:expr)?, $name:expr, $value:expr) => {
+    let attr = attr_raw!($scope, $name, $value);
+    if let Some(kv) = attr {
+      $attributes.push(kv);
     }
     $(
       else {
@@ -909,7 +1078,8 @@ fn op_otel_span_attribute<'s>(
 ) {
   if let Some(temporary_span) = state.try_borrow_mut::<TemporarySpan>() {
     temporary_span.0.attributes.reserve_exact(
-      (capacity as usize) - temporary_span.0.attributes.capacity(),
+      (capacity as usize)
+        .saturating_sub(temporary_span.0.attributes.capacity()),
     );
     attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key, value);
   }
@@ -927,7 +1097,8 @@ fn op_otel_span_attribute2<'s>(
 ) {
   if let Some(temporary_span) = state.try_borrow_mut::<TemporarySpan>() {
     temporary_span.0.attributes.reserve_exact(
-      (capacity as usize) - temporary_span.0.attributes.capacity(),
+      (capacity as usize)
+        .saturating_sub(temporary_span.0.attributes.capacity()),
     );
     attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key1, value1);
     attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key2, value2);
@@ -949,7 +1120,8 @@ fn op_otel_span_attribute3<'s>(
 ) {
   if let Some(temporary_span) = state.try_borrow_mut::<TemporarySpan>() {
     temporary_span.0.attributes.reserve_exact(
-      (capacity as usize) - temporary_span.0.attributes.capacity(),
+      (capacity as usize)
+        .saturating_sub(temporary_span.0.attributes.capacity()),
    );
     attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key1, value1);
     attr!(scope, temporary_span.0.attributes => temporary_span.0.dropped_attributes_count, key2, value2);
@@ -984,538 +1156,572 @@ fn op_otel_span_flush(state: &mut OpState) {
   spans.on_end(temporary_span.0);
 }
 
-// Holds data being built from JS before
-// it is submitted to the rust processor.
-struct TemporaryMetricsExport {
-  resource_attributes: Vec<KeyValue>,
-  scope_metrics: Vec<ScopeMetrics>,
-  metric: Option<TemporaryMetric>,
+enum Instrument {
+  Counter(opentelemetry::metrics::Counter<f64>),
+  UpDownCounter(opentelemetry::metrics::UpDownCounter<f64>),
+  Gauge(opentelemetry::metrics::Gauge<f64>),
+  Histogram(opentelemetry::metrics::Histogram<f64>),
+  Observable(Arc<Mutex<HashMap<Vec<KeyValue>, f64>>>),
 }
 
-struct TemporaryMetric {
-  name: String,
-  description: String,
-  unit: String,
-  data: TemporaryMetricData,
-}
+impl GarbageCollected for Instrument {}
 
-enum TemporaryMetricData {
-  Sum(opentelemetry_sdk::metrics::data::Sum<f64>),
-  Gauge(opentelemetry_sdk::metrics::data::Gauge<f64>),
-  Histogram(opentelemetry_sdk::metrics::data::Histogram<f64>),
-}
-
-impl From<TemporaryMetric> for Metric {
-  fn from(value: TemporaryMetric) -> Self {
-    Metric {
-      name: Cow::Owned(value.name),
-      description: Cow::Owned(value.description),
-      unit: Cow::Owned(value.unit),
-      data: match value.data {
-        TemporaryMetricData::Sum(sum) => Box::new(sum),
-        TemporaryMetricData::Gauge(gauge) => Box::new(gauge),
-        TemporaryMetricData::Histogram(histogram) => Box::new(histogram),
-      },
-    }
-  }
-}
-
-#[op2(fast)]
-fn op_otel_metrics_resource_attribute<'s>(
-  scope: &mut v8::HandleScope<'s>,
+fn create_instrument<'a, T>(
+  cb: impl FnOnce(
+    &'_ opentelemetry::metrics::Meter,
+    String,
+  ) -> InstrumentBuilder<'_, T>,
+  cb2: impl FnOnce(InstrumentBuilder<'_, T>) -> Instrument,
   state: &mut OpState,
-  #[smi] capacity: u32,
-  key: v8::Local<'s, v8::Value>,
-  value: v8::Local<'s, v8::Value>,
-) {
-  let metrics_export = if let Some(metrics_export) =
-    state.try_borrow_mut::<TemporaryMetricsExport>()
-  {
-    metrics_export.resource_attributes.reserve_exact(
-      (capacity as usize) - metrics_export.resource_attributes.capacity(),
-    );
-    metrics_export
-  } else {
-    state.put(TemporaryMetricsExport {
-      resource_attributes: Vec::with_capacity(capacity as usize),
-      scope_metrics: vec![],
-      metric: None,
-    });
-    state.borrow_mut()
+  scope: &mut v8::HandleScope<'a>,
+  name: v8::Local<'a, v8::Value>,
+  description: v8::Local<'a, v8::Value>,
+  unit: v8::Local<'a, v8::Value>,
+) -> Result<Instrument, anyhow::Error> {
+  let Some(InstrumentationScope(instrumentation_scope)) =
+    state.try_borrow::<InstrumentationScope>()
+  else {
+    return Err(anyhow!("instrumentation scope not available"));
  };
-  attr!(scope, metrics_export.resource_attributes, key, value);
-}
 
-#[op2(fast)]
-fn op_otel_metrics_resource_attribute2<'s>(
-  scope: &mut v8::HandleScope<'s>,
-  state: &mut OpState,
-  #[smi] capacity: u32,
-  key1: v8::Local<'s, v8::Value>,
-  value1: v8::Local<'s, v8::Value>,
-  key2: v8::Local<'s, v8::Value>,
-  value2: v8::Local<'s, v8::Value>,
-) {
-  let metrics_export = if let Some(metrics_export) =
-    state.try_borrow_mut::<TemporaryMetricsExport>()
-  {
-    metrics_export.resource_attributes.reserve_exact(
-      (capacity as usize) - metrics_export.resource_attributes.capacity(),
-    );
-    metrics_export
-  } else {
-    state.put(TemporaryMetricsExport {
-      resource_attributes: Vec::with_capacity(capacity as usize),
-      scope_metrics: vec![],
-      metric: None,
-    });
-    state.borrow_mut()
+  let meter = OTEL_PROCESSORS
+    .get()
+    .unwrap()
+    .meter_provider
+    .meter_with_scope(instrumentation_scope.clone());
+
+  let name = owned_string(scope, name.try_cast()?);
+  let mut builder = cb(&meter, name);
+  if !description.is_null_or_undefined() {
+    let description = owned_string(scope, description.try_cast()?);
+    builder = builder.with_description(description);
  };
-  attr!(scope, metrics_export.resource_attributes, key1, value1);
-  attr!(scope, metrics_export.resource_attributes, key2, value2);
-}
-
-#[allow(clippy::too_many_arguments)]
-#[op2(fast)]
-fn op_otel_metrics_resource_attribute3<'s>(
-  scope: &mut v8::HandleScope<'s>,
-  state: &mut OpState,
-  #[smi] capacity: u32,
-  key1: v8::Local<'s, v8::Value>,
-  value1: v8::Local<'s, v8::Value>,
-  key2: v8::Local<'s, v8::Value>,
-  value2: v8::Local<'s, v8::Value>,
-  key3: v8::Local<'s, v8::Value>,
-  value3: v8::Local<'s, v8::Value>,
-) {
-  let metrics_export = if let Some(metrics_export) =
-    state.try_borrow_mut::<TemporaryMetricsExport>()
-  {
-    metrics_export.resource_attributes.reserve_exact(
-      (capacity as usize) - metrics_export.resource_attributes.capacity(),
-    );
-    metrics_export
-  } else {
-    state.put(TemporaryMetricsExport {
-      resource_attributes: Vec::with_capacity(capacity as usize),
-      scope_metrics: vec![],
-      metric: None,
-    });
-    state.borrow_mut()
+  if !unit.is_null_or_undefined() {
+    let unit = owned_string(scope, unit.try_cast()?);
+    builder = builder.with_unit(unit);
   };
-  attr!(scope, metrics_export.resource_attributes, key1, value1);
-  attr!(scope, metrics_export.resource_attributes, key2, value2);
-  attr!(scope, metrics_export.resource_attributes, key3, value3);
+
+  Ok(cb2(builder))
 }
 
-#[op2(fast)]
-fn op_otel_metrics_scope<'s>(
-  scope: &mut v8::HandleScope<'s>,
+#[op2]
+#[cppgc]
+fn op_otel_metric_create_counter<'s>(
   state: &mut OpState,
+  scope: &mut v8::HandleScope<'s>,
   name: v8::Local<'s, v8::Value>,
-  schema_url: v8::Local<'s, v8::Value>,
-  version: v8::Local<'s, v8::Value>,
-) {
-  let name = owned_string(scope, name.cast());
-
-  let scope_builder = opentelemetry::InstrumentationScope::builder(name);
-  let scope_builder = if schema_url.is_null_or_undefined() {
-    scope_builder
-  } else {
-    scope_builder.with_schema_url(owned_string(scope, schema_url.cast()))
-  };
-  let scope_builder = if version.is_null_or_undefined() {
-    scope_builder
-  } else {
-    scope_builder.with_version(owned_string(scope, version.cast()))
-  };
-  let scope = scope_builder.build();
-  let scope_metric = ScopeMetrics {
+  description: v8::Local<'s, v8::Value>,
+  unit: v8::Local<'s, v8::Value>,
+) -> Result<Instrument, anyhow::Error> {
+  create_instrument(
+    |meter, name| meter.f64_counter(name),
+    |i| Instrument::Counter(i.build()),
+    state,
     scope,
-    metrics: vec![],
-  };
-
-  match state.try_borrow_mut::<TemporaryMetricsExport>() {
-    Some(temp) => {
-      if let Some(current_metric) = temp.metric.take() {
-        let metric = Metric::from(current_metric);
-        temp.scope_metrics.last_mut().unwrap().metrics.push(metric);
-      }
-      temp.scope_metrics.push(scope_metric);
-    }
-    None => {
-      state.put(TemporaryMetricsExport {
-        resource_attributes: vec![],
-        scope_metrics: vec![scope_metric],
-        metric: None,
-      });
-    }
-  }
-}
-
-#[op2(fast)]
-fn op_otel_metrics_sum<'s>(
-  scope: &mut v8::HandleScope<'s>,
-  state: &mut OpState,
-  name: v8::Local<'s, v8::Value>,
-  description: v8::Local<'s, v8::Value>,
-  unit: v8::Local<'s, v8::Value>,
-  #[smi] temporality: u8,
-  is_monotonic: bool,
-) {
-  let Some(temp) = state.try_borrow_mut::<TemporaryMetricsExport>() else {
-    return;
-  };
-
-  if let Some(current_metric) = temp.metric.take() {
-    let metric = Metric::from(current_metric);
-    temp.scope_metrics.last_mut().unwrap().metrics.push(metric);
-  }
-
-  let name = owned_string(scope, name.cast());
-  let description = owned_string(scope, description.cast());
-  let unit = owned_string(scope, unit.cast());
-  let temporality = match temporality {
-    0 => Temporality::Delta,
-    1 => Temporality::Cumulative,
-    _ => return,
-  };
-  let sum = opentelemetry_sdk::metrics::data::Sum {
-    data_points: vec![],
-    temporality,
-    is_monotonic,
-  };
-
-  temp.metric = Some(TemporaryMetric {
     name,
     description,
     unit,
-    data: TemporaryMetricData::Sum(sum),
}); + ) } -#[op2(fast)] -fn op_otel_metrics_gauge<'s>( - scope: &mut v8::HandleScope<'s>, +#[op2] +#[cppgc] +fn op_otel_metric_create_up_down_counter<'s>( state: &mut OpState, + scope: &mut v8::HandleScope<'s>, name: v8::Local<'s, v8::Value>, description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(current_metric) = temp.metric.take() { - let metric = Metric::from(current_metric); - temp.scope_metrics.last_mut().unwrap().metrics.push(metric); - } - - let name = owned_string(scope, name.cast()); - let description = owned_string(scope, description.cast()); - let unit = owned_string(scope, unit.cast()); - - let gauge = opentelemetry_sdk::metrics::data::Gauge { - data_points: vec![], - }; - - temp.metric = Some(TemporaryMetric { +) -> Result { + create_instrument( + |meter, name| meter.f64_up_down_counter(name), + |i| Instrument::UpDownCounter(i.build()), + state, + scope, name, description, unit, - data: TemporaryMetricData::Gauge(gauge), + ) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_gauge<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_instrument( + |meter, name| meter.f64_gauge(name), + |i| Instrument::Gauge(i.build()), + state, + scope, + name, + description, + unit, + ) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_histogram<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, + #[serde] boundaries: Option>, +) -> Result { + let Some(InstrumentationScope(instrumentation_scope)) = + state.try_borrow::() + else { + return Err(anyhow!("instrumentation scope not available")); + }; + + let meter = OTEL_PROCESSORS + .get() + .unwrap() + .meter_provider + .meter_with_scope(instrumentation_scope.clone()); + + let name = owned_string(scope, name.try_cast()?); + let mut builder = meter.f64_histogram(name); + if !description.is_null_or_undefined() { + let description = owned_string(scope, description.try_cast()?); + builder = builder.with_description(description); + }; + if !unit.is_null_or_undefined() { + let unit = owned_string(scope, unit.try_cast()?); + builder = builder.with_unit(unit); + }; + if let Some(boundaries) = boundaries { + builder = builder.with_boundaries(boundaries); + } + + Ok(Instrument::Histogram(builder.build())) +} + +fn create_async_instrument<'a, T>( + cb: impl FnOnce( + &'_ opentelemetry::metrics::Meter, + String, + ) -> AsyncInstrumentBuilder<'_, T, f64>, + cb2: impl FnOnce(AsyncInstrumentBuilder<'_, T, f64>), + state: &mut OpState, + scope: &mut v8::HandleScope<'a>, + name: v8::Local<'a, v8::Value>, + description: v8::Local<'a, v8::Value>, + unit: v8::Local<'a, v8::Value>, +) -> Result { + let Some(InstrumentationScope(instrumentation_scope)) = + state.try_borrow::() + else { + return Err(anyhow!("instrumentation scope not available")); + }; + + let meter = OTEL_PROCESSORS + .get() + .unwrap() + .meter_provider + .meter_with_scope(instrumentation_scope.clone()); + + let name = owned_string(scope, name.try_cast()?); + let mut builder = cb(&meter, name); + if !description.is_null_or_undefined() { + let description = owned_string(scope, description.try_cast()?); + builder = builder.with_description(description); + }; + if !unit.is_null_or_undefined() { + let unit = owned_string(scope, unit.try_cast()?); 
+ builder = builder.with_unit(unit); + }; + + let data_share = Arc::new(Mutex::new(HashMap::new())); + let data_share_: Arc, f64>>> = data_share.clone(); + builder = builder.with_callback(move |i| { + let data = { + let mut data = data_share_.lock().unwrap(); + std::mem::take(&mut *data) + }; + for (attributes, value) in data { + i.observe(value, &attributes); + } }); + cb2(builder); + + Ok(Instrument::Observable(data_share)) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_observable_counter<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_async_instrument( + |meter, name| meter.f64_observable_counter(name), + |i| { + i.build(); + }, + state, + scope, + name, + description, + unit, + ) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_observable_up_down_counter<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_async_instrument( + |meter, name| meter.f64_observable_up_down_counter(name), + |i| { + i.build(); + }, + state, + scope, + name, + description, + unit, + ) +} + +#[op2] +#[cppgc] +fn op_otel_metric_create_observable_gauge<'s>( + state: &mut OpState, + scope: &mut v8::HandleScope<'s>, + name: v8::Local<'s, v8::Value>, + description: v8::Local<'s, v8::Value>, + unit: v8::Local<'s, v8::Value>, +) -> Result { + create_async_instrument( + |meter, name| meter.f64_observable_gauge(name), + |i| { + i.build(); + }, + state, + scope, + name, + description, + unit, + ) +} + +struct MetricAttributes { + attributes: Vec, } #[op2(fast)] -fn op_otel_metrics_sum_or_gauge_data_point( +fn op_otel_metric_record0( state: &mut OpState, + #[cppgc] instrument: &Instrument, value: f64, - start_time: f64, - time: f64, ) { - let Some(temp) = state.try_borrow_mut::() else { - return; + let values = state.try_take::(); + let attributes = match &values { + Some(values) => &*values.attributes, + None => &[], }; - - let start_time = SystemTime::UNIX_EPOCH - .checked_add(std::time::Duration::from_secs_f64(start_time)) - .unwrap(); - let time = SystemTime::UNIX_EPOCH - .checked_add(std::time::Duration::from_secs_f64(time)) - .unwrap(); - - let data_point = opentelemetry_sdk::metrics::data::DataPoint { - value, - start_time: Some(start_time), - time: Some(time), - attributes: vec![], - exemplars: vec![], - }; - - match &mut temp.metric { - Some(TemporaryMetric { - data: TemporaryMetricData::Sum(sum), - .. - }) => sum.data_points.push(data_point), - Some(TemporaryMetric { - data: TemporaryMetricData::Gauge(gauge), - .. 
- }) => gauge.data_points.push(data_point), + match instrument { + Instrument::Counter(counter) => counter.add(value, attributes), + Instrument::UpDownCounter(counter) => counter.add(value, attributes), + Instrument::Gauge(gauge) => gauge.record(value, attributes), + Instrument::Histogram(histogram) => histogram.record(value, attributes), _ => {} } } #[op2(fast)] -fn op_otel_metrics_histogram<'s>( - scope: &mut v8::HandleScope<'s>, +fn op_otel_metric_record1( state: &mut OpState, - name: v8::Local<'s, v8::Value>, - description: v8::Local<'s, v8::Value>, - unit: v8::Local<'s, v8::Value>, - #[smi] temporality: u8, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, ) { - let Some(temp) = state.try_borrow_mut::() else { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { return; }; - - if let Some(current_metric) = temp.metric.take() { - let metric = Metric::from(current_metric); - temp.scope_metrics.last_mut().unwrap().metrics.push(metric); - } - - let name = owned_string(scope, name.cast()); - let description = owned_string(scope, description.cast()); - let unit = owned_string(scope, unit.cast()); - - let temporality = match temporality { - 0 => Temporality::Delta, - 1 => Temporality::Cumulative, - _ => return, - }; - let histogram = opentelemetry_sdk::metrics::data::Histogram { - data_points: vec![], - temporality, - }; - - temp.metric = Some(TemporaryMetric { - name, - description, - unit, - data: TemporaryMetricData::Histogram(histogram), - }); -} - -#[allow(clippy::too_many_arguments)] -#[op2(fast)] -fn op_otel_metrics_histogram_data_point( - state: &mut OpState, - #[number] count: u64, - min: f64, - max: f64, - sum: f64, - start_time: f64, - time: f64, - #[smi] buckets: u32, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - let min = if min.is_nan() { None } else { Some(min) }; - let max = if max.is_nan() { None } else { Some(max) }; - - let start_time = SystemTime::UNIX_EPOCH - .checked_add(std::time::Duration::from_secs_f64(start_time)) - .unwrap(); - let time = SystemTime::UNIX_EPOCH - .checked_add(std::time::Duration::from_secs_f64(time)) - .unwrap(); - - let data_point = opentelemetry_sdk::metrics::data::HistogramDataPoint { - bounds: Vec::with_capacity(buckets as usize), - bucket_counts: Vec::with_capacity((buckets as usize) + 1), - count, - sum, - min, - max, - start_time, - time, - attributes: vec![], - exemplars: vec![], - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. - }) = &mut temp.metric - { - histogram.data_points.push(data_point); - } -} - -#[op2(fast)] -fn op_otel_metrics_histogram_data_point_entry_final( - state: &mut OpState, - #[number] count1: u64, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. - }) = &mut temp.metric - { - histogram - .data_points - .last_mut() - .unwrap() - .bucket_counts - .push(count1) - } -} - -#[op2(fast)] -fn op_otel_metrics_histogram_data_point_entry1( - state: &mut OpState, - #[number] count1: u64, - bound1: f64, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. 
- }) = &mut temp.metric - { - let data_point = histogram.data_points.last_mut().unwrap(); - data_point.bucket_counts.push(count1); - data_point.bounds.push(bound1); - } -} - -#[op2(fast)] -fn op_otel_metrics_histogram_data_point_entry2( - state: &mut OpState, - #[number] count1: u64, - bound1: f64, - #[number] count2: u64, - bound2: f64, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. - }) = &mut temp.metric - { - let data_point = histogram.data_points.last_mut().unwrap(); - data_point.bucket_counts.push(count1); - data_point.bounds.push(bound1); - data_point.bucket_counts.push(count2); - data_point.bounds.push(bound2); - } -} - -#[op2(fast)] -fn op_otel_metrics_histogram_data_point_entry3( - state: &mut OpState, - #[number] count1: u64, - bound1: f64, - #[number] count2: u64, - bound2: f64, - #[number] count3: u64, - bound3: f64, -) { - let Some(temp) = state.try_borrow_mut::() else { - return; - }; - - if let Some(TemporaryMetric { - data: TemporaryMetricData::Histogram(histogram), - .. - }) = &mut temp.metric - { - let data_point = histogram.data_points.last_mut().unwrap(); - data_point.bucket_counts.push(count1); - data_point.bounds.push(bound1); - data_point.bucket_counts.push(count2); - data_point.bounds.push(bound2); - data_point.bucket_counts.push(count3); - data_point.bounds.push(bound3); - } -} - -#[op2(fast)] -fn op_otel_metrics_data_point_attribute<'s>( - scope: &mut v8::HandleScope<'s>, - state: &mut OpState, - #[smi] capacity: u32, - key: v8::Local<'s, v8::Value>, - value: v8::Local<'s, v8::Value>, -) { - if let Some(TemporaryMetricsExport { - metric: Some(metric), - .. - }) = state.try_borrow_mut::() - { - let attributes = match &mut metric.data { - TemporaryMetricData::Sum(sum) => { - &mut sum.data_points.last_mut().unwrap().attributes + let mut values = state.try_take::(); + let attr1 = attr_raw!(scope, key1, value1); + let attributes = match &mut values { + Some(values) => { + if let Some(kv) = attr1 { + values.attributes.reserve_exact(1); + values.attributes.push(kv); } - TemporaryMetricData::Gauge(gauge) => { - &mut gauge.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Histogram(histogram) => { - &mut histogram.data_points.last_mut().unwrap().attributes - } - }; - attributes.reserve_exact((capacity as usize) - attributes.capacity()); - attr!(scope, attributes, key, value); - } -} - -#[op2(fast)] -fn op_otel_metrics_data_point_attribute2<'s>( - scope: &mut v8::HandleScope<'s>, - state: &mut OpState, - #[smi] capacity: u32, - key1: v8::Local<'s, v8::Value>, - value1: v8::Local<'s, v8::Value>, - key2: v8::Local<'s, v8::Value>, - value2: v8::Local<'s, v8::Value>, -) { - if let Some(TemporaryMetricsExport { - metric: Some(metric), - .. 
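Together with op_otel_metric_attribute3, the record1/record2/record3 ops implement a small batching protocol: a measurement with more than three attributes stages full triples in OpState first, and the final record call (carrying at most three attributes) consumes the stash along with the value. A sketch of the call plan this produces, mirroring the batching loop in telemetry.ts further down:

    /// Which ops one measurement with `n` attributes turns into.
    fn plan_calls(n: usize) -> Vec<&'static str> {
        let mut calls = Vec::new();
        let mut remaining = n;
        while remaining > 3 {
            // Stages three attributes in OpState's MetricAttributes slot.
            calls.push("op_otel_metric_attribute3");
            remaining -= 3;
        }
        // The last call records the value and takes any staged attributes.
        calls.push(match remaining {
            0 => "op_otel_metric_record0",
            1 => "op_otel_metric_record1",
            2 => "op_otel_metric_record2",
            _ => "op_otel_metric_record3",
        });
        calls
    }

    fn main() {
        // Five attributes: one staged triple, then record2 with the last two.
        assert_eq!(
            plan_calls(5),
            ["op_otel_metric_attribute3", "op_otel_metric_record2"]
        );
    }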
- }) = state.try_borrow_mut::() - { - let attributes = match &mut metric.data { - TemporaryMetricData::Sum(sum) => { - &mut sum.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Gauge(gauge) => { - &mut gauge.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Histogram(histogram) => { - &mut histogram.data_points.last_mut().unwrap().attributes - } - }; - attributes.reserve_exact((capacity as usize) - attributes.capacity()); - attr!(scope, attributes, key1, value1); - attr!(scope, attributes, key2, value2); + &*values.attributes + } + None => match attr1 { + Some(kv1) => &[kv1] as &[KeyValue], + None => &[], + }, + }; + match &*instrument { + Instrument::Counter(counter) => counter.add(value, attributes), + Instrument::UpDownCounter(counter) => counter.add(value, attributes), + Instrument::Gauge(gauge) => gauge.record(value, attributes), + Instrument::Histogram(histogram) => histogram.record(value, attributes), + _ => {} } } #[allow(clippy::too_many_arguments)] #[op2(fast)] -fn op_otel_metrics_data_point_attribute3<'s>( +fn op_otel_metric_record2( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, + key2: v8::Local<'_, v8::Value>, + value2: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let mut values = state.try_take::(); + let attr1 = attr_raw!(scope, key1, value1); + let attr2 = attr_raw!(scope, key2, value2); + let attributes = match &mut values { + Some(values) => { + values.attributes.reserve_exact(2); + if let Some(kv1) = attr1 { + values.attributes.push(kv1); + } + if let Some(kv2) = attr2 { + values.attributes.push(kv2); + } + &*values.attributes + } + None => match (attr1, attr2) { + (Some(kv1), Some(kv2)) => &[kv1, kv2] as &[KeyValue], + (Some(kv1), None) => &[kv1], + (None, Some(kv2)) => &[kv2], + (None, None) => &[], + }, + }; + match &*instrument { + Instrument::Counter(counter) => counter.add(value, attributes), + Instrument::UpDownCounter(counter) => counter.add(value, attributes), + Instrument::Gauge(gauge) => gauge.record(value, attributes), + Instrument::Histogram(histogram) => histogram.record(value, attributes), + _ => {} + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_metric_record3( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, + key2: v8::Local<'_, v8::Value>, + value2: v8::Local<'_, v8::Value>, + key3: v8::Local<'_, v8::Value>, + value3: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let mut values = state.try_take::(); + let attr1 = attr_raw!(scope, key1, value1); + let attr2 = attr_raw!(scope, key2, value2); + let attr3 = attr_raw!(scope, key3, value3); + let attributes = match &mut values { + Some(values) => { + values.attributes.reserve_exact(3); + if let Some(kv1) = attr1 { + values.attributes.push(kv1); + } + if let Some(kv2) = attr2 { + values.attributes.push(kv2); + } + if let Some(kv3) = attr3 { + values.attributes.push(kv3); + } + &*values.attributes + } + None => match (attr1, attr2, attr3) { + (Some(kv1), Some(kv2), Some(kv3)) => &[kv1, kv2, kv3] as &[KeyValue], + (Some(kv1), Some(kv2), None) => &[kv1, kv2], + 
(Some(kv1), None, Some(kv3)) => &[kv1, kv3], + (None, Some(kv2), Some(kv3)) => &[kv2, kv3], + (Some(kv1), None, None) => &[kv1], + (None, Some(kv2), None) => &[kv2], + (None, None, Some(kv3)) => &[kv3], + (None, None, None) => &[], + }, + }; + match &*instrument { + Instrument::Counter(counter) => counter.add(value, attributes), + Instrument::UpDownCounter(counter) => counter.add(value, attributes), + Instrument::Gauge(gauge) => gauge.record(value, attributes), + Instrument::Histogram(histogram) => histogram.record(value, attributes), + _ => {} + } +} + +#[op2(fast)] +fn op_otel_metric_observable_record0( + state: &mut OpState, + #[cppgc] instrument: &Instrument, + value: f64, +) { + let values = state.try_take::(); + let attributes = values.map(|attr| attr.attributes).unwrap_or_default(); + if let Instrument::Observable(data_share) = instrument { + let mut data = data_share.lock().unwrap(); + data.insert(attributes, value); + } +} + +#[op2(fast)] +fn op_otel_metric_observable_record1( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let values = state.try_take::(); + let attr1 = attr_raw!(scope, key1, value1); + let mut attributes = values + .map(|mut attr| { + attr.attributes.reserve_exact(1); + attr.attributes + }) + .unwrap_or_else(|| Vec::with_capacity(1)); + if let Some(kv1) = attr1 { + attributes.push(kv1); + } + if let Instrument::Observable(data_share) = &*instrument { + let mut data = data_share.lock().unwrap(); + data.insert(attributes, value); + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_metric_observable_record2( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, + key2: v8::Local<'_, v8::Value>, + value2: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let values = state.try_take::(); + let mut attributes = values + .map(|mut attr| { + attr.attributes.reserve_exact(2); + attr.attributes + }) + .unwrap_or_else(|| Vec::with_capacity(2)); + let attr1 = attr_raw!(scope, key1, value1); + let attr2 = attr_raw!(scope, key2, value2); + if let Some(kv1) = attr1 { + attributes.push(kv1); + } + if let Some(kv2) = attr2 { + attributes.push(kv2); + } + if let Instrument::Observable(data_share) = &*instrument { + let mut data = data_share.lock().unwrap(); + data.insert(attributes, value); + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_metric_observable_record3( + state: &mut OpState, + scope: &mut v8::HandleScope<'_>, + instrument: v8::Local<'_, v8::Value>, + value: f64, + key1: v8::Local<'_, v8::Value>, + value1: v8::Local<'_, v8::Value>, + key2: v8::Local<'_, v8::Value>, + value2: v8::Local<'_, v8::Value>, + key3: v8::Local<'_, v8::Value>, + value3: v8::Local<'_, v8::Value>, +) { + let Some(instrument) = deno_core::_ops::try_unwrap_cppgc_object::( + &mut *scope, + instrument, + ) else { + return; + }; + let values = state.try_take::(); + let mut attributes = values + .map(|mut attr| { + attr.attributes.reserve_exact(3); + attr.attributes + }) + .unwrap_or_else(|| Vec::with_capacity(3)); + let attr1 = attr_raw!(scope, key1, value1); + let 
attr2 = attr_raw!(scope, key2, value2); + let attr3 = attr_raw!(scope, key3, value3); + if let Some(kv1) = attr1 { + attributes.push(kv1); + } + if let Some(kv2) = attr2 { + attributes.push(kv2); + } + if let Some(kv3) = attr3 { + attributes.push(kv3); + } + if let Instrument::Observable(data_share) = &*instrument { + let mut data = data_share.lock().unwrap(); + data.insert(attributes, value); + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_metric_attribute3<'s>( scope: &mut v8::HandleScope<'s>, state: &mut OpState, #[smi] capacity: u32, @@ -1526,49 +1732,60 @@ fn op_otel_metrics_data_point_attribute3<'s>( key3: v8::Local<'s, v8::Value>, value3: v8::Local<'s, v8::Value>, ) { - if let Some(TemporaryMetricsExport { - metric: Some(metric), - .. - }) = state.try_borrow_mut::() + let mut values = state.try_borrow_mut::(); + let attr1 = attr_raw!(scope, key1, value1); + let attr2 = attr_raw!(scope, key2, value2); + let attr3 = attr_raw!(scope, key3, value3); + if let Some(values) = &mut values { + values.attributes.reserve_exact( + (capacity as usize).saturating_sub(values.attributes.capacity()), + ); + if let Some(kv1) = attr1 { + values.attributes.push(kv1); + } + if let Some(kv2) = attr2 { + values.attributes.push(kv2); + } + if let Some(kv3) = attr3 { + values.attributes.push(kv3); + } + } else { + let mut attributes = Vec::with_capacity(capacity as usize); + if let Some(kv1) = attr1 { + attributes.push(kv1); + } + if let Some(kv2) = attr2 { + attributes.push(kv2); + } + if let Some(kv3) = attr3 { + attributes.push(kv3); + } + state.put(MetricAttributes { attributes }); + } +} + +struct ObservationDone(oneshot::Sender<()>); + +#[op2(async)] +async fn op_otel_metric_wait_to_observe(state: Rc>) -> bool { + let (tx, rx) = oneshot::channel(); { - let attributes = match &mut metric.data { - TemporaryMetricData::Sum(sum) => { - &mut sum.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Gauge(gauge) => { - &mut gauge.data_points.last_mut().unwrap().attributes - } - TemporaryMetricData::Histogram(histogram) => { - &mut histogram.data_points.last_mut().unwrap().attributes - } - }; - attributes.reserve_exact((capacity as usize) - attributes.capacity()); - attr!(scope, attributes, key1, value1); - attr!(scope, attributes, key2, value2); - attr!(scope, attributes, key3, value3); + OTEL_PRE_COLLECT_CALLBACKS + .lock() + .expect("mutex poisoned") + .push(tx); + } + if let Ok(done) = rx.await { + state.borrow_mut().put(ObservationDone(done)); + true + } else { + false } } #[op2(fast)] -fn op_otel_metrics_submit(state: &mut OpState) { - let Some(mut temp) = state.try_take::() else { - return; - }; - - let Some(Processors { metrics, .. 
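op_otel_metric_wait_to_observe (below) parks each JS observer on a oneshot channel; right before a collection, the exporter hands every parked observer a `done` sender and waits for all of them (signalled via op_otel_metric_observation_done) before reading the staged values. The collector half lives outside this hunk, so this is a plausible standalone sketch of the handshake, assuming a tokio runtime:

    use std::sync::Mutex;
    use std::time::Duration;

    use tokio::sync::oneshot;

    // Observers park here; the collector hands each one a `done` sender.
    static PRE_COLLECT: Mutex<Vec<oneshot::Sender<oneshot::Sender<()>>>> =
        Mutex::new(Vec::new());

    // Observer side: park until a collection is about to happen.
    async fn wait_to_observe() -> Option<oneshot::Sender<()>> {
        let (tx, rx) = oneshot::channel();
        PRE_COLLECT.lock().unwrap().push(tx);
        rx.await.ok()
    }

    // Collector side: wake every observer, then hold collection until each
    // one signals that its callbacks have run.
    async fn pre_collect() {
        let waiters = std::mem::take(&mut *PRE_COLLECT.lock().unwrap());
        let mut done_rxs = Vec::new();
        for waiter in waiters {
            let (done_tx, done_rx) = oneshot::channel();
            if waiter.send(done_tx).is_ok() {
                done_rxs.push(done_rx);
            }
        }
        for done in done_rxs {
            let _ = done.await;
        }
        // ...safe to read the staged observable values now...
    }

    #[tokio::main]
    async fn main() {
        let observer = tokio::spawn(async {
            if let Some(done) = wait_to_observe().await {
                // ...run the observable callbacks here...
                let _ = done.send(());
            }
        });
        tokio::time::sleep(Duration::from_millis(10)).await; // let it park
        pre_collect().await;
        observer.await.unwrap();
    }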
}) = OTEL_PROCESSORS.get() else { - return; - }; - - if let Some(current_metric) = temp.metric { - let metric = Metric::from(current_metric); - temp.scope_metrics.last_mut().unwrap().metrics.push(metric); +fn op_otel_metric_observation_done(state: &mut OpState) { + if let Some(ObservationDone(done)) = state.try_take::() { + let _ = done.send(()); } - - let resource = Resource::new(temp.resource_attributes); - let scope_metrics = temp.scope_metrics; - - metrics.submit(ResourceMetrics { - resource, - scope_metrics, - }); } diff --git a/ext/telemetry/telemetry.ts b/ext/telemetry/telemetry.ts index d1335f65b5..86b4fe059d 100644 --- a/ext/telemetry/telemetry.ts +++ b/ext/telemetry/telemetry.ts @@ -7,23 +7,24 @@ import { op_otel_instrumentation_scope_enter, op_otel_instrumentation_scope_enter_builtin, op_otel_log, - op_otel_metrics_data_point_attribute, - op_otel_metrics_data_point_attribute2, - op_otel_metrics_data_point_attribute3, - op_otel_metrics_gauge, - op_otel_metrics_histogram, - op_otel_metrics_histogram_data_point, - op_otel_metrics_histogram_data_point_entry1, - op_otel_metrics_histogram_data_point_entry2, - op_otel_metrics_histogram_data_point_entry3, - op_otel_metrics_histogram_data_point_entry_final, - op_otel_metrics_resource_attribute, - op_otel_metrics_resource_attribute2, - op_otel_metrics_resource_attribute3, - op_otel_metrics_scope, - op_otel_metrics_submit, - op_otel_metrics_sum, - op_otel_metrics_sum_or_gauge_data_point, + op_otel_metric_attribute3, + op_otel_metric_create_counter, + op_otel_metric_create_gauge, + op_otel_metric_create_histogram, + op_otel_metric_create_observable_counter, + op_otel_metric_create_observable_gauge, + op_otel_metric_create_observable_up_down_counter, + op_otel_metric_create_up_down_counter, + op_otel_metric_observable_record0, + op_otel_metric_observable_record1, + op_otel_metric_observable_record2, + op_otel_metric_observable_record3, + op_otel_metric_observation_done, + op_otel_metric_record0, + op_otel_metric_record1, + op_otel_metric_record2, + op_otel_metric_record3, + op_otel_metric_wait_to_observe, op_otel_span_attribute, op_otel_span_attribute2, op_otel_span_attribute3, @@ -36,25 +37,32 @@ import { Console } from "ext:deno_console/01_console.js"; import { performance } from "ext:deno_web/15_performance.js"; const { - SafeWeakMap, Array, - ObjectEntries, - ReflectApply, - SymbolFor, + ArrayPrototypePush, Error, - Uint8Array, - TypedArrayPrototypeSubarray, ObjectAssign, ObjectDefineProperty, - WeakRefPrototypeDeref, + ObjectEntries, + ObjectPrototypeIsPrototypeOf, + ReflectApply, + SafeIterator, + SafeMap, + SafePromiseAll, + SafeSet, + SafeWeakMap, + SafeWeakRef, + SafeWeakSet, String, StringPrototypePadStart, - ObjectPrototypeIsPrototypeOf, - SafeWeakRef, + SymbolFor, + TypedArrayPrototypeSubarray, + Uint8Array, + WeakRefPrototypeDeref, } = primordials; const { AsyncVariable, setAsyncContext } = core; export let TRACING_ENABLED = false; +export let METRICS_ENABLED = false; let DETERMINISTIC = false; // Note: These start at 0 in the JS library, @@ -202,30 +210,9 @@ const instrumentationScopes = new SafeWeakMap< >(); let activeInstrumentationLibrary: WeakRef | null = null; -function submitSpan( - spanId: string | Uint8Array, - traceId: string | Uint8Array, - traceFlags: number, - parentSpanId: string | Uint8Array | null, - span: Omit< - ReadableSpan, - | "spanContext" - | "startTime" - | "endTime" - | "parentSpanId" - | "duration" - | "ended" - | "resource" - >, - startTime: number, - endTime: number, +function 
activateInstrumentationLibrary( + instrumentationLibrary: InstrumentationLibrary, ) { - if (!TRACING_ENABLED) return; - if (!(traceFlags & TRACE_FLAG_SAMPLED)) return; - - // TODO(@lucacasonato): `resource` is ignored for now, should we implement it? - - const instrumentationLibrary = span.instrumentationLibrary; if ( !activeInstrumentationLibrary || WeakRefPrototypeDeref(activeInstrumentationLibrary) !== @@ -255,6 +242,32 @@ function submitSpan( } } } +} + +function submitSpan( + spanId: string | Uint8Array, + traceId: string | Uint8Array, + traceFlags: number, + parentSpanId: string | Uint8Array | null, + span: Omit< + ReadableSpan, + | "spanContext" + | "startTime" + | "endTime" + | "parentSpanId" + | "duration" + | "ended" + | "resource" + >, + startTime: number, + endTime: number, +) { + if (!TRACING_ENABLED) return; + if (!(traceFlags & TRACE_FLAG_SAMPLED)) return; + + // TODO(@lucacasonato): `resource` is ignored for now, should we implement it? + + activateInstrumentationLibrary(span.instrumentationLibrary); op_otel_span_start( traceId, @@ -368,7 +381,7 @@ export let endSpan: (span: Span) => void; export class Span { #traceId: string | Uint8Array; - #spanId: Uint8Array; + #spanId: string | Uint8Array; #traceFlags = TRACE_FLAG_SAMPLED; #spanContext: SpanContext | null = null; @@ -687,260 +700,510 @@ class ContextManager { } } -function attributeValue(value: IAnyValue) { - return value.boolValue ?? value.stringValue ?? value.doubleValue ?? - value.intValue; +// metrics + +interface MeterOptions { + schemaUrl?: string; } -function submitMetrics(resource, scopeMetrics) { - let i = 0; - while (i < resource.attributes.length) { - if (i + 2 < resource.attributes.length) { - op_otel_metrics_resource_attribute3( - resource.attributes.length, - resource.attributes[i].key, - attributeValue(resource.attributes[i].value), - resource.attributes[i + 1].key, - attributeValue(resource.attributes[i + 1].value), - resource.attributes[i + 2].key, - attributeValue(resource.attributes[i + 2].value), - ); - i += 3; - } else if (i + 1 < resource.attributes.length) { - op_otel_metrics_resource_attribute2( - resource.attributes.length, - resource.attributes[i].key, - attributeValue(resource.attributes[i].value), - resource.attributes[i + 1].key, - attributeValue(resource.attributes[i + 1].value), - ); - i += 2; - } else { - op_otel_metrics_resource_attribute( - resource.attributes.length, - resource.attributes[i].key, - attributeValue(resource.attributes[i].value), - ); - i += 1; - } +interface MetricOptions { + description?: string; + + unit?: string; + + valueType?: ValueType; + + advice?: MetricAdvice; +} + +enum ValueType { + INT = 0, + DOUBLE = 1, +} + +interface MetricAdvice { + /** + * Hint the explicit bucket boundaries for SDK if the metric is been + * aggregated with a HistogramAggregator. 
+ */ + explicitBucketBoundaries?: number[]; +} + +export class MeterProvider { + getMeter(name: string, version?: string, options?: MeterOptions): Meter { + return new Meter({ name, version, schemaUrl: options?.schemaUrl }); + } +} + +type MetricAttributes = Attributes; + +type Instrument = { __key: "instrument" }; + +let batchResultHasObservables: ( + res: BatchObservableResult, + observables: Observable[], +) => boolean; + +class BatchObservableResult { + #observables: WeakSet; + + constructor(observables: WeakSet) { + this.#observables = observables; } - for (let smi = 0; smi < scopeMetrics.length; smi += 1) { - const { scope, metrics } = scopeMetrics[smi]; + static { + batchResultHasObservables = (cb, observables) => { + for (const observable of new SafeIterator(observables)) { + if (!cb.#observables.has(observable)) return false; + } + return true; + }; + } - op_otel_metrics_scope(scope.name, scope.schemaUrl, scope.version); + observe( + metric: Observable, + value: number, + attributes?: MetricAttributes, + ): void { + if (!this.#observables.has(metric)) return; + getObservableResult(metric).observe(value, attributes); + } +} - for (let mi = 0; mi < metrics.length; mi += 1) { - const metric = metrics[mi]; - switch (metric.dataPointType) { - case 3: - op_otel_metrics_sum( - metric.descriptor.name, - // deno-lint-ignore prefer-primordials - metric.descriptor.description, - metric.descriptor.unit, - metric.aggregationTemporality, - metric.isMonotonic, - ); - for (let di = 0; di < metric.dataPoints.length; di += 1) { - const dataPoint = metric.dataPoints[di]; - op_otel_metrics_sum_or_gauge_data_point( - dataPoint.value, - hrToSecs(dataPoint.startTime), - hrToSecs(dataPoint.endTime), - ); - const attributes = ObjectEntries(dataPoint.attributes); - let i = 0; - while (i < attributes.length) { - if (i + 2 < attributes.length) { - op_otel_metrics_data_point_attribute3( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - attributes[i + 2][0], - attributes[i + 2][1], - ); - i += 3; - } else if (i + 1 < attributes.length) { - op_otel_metrics_data_point_attribute2( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - ); - i += 2; - } else { - op_otel_metrics_data_point_attribute( - attributes.length, - attributes[i][0], - attributes[i][1], - ); - i += 1; - } - } - } - break; - case 2: - op_otel_metrics_gauge( - metric.descriptor.name, - // deno-lint-ignore prefer-primordials - metric.descriptor.description, - metric.descriptor.unit, - ); - for (let di = 0; di < metric.dataPoints.length; di += 1) { - const dataPoint = metric.dataPoints[di]; - op_otel_metrics_sum_or_gauge_data_point( - dataPoint.value, - hrToSecs(dataPoint.startTime), - hrToSecs(dataPoint.endTime), - ); - const attributes = ObjectEntries(dataPoint.attributes); - let i = 0; - while (i < attributes.length) { - if (i + 2 < attributes.length) { - op_otel_metrics_data_point_attribute3( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - attributes[i + 2][0], - attributes[i + 2][1], - ); - i += 3; - } else if (i + 1 < attributes.length) { - op_otel_metrics_data_point_attribute2( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - ); - i += 2; - } else { - op_otel_metrics_data_point_attribute( - attributes.length, - attributes[i][0], - attributes[i][1], - ); - i += 1; - } - } - } - break; - case 0: - 
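The Meter methods below map each instrument straight onto an SDK builder call; notably, createHistogram forwards options?.advice?.explicitBucketBoundaries to its create op. A sketch of what that amounts to on the Rust side, assuming opentelemetry-rust 0.27 builder names (with_boundaries in particular is an assumption about the SDK version in use; the instrument name and values are illustrative):

    use opentelemetry::{global, KeyValue};

    fn main() {
        let meter = global::meter("example");
        // Histogram advice from JS becomes explicit bucket boundaries here.
        let histogram = meter
            .f64_histogram("request_duration")
            .with_unit("s")
            .with_boundaries(vec![0.01, 0.1, 1.0, 10.0])
            .build();
        histogram.record(0.42, &[KeyValue::new("route", "/users")]);
    }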
op_otel_metrics_histogram( - metric.descriptor.name, - // deno-lint-ignore prefer-primordials - metric.descriptor.description, - metric.descriptor.unit, - metric.aggregationTemporality, - ); - for (let di = 0; di < metric.dataPoints.length; di += 1) { - const dataPoint = metric.dataPoints[di]; - const { boundaries, counts } = dataPoint.value.buckets; - op_otel_metrics_histogram_data_point( - dataPoint.value.count, - dataPoint.value.min ?? NaN, - dataPoint.value.max ?? NaN, - dataPoint.value.sum, - hrToSecs(dataPoint.startTime), - hrToSecs(dataPoint.endTime), - boundaries.length, - ); - let j = 0; - while (j < boundaries.length) { - if (j + 3 < boundaries.length) { - op_otel_metrics_histogram_data_point_entry3( - counts[j], - boundaries[j], - counts[j + 1], - boundaries[j + 1], - counts[j + 2], - boundaries[j + 2], - ); - j += 3; - } else if (j + 2 < boundaries.length) { - op_otel_metrics_histogram_data_point_entry2( - counts[j], - boundaries[j], - counts[j + 1], - boundaries[j + 1], - ); - j += 2; - } else { - op_otel_metrics_histogram_data_point_entry1( - counts[j], - boundaries[j], - ); - j += 1; - } - } - op_otel_metrics_histogram_data_point_entry_final(counts[j]); - const attributes = ObjectEntries(dataPoint.attributes); - let i = 0; - while (i < attributes.length) { - if (i + 2 < attributes.length) { - op_otel_metrics_data_point_attribute3( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - attributes[i + 2][0], - attributes[i + 2][1], - ); - i += 3; - } else if (i + 1 < attributes.length) { - op_otel_metrics_data_point_attribute2( - attributes.length, - attributes[i][0], - attributes[i][1], - attributes[i + 1][0], - attributes[i + 1][1], - ); - i += 2; - } else { - op_otel_metrics_data_point_attribute( - attributes.length, - attributes[i][0], - attributes[i][1], - ); - i += 1; - } - } - } - break; - default: - continue; +const BATCH_CALLBACKS = new SafeMap< + BatchObservableCallback, + BatchObservableResult +>(); +const INDIVIDUAL_CALLBACKS = new SafeMap>(); + +class Meter { + #instrumentationLibrary: InstrumentationLibrary; + + constructor(instrumentationLibrary: InstrumentationLibrary) { + this.#instrumentationLibrary = instrumentationLibrary; + } + + createCounter( + name: string, + options?: MetricOptions, + ): Counter { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if (!METRICS_ENABLED) return new Counter(null, false); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_counter( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + ) as Instrument; + return new Counter(instrument, false); + } + + createUpDownCounter( + name: string, + options?: MetricOptions, + ): Counter { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if (!METRICS_ENABLED) return new Counter(null, true); + activateInstrumentationLibrary(this.#instrumentationLibrary); + const instrument = op_otel_metric_create_up_down_counter( + name, + // deno-lint-ignore prefer-primordials + options?.description, + options?.unit, + ) as Instrument; + return new Counter(instrument, true); + } + + createGauge( + name: string, + options?: MetricOptions, + ): Gauge { + if (options?.valueType !== undefined && options?.valueType !== 1) { + throw new Error("Only valueType: DOUBLE is supported"); + } + if 
(!METRICS_ENABLED) return new Gauge(null);
+    activateInstrumentationLibrary(this.#instrumentationLibrary);
+    const instrument = op_otel_metric_create_gauge(
+      name,
+      // deno-lint-ignore prefer-primordials
+      options?.description,
+      options?.unit,
+    ) as Instrument;
+    return new Gauge(instrument);
+  }
+
+  createHistogram(
+    name: string,
+    options?: MetricOptions,
+  ): Histogram {
+    if (options?.valueType !== undefined && options?.valueType !== 1) {
+      throw new Error("Only valueType: DOUBLE is supported");
+    }
+    if (!METRICS_ENABLED) return new Histogram(null);
+    activateInstrumentationLibrary(this.#instrumentationLibrary);
+    const instrument = op_otel_metric_create_histogram(
+      name,
+      // deno-lint-ignore prefer-primordials
+      options?.description,
+      options?.unit,
+      options?.advice?.explicitBucketBoundaries,
+    ) as Instrument;
+    return new Histogram(instrument);
+  }
+
+  createObservableCounter(
+    name: string,
+    options?: MetricOptions,
+  ): Observable {
+    if (options?.valueType !== undefined && options?.valueType !== 1) {
+      throw new Error("Only valueType: DOUBLE is supported");
+    }
+    if (!METRICS_ENABLED) {
+      return new Observable(new ObservableResult(null, true));
+    }
+    activateInstrumentationLibrary(this.#instrumentationLibrary);
+    const instrument = op_otel_metric_create_observable_counter(
+      name,
+      // deno-lint-ignore prefer-primordials
+      options?.description,
+      options?.unit,
+    ) as Instrument;
+    return new Observable(new ObservableResult(instrument, true));
+  }
+
+  createObservableGauge(
+    name: string,
+    options?: MetricOptions,
+  ): Observable {
+    if (options?.valueType !== undefined && options?.valueType !== 1) {
+      throw new Error("Only valueType: DOUBLE is supported");
+    }
+    if (!METRICS_ENABLED) {
+      return new Observable(new ObservableResult(null, false));
+    }
+    activateInstrumentationLibrary(this.#instrumentationLibrary);
+    const instrument = op_otel_metric_create_observable_gauge(
+      name,
+      // deno-lint-ignore prefer-primordials
+      options?.description,
+      options?.unit,
+    ) as Instrument;
+    return new Observable(new ObservableResult(instrument, false));
+  }
+
+  createObservableUpDownCounter(
+    name: string,
+    options?: MetricOptions,
+  ): Observable {
+    if (options?.valueType !== undefined && options?.valueType !== 1) {
+      throw new Error("Only valueType: DOUBLE is supported");
+    }
+    if (!METRICS_ENABLED) {
+      return new Observable(new ObservableResult(null, false));
+    }
+    activateInstrumentationLibrary(this.#instrumentationLibrary);
+    const instrument = op_otel_metric_create_observable_up_down_counter(
+      name,
+      // deno-lint-ignore prefer-primordials
+      options?.description,
+      options?.unit,
+    ) as Instrument;
+    return new Observable(new ObservableResult(instrument, false));
+  }
+
+  addBatchObservableCallback(
+    callback: BatchObservableCallback,
+    observables: Observable[],
+  ): void {
+    if (!METRICS_ENABLED) return;
+    const result = new BatchObservableResult(new SafeWeakSet(observables));
+    startObserving();
+    BATCH_CALLBACKS.set(callback, result);
+  }
+
+  removeBatchObservableCallback(
+    callback: BatchObservableCallback,
+    observables: Observable[],
+  ): void {
+    if (!METRICS_ENABLED) return;
+    const result = BATCH_CALLBACKS.get(callback);
+    if (result && batchResultHasObservables(result, observables)) {
+      BATCH_CALLBACKS.delete(callback);
+    }
+  }
+}
+
+type BatchObservableCallback = (
+  observableResult: BatchObservableResult,
+) => void | Promise<void>;
+
+function record(
+  instrument: Instrument | null,
+  value: number,
+  attributes?: MetricAttributes,
+) {
+  if (instrument === null) return;
+  if (attributes === undefined) {
+    op_otel_metric_record0(instrument, value);
+  } else {
+    const attrs = ObjectEntries(attributes);
+    if (attrs.length === 0) {
+      op_otel_metric_record0(instrument, value);
+    }
+    let i = 0;
+    while (i < attrs.length) {
+      const remaining = attrs.length - i;
+      if (remaining > 3) {
+        op_otel_metric_attribute3(
+          attrs.length,
+          attrs[i][0],
+          attrs[i][1],
+          attrs[i + 1][0],
+          attrs[i + 1][1],
+          attrs[i + 2][0],
+          attrs[i + 2][1],
+        );
+        i += 3;
+      } else if (remaining === 3) {
+        op_otel_metric_record3(
+          instrument,
+          value,
+          attrs[i][0],
+          attrs[i][1],
+          attrs[i + 1][0],
+          attrs[i + 1][1],
+          attrs[i + 2][0],
+          attrs[i + 2][1],
+        );
+        i += 3;
+      } else if (remaining === 2) {
+        op_otel_metric_record2(
+          instrument,
+          value,
+          attrs[i][0],
+          attrs[i][1],
+          attrs[i + 1][0],
+          attrs[i + 1][1],
+        );
+        i += 2;
+      } else if (remaining === 1) {
+        op_otel_metric_record1(
+          instrument,
+          value,
+          attrs[i][0],
+          attrs[i][1],
+        );
+        i += 1;
+      }
     }
   }
 }
-
-  op_otel_metrics_submit();
 }
 
-class MetricExporter {
-  export(metrics, resultCallback: (result: ExportResult) => void) {
-    try {
-      submitMetrics(metrics.resource, metrics.scopeMetrics);
-      resultCallback({ code: 0 });
-    } catch (error) {
-      resultCallback({
-        code: 1,
-        error: ObjectPrototypeIsPrototypeOf(error, Error)
-          ? error as Error
-          : new Error(String(error)),
-      });
+function recordObservable(
+  instrument: Instrument | null,
+  value: number,
+  attributes?: MetricAttributes,
+) {
+  if (instrument === null) return;
+  if (attributes === undefined) {
+    op_otel_metric_observable_record0(instrument, value);
+  } else {
+    const attrs = ObjectEntries(attributes);
+    if (attrs.length === 0) {
+      op_otel_metric_observable_record0(instrument, value);
+    }
+    let i = 0;
+    while (i < attrs.length) {
+      const remaining = attrs.length - i;
+      if (remaining > 3) {
+        op_otel_metric_attribute3(
+          attrs.length,
+          attrs[i][0],
+          attrs[i][1],
+          attrs[i + 1][0],
+          attrs[i + 1][1],
+          attrs[i + 2][0],
+          attrs[i + 2][1],
+        );
+        i += 3;
+      } else if (remaining === 3) {
+        op_otel_metric_observable_record3(
+          instrument,
+          value,
+          attrs[i][0],
+          attrs[i][1],
+          attrs[i + 1][0],
+          attrs[i + 1][1],
+          attrs[i + 2][0],
+          attrs[i + 2][1],
+        );
+        i += 3;
+      } else if (remaining === 2) {
+        op_otel_metric_observable_record2(
+          instrument,
+          value,
+          attrs[i][0],
+          attrs[i][1],
+          attrs[i + 1][0],
+          attrs[i + 1][1],
+        );
+        i += 2;
+      } else if (remaining === 1) {
+        op_otel_metric_observable_record1(
+          instrument,
+          value,
+          attrs[i][0],
+          attrs[i][1],
+        );
+        i += 1;
+      }
     }
   }
 }
-
-  async forceFlush() {}
+class Counter {
+  #instrument: Instrument | null;
+  #upDown: boolean;
-
-  async shutdown() {}
+  constructor(instrument: Instrument | null, upDown: boolean) {
+    this.#instrument = instrument;
+    this.#upDown = upDown;
+  }
+
+  add(value: number, attributes?: MetricAttributes, _context?: Context): void {
+    if (value < 0 && !this.#upDown) {
+      throw new Error("Counter can only be incremented");
+    }
+    record(this.#instrument, value, attributes);
+  }
+}
+
+class Gauge {
+  #instrument: Instrument | null;
+
+  constructor(instrument: Instrument | null) {
+    this.#instrument = instrument;
+  }
+
+  record(
+    value: number,
+    attributes?: MetricAttributes,
+    _context?: Context,
+  ): void {
+    record(this.#instrument, value, attributes);
+  }
+}
+
+class Histogram {
+  #instrument: Instrument | null;
+
+  constructor(instrument: Instrument | null) {
+    this.#instrument = instrument;
+  }
+
+  record(
+    value: number,
+    attributes?: MetricAttributes,
+    _context?: Context,
+  ): void {
record(this.#instrument, value, attributes); + } +} + +type ObservableCallback = ( + observableResult: ObservableResult, +) => void | Promise; + +let getObservableResult: (observable: Observable) => ObservableResult; + +class Observable { + #result: ObservableResult; + + constructor(result: ObservableResult) { + this.#result = result; + } + + static { + getObservableResult = (observable) => observable.#result; + } + + addCallback(callback: ObservableCallback): void { + const res = INDIVIDUAL_CALLBACKS.get(this); + if (res) res.add(callback); + else INDIVIDUAL_CALLBACKS.set(this, new SafeSet([callback])); + startObserving(); + } + + removeCallback(callback: ObservableCallback): void { + const res = INDIVIDUAL_CALLBACKS.get(this); + if (res) res.delete(callback); + if (res?.size === 0) INDIVIDUAL_CALLBACKS.delete(this); + } +} + +class ObservableResult { + #instrument: Instrument | null; + #isRegularCounter: boolean; + + constructor(instrument: Instrument | null, isRegularCounter: boolean) { + this.#instrument = instrument; + this.#isRegularCounter = isRegularCounter; + } + + observe( + this: ObservableResult, + value: number, + attributes?: MetricAttributes, + ): void { + if (this.#isRegularCounter) { + if (value < 0) { + throw new Error("Observable counters can only be incremented"); + } + } + recordObservable(this.#instrument, value, attributes); + } +} + +async function observe(): Promise { + const promises: Promise[] = []; + // Primordials are not needed, because this is a SafeMap. + // deno-lint-ignore prefer-primordials + for (const { 0: observable, 1: callbacks } of INDIVIDUAL_CALLBACKS) { + const result = getObservableResult(observable); + // Primordials are not needed, because this is a SafeSet. + // deno-lint-ignore prefer-primordials + for (const callback of callbacks) { + // PromiseTry is not in primordials? + // deno-lint-ignore prefer-primordials + ArrayPrototypePush(promises, Promise.try(callback, result)); + } + } + // Primordials are not needed, because this is a SafeMap. + // deno-lint-ignore prefer-primordials + for (const { 0: callback, 1: result } of BATCH_CALLBACKS) { + // PromiseTry is not in primordials? 
+ // deno-lint-ignore prefer-primordials + ArrayPrototypePush(promises, Promise.try(callback, result)); + } + await SafePromiseAll(promises); +} + +let isObserving = false; +function startObserving() { + if (!isObserving) { + isObserving = true; + (async () => { + while (true) { + const promise = op_otel_metric_wait_to_observe(); + core.unrefOpPromise(promise); + const ok = await promise; + if (!ok) break; + await observe(); + op_otel_metric_observation_done(); + } + })(); + } } const otelConsoleConfig = { @@ -951,14 +1214,21 @@ const otelConsoleConfig = { export function bootstrap( config: [ + 0 | 1, 0 | 1, typeof otelConsoleConfig[keyof typeof otelConsoleConfig], 0 | 1, ], ): void { - const { 0: tracingEnabled, 1: consoleConfig, 2: deterministic } = config; + const { + 0: tracingEnabled, + 1: metricsEnabled, + 2: consoleConfig, + 3: deterministic, + } = config; TRACING_ENABLED = tracingEnabled === 1; + METRICS_ENABLED = metricsEnabled === 1; DETERMINISTIC = deterministic === 1; switch (consoleConfig) { @@ -980,5 +1250,5 @@ export function bootstrap( export const telemetry = { SpanExporter, ContextManager, - MetricExporter, + MeterProvider, }; diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 690267b7e0..6bf1b8ea03 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.170.0" +version = "0.171.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index de4fc67df8..9ca3ce6752 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.183.0" +version = "0.184.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index e673ee2bb4..f3ac711fc7 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -908,8 +908,8 @@ const _original = Symbol("[[original]]"); * @param {boolean=} autoClose If the resource should be auto-closed when the stream closes. Defaults to true. * @returns {ReadableStream} */ -function readableStreamForRid(rid, autoClose = true) { - const stream = new ReadableStream(_brand); +function readableStreamForRid(rid, autoClose = true, Super) { + const stream = new (Super ?? ReadableStream)(_brand); stream[_resourceBacking] = { rid, autoClose }; const tryClose = () => { @@ -1130,8 +1130,8 @@ async function readableStreamCollectIntoUint8Array(stream) { * @param {boolean=} autoClose If the resource should be auto-closed when the stream closes. Defaults to true. * @returns {ReadableStream} */ -function writableStreamForRid(rid, autoClose = true) { - const stream = new WritableStream(_brand); +function writableStreamForRid(rid, autoClose = true, Super) { + const stream = new (Super ?? WritableStream)(_brand); stream[_resourceBacking] = { rid, autoClose }; const tryClose = () => { diff --git a/ext/web/13_message_port.js b/ext/web/13_message_port.js index cf72c43e6f..79fec9de2f 100644 --- a/ext/web/13_message_port.js +++ b/ext/web/13_message_port.js @@ -102,8 +102,8 @@ const nodeWorkerThreadCloseCb = Symbol("nodeWorkerThreadCloseCb"); const nodeWorkerThreadCloseCbInvoked = Symbol("nodeWorkerThreadCloseCbInvoked"); export const refMessagePort = Symbol("refMessagePort"); /** It is used by 99_main.js and worker_threads to - * unref/ref on the global pollForMessages promise. */ -export const unrefPollForMessages = Symbol("unrefPollForMessages"); + * unref/ref on the global message event handler count. 
*/ +export const unrefParentPort = Symbol("unrefParentPort"); /** * @param {number} id diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index b4cd69f970..44fb2e46bf 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.214.0" +version = "0.215.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webgpu/Cargo.toml b/ext/webgpu/Cargo.toml index 858cdb2dab..3a491afcf8 100644 --- a/ext/webgpu/Cargo.toml +++ b/ext/webgpu/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webgpu" -version = "0.150.0" +version = "0.151.0" authors = ["the Deno authors"] edition.workspace = true license = "MIT" diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 0ad7d8ac10..60cb9f29f8 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.183.0" +version = "0.184.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 2cd48a3816..8b8359f074 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.188.0" +version = "0.189.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index ff76458f33..4f9795d098 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.178.0" +version = "0.179.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/resolvers/deno/Cargo.toml b/resolvers/deno/Cargo.toml index a7273c7e73..12c18d4452 100644 --- a/resolvers/deno/Cargo.toml +++ b/resolvers/deno/Cargo.toml @@ -29,6 +29,7 @@ deno_path_util.workspace = true deno_semver.workspace = true node_resolver.workspace = true node_resolver.features = ["sync"] +sys_traits.workspace = true thiserror.workspace = true url.workspace = true diff --git a/resolvers/deno/cjs.rs b/resolvers/deno/cjs.rs index 6ae648deab..2ec253d41a 100644 --- a/resolvers/deno/cjs.rs +++ b/resolvers/deno/cjs.rs @@ -1,29 +1,30 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use crate::sync::MaybeDashMap; use deno_media_type::MediaType; -use node_resolver::env::NodeResolverEnv; use node_resolver::errors::ClosestPkgJsonError; use node_resolver::InNpmPackageCheckerRc; use node_resolver::PackageJsonResolverRc; use node_resolver::ResolutionMode; +use sys_traits::FsRead; use url::Url; +use crate::sync::MaybeDashMap; + /// Keeps track of what module specifiers were resolved as CJS. /// /// Modules that are `.js`, `.ts`, `.jsx`, and `tsx` are only known to /// be CJS or ESM after they're loaded based on their contents. So these /// files will be "maybe CJS" until they're loaded. #[derive(Debug)] -pub struct CjsTracker { - is_cjs_resolver: IsCjsResolver, +pub struct CjsTracker { + is_cjs_resolver: IsCjsResolver, known: MaybeDashMap, } -impl CjsTracker { +impl CjsTracker { pub fn new( in_npm_pkg_checker: InNpmPackageCheckerRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, mode: IsCjsResolutionMode, ) -> Self { Self { @@ -124,16 +125,16 @@ pub enum IsCjsResolutionMode { /// Resolves whether a module is CJS or ESM. 
#[derive(Debug)] -pub struct IsCjsResolver { +pub struct IsCjsResolver { in_npm_pkg_checker: InNpmPackageCheckerRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, mode: IsCjsResolutionMode, } -impl IsCjsResolver { +impl IsCjsResolver { pub fn new( in_npm_pkg_checker: InNpmPackageCheckerRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, mode: IsCjsResolutionMode, ) -> Self { Self { diff --git a/resolvers/deno/fs.rs b/resolvers/deno/fs.rs deleted file mode 100644 index f2021a73a9..0000000000 --- a/resolvers/deno/fs.rs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use std::borrow::Cow; -use std::path::Path; -use std::path::PathBuf; - -pub struct DirEntry { - pub name: String, - pub is_file: bool, - pub is_directory: bool, -} - -pub trait DenoResolverFs { - fn read_to_string_lossy( - &self, - path: &Path, - ) -> std::io::Result>; - fn realpath_sync(&self, path: &Path) -> std::io::Result; - fn exists_sync(&self, path: &Path) -> bool; - fn is_dir_sync(&self, path: &Path) -> bool; - fn read_dir_sync(&self, dir_path: &Path) -> std::io::Result>; -} diff --git a/resolvers/deno/lib.rs b/resolvers/deno/lib.rs index 05fa416da1..c943aacdae 100644 --- a/resolvers/deno/lib.rs +++ b/resolvers/deno/lib.rs @@ -14,11 +14,10 @@ use deno_config::workspace::WorkspaceResolver; use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValueParseError; use deno_semver::npm::NpmPackageReqReference; -use fs::DenoResolverFs; -use node_resolver::env::NodeResolverEnv; use node_resolver::errors::NodeResolveError; use node_resolver::errors::PackageSubpathResolveError; use node_resolver::InNpmPackageCheckerRc; +use node_resolver::IsBuiltInNodeModuleChecker; use node_resolver::NodeResolution; use node_resolver::NodeResolutionKind; use node_resolver::NodeResolverRc; @@ -32,11 +31,14 @@ use npm::ResolveReqWithSubPathErrorKind; use sloppy_imports::SloppyImportResolverFs; use sloppy_imports::SloppyImportsResolutionKind; use sloppy_imports::SloppyImportsResolverRc; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsRead; +use sys_traits::FsReadDir; use thiserror::Error; use url::Url; pub mod cjs; -pub mod fs; pub mod npm; pub mod sloppy_imports; mod sync; @@ -80,22 +82,22 @@ pub enum DenoResolveErrorKind { #[derive(Debug)] pub struct NodeAndNpmReqResolver< - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { - pub node_resolver: NodeResolverRc, - pub npm_req_resolver: NpmReqResolverRc, + pub node_resolver: NodeResolverRc, + pub npm_req_resolver: NpmReqResolverRc, } pub struct DenoResolverOptions< 'a, - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TSloppyImportResolverFs: SloppyImportResolverFs, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { pub in_npm_pkg_checker: InNpmPackageCheckerRc, pub node_and_req_resolver: - Option>, + Option>, pub sloppy_imports_resolver: Option>, pub workspace_resolver: WorkspaceResolverRc, @@ -110,12 +112,13 @@ pub struct DenoResolverOptions< /// import map, JSX settings. 
#[derive(Debug)] pub struct DenoResolver< - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TSloppyImportResolverFs: SloppyImportResolverFs, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { in_npm_pkg_checker: InNpmPackageCheckerRc, - node_and_npm_resolver: Option>, + node_and_npm_resolver: + Option>, sloppy_imports_resolver: Option>, workspace_resolver: WorkspaceResolverRc, @@ -124,13 +127,17 @@ pub struct DenoResolver< } impl< - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TSloppyImportResolverFs: SloppyImportResolverFs, - > DenoResolver + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, + > DenoResolver { pub fn new( - options: DenoResolverOptions, + options: DenoResolverOptions< + TIsBuiltInNodeModuleChecker, + TSloppyImportResolverFs, + TSys, + >, ) -> Self { Self { in_npm_pkg_checker: options.in_npm_pkg_checker, diff --git a/resolvers/deno/npm/byonm.rs b/resolvers/deno/npm/byonm.rs index 08d06f9cac..3056a70f61 100644 --- a/resolvers/deno/npm/byonm.rs +++ b/resolvers/deno/npm/byonm.rs @@ -9,8 +9,8 @@ use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonRc; use deno_path_util::url_to_file_path; use deno_semver::package::PackageReq; +use deno_semver::StackString; use deno_semver::Version; -use node_resolver::env::NodeResolverEnv; use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveIoError; use node_resolver::errors::PackageJsonLoadError; @@ -18,11 +18,14 @@ use node_resolver::errors::PackageNotFoundError; use node_resolver::InNpmPackageChecker; use node_resolver::NpmPackageFolderResolver; use node_resolver::PackageJsonResolverRc; +use sys_traits::FsCanonicalize; +use sys_traits::FsDirEntry; +use sys_traits::FsMetadata; +use sys_traits::FsRead; +use sys_traits::FsReadDir; use thiserror::Error; use url::Url; -use crate::fs::DenoResolverFs; - use super::local::normalize_pkg_name_for_node_modules_deno_folder; use super::CliNpmReqResolver; use super::ResolvePkgFolderFromDenoReqError; @@ -30,7 +33,7 @@ use super::ResolvePkgFolderFromDenoReqError; #[derive(Debug, Error)] pub enum ByonmResolvePkgFolderFromDenoReqError { #[error("Could not find \"{}\" in a node_modules folder. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?", .0)] - MissingAlias(String), + MissingAlias(StackString), #[error(transparent)] PackageJson(#[from] PackageJsonLoadError), #[error("Could not find a matching package for 'npm:{}' in the node_modules directory. Ensure you have all your JSR and npm dependencies listed in your deno.json or package.json, then run `deno install`. 
Alternatively, turn on auto-install by specifying `\"nodeModulesDir\": \"auto\"` in your deno.json file.", .0)] @@ -39,44 +42,45 @@ pub enum ByonmResolvePkgFolderFromDenoReqError { Io(#[from] std::io::Error), } -pub struct ByonmNpmResolverCreateOptions< - Fs: DenoResolverFs, - TEnv: NodeResolverEnv, -> { +pub struct ByonmNpmResolverCreateOptions { // todo(dsherret): investigate removing this pub root_node_modules_dir: Option, - pub fs: Fs, - pub pkg_json_resolver: PackageJsonResolverRc, + pub sys: TSys, + pub pkg_json_resolver: PackageJsonResolverRc, } #[allow(clippy::disallowed_types)] -pub type ByonmNpmResolverRc = - crate::sync::MaybeArc>; +pub type ByonmNpmResolverRc = + crate::sync::MaybeArc>; #[derive(Debug)] -pub struct ByonmNpmResolver { - fs: Fs, - pkg_json_resolver: PackageJsonResolverRc, +pub struct ByonmNpmResolver< + TSys: FsCanonicalize + FsRead + FsMetadata + FsReadDir, +> { + sys: TSys, + pkg_json_resolver: PackageJsonResolverRc, root_node_modules_dir: Option, } -impl Clone - for ByonmNpmResolver +impl Clone + for ByonmNpmResolver { fn clone(&self) -> Self { Self { - fs: self.fs.clone(), + sys: self.sys.clone(), pkg_json_resolver: self.pkg_json_resolver.clone(), root_node_modules_dir: self.root_node_modules_dir.clone(), } } } -impl ByonmNpmResolver { - pub fn new(options: ByonmNpmResolverCreateOptions) -> Self { +impl + ByonmNpmResolver +{ + pub fn new(options: ByonmNpmResolverCreateOptions) -> Self { Self { root_node_modules_dir: options.root_node_modules_dir, - fs: options.fs, + sys: options.sys, pkg_json_resolver: options.pkg_json_resolver, } } @@ -128,19 +132,20 @@ impl ByonmNpmResolver { req: &PackageReq, referrer: &Url, ) -> Result { - fn node_resolve_dir( - fs: &Fs, + fn node_resolve_dir( + sys: &TSys, alias: &str, start_dir: &Path, ) -> std::io::Result> { for ancestor in start_dir.ancestors() { let node_modules_folder = ancestor.join("node_modules"); let sub_dir = join_package_name(&node_modules_folder, alias); - if fs.is_dir_sync(&sub_dir) { - return Ok(Some(deno_path_util::canonicalize_path_maybe_not_exists( - &sub_dir, - &|path| fs.realpath_sync(path), - )?)); + if sys.fs_is_dir_no_err(&sub_dir) { + return Ok(Some( + deno_path_util::fs::canonicalize_path_maybe_not_exists( + sys, &sub_dir, + )?, + )); } } Ok(None) @@ -153,7 +158,7 @@ impl ByonmNpmResolver { Some((pkg_json, alias)) => { // now try node resolution if let Some(resolved) = - node_resolve_dir(&self.fs, &alias, pkg_json.dir_path())? + node_resolve_dir(&self.sys, &alias, pkg_json.dir_path())? 
{ return Ok(resolved); } @@ -177,16 +182,14 @@ impl ByonmNpmResolver { &self, req: &PackageReq, referrer: &Url, - ) -> Result, PackageJsonLoadError> { + ) -> Result, PackageJsonLoadError> { fn resolve_alias_from_pkg_json( req: &PackageReq, pkg_json: &PackageJson, - ) -> Option { + ) -> Option { let deps = pkg_json.resolve_local_package_json_deps(); - for (key, value) in deps - .dependencies - .into_iter() - .chain(deps.dev_dependencies.into_iter()) + for (key, value) in + deps.dependencies.iter().chain(deps.dev_dependencies.iter()) { if let Ok(value) = value { match value { @@ -194,12 +197,14 @@ impl ByonmNpmResolver { if dep_req.name == req.name && dep_req.version_req.intersects(&req.version_req) { - return Some(key); + return Some(key.clone()); } } PackageJsonDepValue::Workspace(_workspace) => { - if key == req.name && req.version_req.tag() == Some("workspace") { - return Some(key); + if key.as_str() == req.name + && req.version_req.tag() == Some("workspace") + { + return Some(key.clone()); } } } @@ -246,7 +251,7 @@ impl ByonmNpmResolver { if let Ok(Some(dep_pkg_json)) = self.load_pkg_json(&pkg_folder.join("package.json")) { - if dep_pkg_json.name.as_ref() == Some(&req.name) { + if dep_pkg_json.name.as_deref() == Some(req.name.as_str()) { let matches_req = dep_pkg_json .version .as_ref() @@ -297,7 +302,7 @@ impl ByonmNpmResolver { // now check if node_modules/.deno/ matches this constraint let root_node_modules_dir = self.root_node_modules_dir.as_ref()?; let node_modules_deno_dir = root_node_modules_dir.join(".deno"); - let Ok(entries) = self.fs.read_dir_sync(&node_modules_deno_dir) else { + let Ok(entries) = self.sys.fs_read_dir(&node_modules_deno_dir) else { return None; }; let search_prefix = format!( @@ -310,10 +315,17 @@ impl ByonmNpmResolver { // - @denotest+add@1.0.0 // - @denotest+add@1.0.0_1 for entry in entries { - if !entry.is_directory { + let Ok(entry) = entry else { + continue; + }; + let Ok(file_type) = entry.file_type() else { + continue; + }; + if !file_type.is_dir() { continue; } - let Some(version_and_copy_idx) = entry.name.strip_prefix(&search_prefix) + let entry_name = entry.file_name().to_string_lossy().into_owned(); + let Some(version_and_copy_idx) = entry_name.strip_prefix(&search_prefix) else { continue; }; @@ -326,8 +338,8 @@ impl ByonmNpmResolver { }; if let Some(tag) = req.version_req.tag() { let initialized_file = - node_modules_deno_dir.join(&entry.name).join(".initialized"); - let Ok(contents) = self.fs.read_to_string_lossy(&initialized_file) + node_modules_deno_dir.join(&entry_name).join(".initialized"); + let Ok(contents) = self.sys.fs_read_to_string_lossy(&initialized_file) else { continue; }; @@ -335,19 +347,19 @@ impl ByonmNpmResolver { if tags.any(|t| t == tag) { if let Some((best_version_version, _)) = &best_version { if version > *best_version_version { - best_version = Some((version, entry.name)); + best_version = Some((version, entry_name)); } } else { - best_version = Some((version, entry.name)); + best_version = Some((version, entry_name)); } } } else if req.version_req.matches(&version) { if let Some((best_version_version, _)) = &best_version { if version > *best_version_version { - best_version = Some((version, entry.name)); + best_version = Some((version, entry_name)); } } else { - best_version = Some((version, entry.name)); + best_version = Some((version, entry_name)); } } } @@ -362,9 +374,14 @@ impl ByonmNpmResolver { } impl< - Fs: DenoResolverFs + Send + Sync + std::fmt::Debug, - TEnv: NodeResolverEnv, - > CliNpmReqResolver for 
ByonmNpmResolver + Sys: FsCanonicalize + + FsMetadata + + FsRead + + FsReadDir + + Send + + Sync + + std::fmt::Debug, + > CliNpmReqResolver for ByonmNpmResolver { fn resolve_pkg_folder_from_deno_module_req( &self, @@ -379,17 +396,22 @@ impl< } impl< - Fs: DenoResolverFs + Send + Sync + std::fmt::Debug, - TEnv: NodeResolverEnv, - > NpmPackageFolderResolver for ByonmNpmResolver + Sys: FsCanonicalize + + FsMetadata + + FsRead + + FsReadDir + + Send + + Sync + + std::fmt::Debug, + > NpmPackageFolderResolver for ByonmNpmResolver { fn resolve_package_folder_from_package( &self, name: &str, referrer: &Url, ) -> Result { - fn inner( - fs: &Fs, + fn inner( + sys: &TSys, name: &str, referrer: &Url, ) -> Result { @@ -406,7 +428,7 @@ impl< }; let sub_dir = join_package_name(&node_modules_folder, name); - if fs.is_dir_sync(&sub_dir) { + if sys.fs_is_dir_no_err(&sub_dir) { return Ok(sub_dir); } } @@ -422,8 +444,8 @@ impl< ) } - let path = inner(&self.fs, name, referrer)?; - self.fs.realpath_sync(&path).map_err(|err| { + let path = inner(&self.sys, name, referrer)?; + self.sys.fs_canonicalize(&path).map_err(|err| { PackageFolderResolveIoError { package_name: name.to_string(), referrer: referrer.clone(), diff --git a/resolvers/deno/npm/mod.rs b/resolvers/deno/npm/mod.rs index 64ec86fe3f..082940eb34 100644 --- a/resolvers/deno/npm/mod.rs +++ b/resolvers/deno/npm/mod.rs @@ -6,7 +6,6 @@ use std::path::PathBuf; use boxed_error::Boxed; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; -use node_resolver::env::NodeResolverEnv; use node_resolver::errors::NodeResolveError; use node_resolver::errors::NodeResolveErrorKind; use node_resolver::errors::PackageFolderResolveErrorKind; @@ -15,15 +14,18 @@ use node_resolver::errors::PackageNotFoundError; use node_resolver::errors::PackageResolveErrorKind; use node_resolver::errors::PackageSubpathResolveError; use node_resolver::InNpmPackageCheckerRc; +use node_resolver::IsBuiltInNodeModuleChecker; use node_resolver::NodeResolution; use node_resolver::NodeResolutionKind; use node_resolver::NodeResolverRc; use node_resolver::ResolutionMode; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsRead; +use sys_traits::FsReadDir; use thiserror::Error; use url::Url; -use crate::fs::DenoResolverFs; - pub use byonm::ByonmInNpmPackageChecker; pub use byonm::ByonmNpmResolver; pub use byonm::ByonmNpmResolverCreateOptions; @@ -95,40 +97,46 @@ pub trait CliNpmReqResolver: Debug + Send + Sync { } pub struct NpmReqResolverOptions< - Fs: DenoResolverFs, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { /// The resolver when "bring your own node_modules" is enabled where Deno /// does not setup the node_modules directories automatically, but instead /// uses what already exists on the file system. 
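The resolver types above swap the bespoke DenoResolverFs/NodeResolverEnv traits for per-capability sys_traits bounds, so each struct states exactly which filesystem operations it needs. A minimal sketch of the resulting style (the helper function and its name are illustrative; fs_is_dir_no_err is the same sys_traits method the patch uses in place of the old is_dir_sync):

    use std::path::Path;

    use sys_traits::FsMetadata;

    // Generic over the filesystem: production code passes a real sys
    // implementation, tests can pass any other type with the same trait.
    fn has_pkg_in_node_modules(
        sys: &impl FsMetadata,
        dir: &Path,
        name: &str,
    ) -> bool {
        sys.fs_is_dir_no_err(dir.join("node_modules").join(name))
    }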
- pub byonm_resolver: Option>, - pub fs: Fs, + pub byonm_resolver: Option>, pub in_npm_pkg_checker: InNpmPackageCheckerRc, - pub node_resolver: NodeResolverRc, + pub node_resolver: NodeResolverRc, pub npm_req_resolver: CliNpmReqResolverRc, + pub sys: TSys, } #[allow(clippy::disallowed_types)] -pub type NpmReqResolverRc = - crate::sync::MaybeArc>; +pub type NpmReqResolverRc = + crate::sync::MaybeArc>; #[derive(Debug)] -pub struct NpmReqResolver -{ - byonm_resolver: Option>, - fs: Fs, +pub struct NpmReqResolver< + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, +> { + byonm_resolver: Option>, + sys: TSys, in_npm_pkg_checker: InNpmPackageCheckerRc, - node_resolver: NodeResolverRc, + node_resolver: NodeResolverRc, npm_resolver: CliNpmReqResolverRc, } -impl - NpmReqResolver +impl< + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, + > NpmReqResolver { - pub fn new(options: NpmReqResolverOptions) -> Self { + pub fn new( + options: NpmReqResolverOptions, + ) -> Self { Self { byonm_resolver: options.byonm_resolver, - fs: options.fs, + sys: options.sys, in_npm_pkg_checker: options.in_npm_pkg_checker, node_resolver: options.node_resolver, npm_resolver: options.npm_req_resolver, @@ -175,7 +183,7 @@ impl Err(err) => { if self.byonm_resolver.is_some() { let package_json_path = package_folder.join("package.json"); - if !self.fs.exists_sync(&package_json_path) { + if !self.sys.fs_exists_no_err(&package_json_path) { return Err( MissingPackageNodeModulesFolderError { package_json_path }.into(), ); diff --git a/resolvers/node/Cargo.toml b/resolvers/node/Cargo.toml index 52aedbee9d..1e35c0a355 100644 --- a/resolvers/node/Cargo.toml +++ b/resolvers/node/Cargo.toml @@ -29,6 +29,7 @@ once_cell.workspace = true path-clean = "=0.1.0" regex.workspace = true serde_json.workspace = true +sys_traits.workspace = true thiserror.workspace = true tokio.workspace = true url.workspace = true diff --git a/resolvers/node/analyze.rs b/resolvers/node/analyze.rs index a444f4d923..2024e6a1e8 100644 --- a/resolvers/node/analyze.rs +++ b/resolvers/node/analyze.rs @@ -16,11 +16,14 @@ use once_cell::sync::Lazy; use anyhow::Context; use anyhow::Error as AnyError; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsRead; use url::Url; -use crate::env::NodeResolverEnv; use crate::npm::InNpmPackageCheckerRc; use crate::resolution::NodeResolverRc; +use crate::IsBuiltInNodeModuleChecker; use crate::NodeResolutionKind; use crate::NpmPackageFolderResolverRc; use crate::PackageJsonResolverRc; @@ -60,34 +63,38 @@ pub trait CjsCodeAnalyzer { pub struct NodeCodeTranslator< TCjsCodeAnalyzer: CjsCodeAnalyzer, - TNodeResolverEnv: NodeResolverEnv, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead, > { cjs_code_analyzer: TCjsCodeAnalyzer, - env: TNodeResolverEnv, in_npm_pkg_checker: InNpmPackageCheckerRc, - node_resolver: NodeResolverRc, + node_resolver: NodeResolverRc, npm_resolver: NpmPackageFolderResolverRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + sys: TSys, } -impl - NodeCodeTranslator +impl< + TCjsCodeAnalyzer: CjsCodeAnalyzer, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead, + > NodeCodeTranslator { pub fn new( cjs_code_analyzer: TCjsCodeAnalyzer, - env: TNodeResolverEnv, in_npm_pkg_checker: InNpmPackageCheckerRc, - 
node_resolver: NodeResolverRc, + node_resolver: NodeResolverRc, npm_resolver: NpmPackageFolderResolverRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + sys: TSys, ) -> Self { Self { cjs_code_analyzer, - env, in_npm_pkg_checker, node_resolver, npm_resolver, pkg_json_resolver, + sys, } } @@ -162,7 +169,7 @@ impl add_export( &mut source, export, - &format!("mod[\"{}\"]", escape_for_double_quote_string(export)), + &format!("mod[{}]", to_double_quote_string(export)), &mut temp_var_count, ); } @@ -366,7 +373,7 @@ impl // old school if package_subpath != "." { let d = module_dir.join(package_subpath); - if self.env.is_dir_sync(&d) { + if self.sys.fs_is_dir_no_err(&d) { // subdir might have a package.json that specifies the entrypoint let package_json_path = d.join("package.json"); let maybe_package_json = self @@ -423,13 +430,13 @@ impl referrer: &Path, ) -> Result { let p = p.clean(); - if self.env.exists_sync(&p) { + if self.sys.fs_exists_no_err(&p) { let file_name = p.file_name().unwrap(); let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if self.env.is_file_sync(&p_js) { + if self.sys.fs_is_file_no_err(&p_js) { return Ok(p_js); - } else if self.env.is_dir_sync(&p) { + } else if self.sys.fs_is_dir_no_err(&p) { return Ok(p.join("index.js")); } else { return Ok(p); @@ -438,14 +445,14 @@ impl { let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if self.env.is_file_sync(&p_js) { + if self.sys.fs_is_file_no_err(&p_js) { return Ok(p_js); } } { let p_json = p.with_file_name(format!("{}.json", file_name.to_str().unwrap())); - if self.env.is_file_sync(&p_json) { + if self.sys.fs_is_file_no_err(&p_json) { return Ok(p_json); } } @@ -561,8 +568,8 @@ fn add_export( "const __deno_export_{temp_var_count}__ = {initializer};" )); source.push(format!( - "export {{ __deno_export_{temp_var_count}__ as \"{}\" }};", - escape_for_double_quote_string(name) + "export {{ __deno_export_{temp_var_count}__ as {} }};", + to_double_quote_string(name) )); } else { source.push(format!("export const {name} = {initializer};")); @@ -620,14 +627,9 @@ fn not_found(path: &str, referrer: &Path) -> AnyError { std::io::Error::new(std::io::ErrorKind::NotFound, msg).into() } -fn escape_for_double_quote_string(text: &str) -> Cow { - // this should be rare, so doing a scan first before allocating is ok - if text.chars().any(|c| matches!(c, '"' | '\\')) { - // don't bother making this more complex for perf because it's rare - Cow::Owned(text.replace('\\', "\\\\").replace('"', "\\\"")) - } else { - Cow::Borrowed(text) - } +fn to_double_quote_string(text: &str) -> String { + // serde can handle this for us + serde_json::to_string(text).unwrap() } #[cfg(test)] @@ -665,4 +667,13 @@ mod tests { Some(("@some-package/core".to_string(), "./actions".to_string())) ); } + + #[test] + fn test_to_double_quote_string() { + assert_eq!(to_double_quote_string("test"), "\"test\""); + assert_eq!( + to_double_quote_string("\r\n\t\"test"), + "\"\\r\\n\\t\\\"test\"" + ); + } } diff --git a/resolvers/node/env.rs b/resolvers/node/env.rs deleted file mode 100644 index b520ece0f8..0000000000 --- a/resolvers/node/env.rs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
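A quick illustration of why the to_double_quote_string change above is safe: serializing a &str with serde_json always yields a double-quoted, fully escaped JSON string, which is also a valid JavaScript string literal, so control characters that the old replace-based escape_for_double_quote_string left untouched are now handled. A standalone sketch (assumes serde_json, which this crate already depends on), mirroring the new unit test:

fn to_double_quote_string(text: &str) -> String {
  serde_json::to_string(text).unwrap()
}

fn main() {
  // quotes and backslashes, as the old escaping handled
  assert_eq!(to_double_quote_string(r#"a"b\c"#), r#""a\"b\\c""#);
  // control characters, which the old escaping missed
  assert_eq!(to_double_quote_string("\r\n\t"), r#""\r\n\t""#);
}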
- -use std::path::Path; -use std::path::PathBuf; - -use crate::sync::MaybeSend; -use crate::sync::MaybeSync; - -pub struct NodeResolverFsStat { - pub is_file: bool, - pub is_dir: bool, - pub is_symlink: bool, -} - -pub trait NodeResolverEnv: std::fmt::Debug + MaybeSend + MaybeSync { - fn is_builtin_node_module(&self, specifier: &str) -> bool; - - fn realpath_sync(&self, path: &Path) -> std::io::Result; - - fn stat_sync(&self, path: &Path) -> std::io::Result; - - fn exists_sync(&self, path: &Path) -> bool; - - fn is_file_sync(&self, path: &Path) -> bool { - self - .stat_sync(path) - .map(|stat| stat.is_file) - .unwrap_or(false) - } - - fn is_dir_sync(&self, path: &Path) -> bool { - self - .stat_sync(path) - .map(|stat| stat.is_dir) - .unwrap_or(false) - } - - fn pkg_json_fs(&self) -> &dyn deno_package_json::fs::DenoPkgJsonFs; -} diff --git a/resolvers/node/errors.rs b/resolvers/node/errors.rs index 600a365a8f..26b1a1d84a 100644 --- a/resolvers/node/errors.rs +++ b/resolvers/node/errors.rs @@ -320,7 +320,6 @@ impl NodeJsErrorCoded for PackageJsonLoadError { impl NodeJsErrorCoded for ClosestPkgJsonError { fn code(&self) -> NodeJsErrorCode { match self.as_kind() { - ClosestPkgJsonErrorKind::CanonicalizingDir(e) => e.code(), ClosestPkgJsonErrorKind::Load(e) => e.code(), } } @@ -331,26 +330,10 @@ pub struct ClosestPkgJsonError(pub Box); #[derive(Debug, Error)] pub enum ClosestPkgJsonErrorKind { - #[error(transparent)] - CanonicalizingDir(#[from] CanonicalizingPkgJsonDirError), #[error(transparent)] Load(#[from] PackageJsonLoadError), } -#[derive(Debug, Error)] -#[error("[{}] Failed canonicalizing package.json directory '{}'.", self.code(), dir_path.display())] -pub struct CanonicalizingPkgJsonDirError { - pub dir_path: PathBuf, - #[source] - pub source: std::io::Error, -} - -impl NodeJsErrorCoded for CanonicalizingPkgJsonDirError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_MODULE_NOT_FOUND - } -} - // todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError #[derive(Debug, Error)] #[error( diff --git a/resolvers/node/lib.rs b/resolvers/node/lib.rs index c73c395dfc..075f819ebb 100644 --- a/resolvers/node/lib.rs +++ b/resolvers/node/lib.rs @@ -4,7 +4,6 @@ #![deny(clippy::print_stdout)] pub mod analyze; -pub mod env; pub mod errors; mod npm; mod package_json; @@ -23,6 +22,7 @@ pub use package_json::PackageJsonThreadLocalCache; pub use path::PathClean; pub use resolution::parse_npm_pkg_name; pub use resolution::resolve_specifier_into_node_modules; +pub use resolution::IsBuiltInNodeModuleChecker; pub use resolution::NodeResolution; pub use resolution::NodeResolutionKind; pub use resolution::NodeResolver; diff --git a/resolvers/node/package_json.rs b/resolvers/node/package_json.rs index e3793af84a..ebbe099014 100644 --- a/resolvers/node/package_json.rs +++ b/resolvers/node/package_json.rs @@ -1,17 +1,16 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
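The deleted NodeResolverEnv trait above bundled filesystem access together with the built-in-module check; after this patch the filesystem side comes from sys_traits bounds, and the only behavioural hook left to implement is the one-method IsBuiltInNodeModuleChecker trait added in resolution.rs below. A hypothetical implementation for illustration only (the module list here is made up; a real checker consults the full built-in list):

use node_resolver::IsBuiltInNodeModuleChecker;

#[derive(Debug)] // the trait requires Debug
struct ExampleChecker;

impl IsBuiltInNodeModuleChecker for ExampleChecker {
  fn is_builtin_node_module(&self, specifier: &str) -> bool {
    // illustrative subset only
    matches!(specifier, "fs" | "path" | "os" | "url" | "crypto")
  }
}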
-use deno_package_json::PackageJson; -use deno_package_json::PackageJsonRc; -use deno_path_util::strip_unc_prefix; use std::cell::RefCell; use std::collections::HashMap; use std::io::ErrorKind; use std::path::Path; use std::path::PathBuf; + +use deno_package_json::PackageJson; +use deno_package_json::PackageJsonRc; +use sys_traits::FsRead; use url::Url; -use crate::env::NodeResolverEnv; -use crate::errors::CanonicalizingPkgJsonDirError; use crate::errors::ClosestPkgJsonError; use crate::errors::PackageJsonLoadError; @@ -40,17 +39,17 @@ impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache { } #[allow(clippy::disallowed_types)] -pub type PackageJsonResolverRc = - crate::sync::MaybeArc>; +pub type PackageJsonResolverRc = + crate::sync::MaybeArc>; #[derive(Debug)] -pub struct PackageJsonResolver { - env: TEnv, +pub struct PackageJsonResolver { + sys: TSys, } -impl PackageJsonResolver { - pub fn new(env: TEnv) -> Self { - Self { env } +impl PackageJsonResolver { + pub fn new(sys: TSys) -> Self { + Self { sys } } pub fn get_closest_package_json( @@ -67,37 +66,8 @@ impl PackageJsonResolver { &self, file_path: &Path, ) -> Result, ClosestPkgJsonError> { - // we use this for deno compile using byonm because the script paths - // won't be in virtual file system, but the package.json paths will be - fn canonicalize_first_ancestor_exists( - dir_path: &Path, - env: &TEnv, - ) -> Result, std::io::Error> { - for ancestor in dir_path.ancestors() { - match env.realpath_sync(ancestor) { - Ok(dir_path) => return Ok(Some(dir_path)), - Err(err) if err.kind() == std::io::ErrorKind::NotFound => { - // keep searching - } - Err(err) => return Err(err), - } - } - Ok(None) - } - let parent_dir = file_path.parent().unwrap(); - let Some(start_dir) = canonicalize_first_ancestor_exists( - parent_dir, &self.env, - ) - .map_err(|source| CanonicalizingPkgJsonDirError { - dir_path: parent_dir.to_path_buf(), - source, - })? - else { - return Ok(None); - }; - let start_dir = strip_unc_prefix(start_dir); - for current_dir in start_dir.ancestors() { + for current_dir in parent_dir.ancestors() { let package_json_path = current_dir.join("package.json"); if let Some(pkg_json) = self.load_package_json(&package_json_path)? 
{ return Ok(Some(pkg_json)); @@ -112,9 +82,9 @@ impl PackageJsonResolver { path: &Path, ) -> Result, PackageJsonLoadError> { let result = PackageJson::load_from_path( - path, - self.env.pkg_json_fs(), + &self.sys, Some(&PackageJsonThreadLocalCache), + path, ); match result { Ok(pkg_json) => Ok(Some(pkg_json)), diff --git a/resolvers/node/resolution.rs b/resolvers/node/resolution.rs index 5f87698cd6..95631daf39 100644 --- a/resolvers/node/resolution.rs +++ b/resolvers/node/resolution.rs @@ -9,9 +9,13 @@ use anyhow::Error as AnyError; use deno_path_util::url_from_file_path; use serde_json::Map; use serde_json::Value; +use sys_traits::FileType; +use sys_traits::FsCanonicalize; +use sys_traits::FsMetadata; +use sys_traits::FsMetadataValue; +use sys_traits::FsRead; use url::Url; -use crate::env::NodeResolverEnv; use crate::errors; use crate::errors::DataUrlReferrerError; use crate::errors::FinalizeResolutionError; @@ -98,29 +102,44 @@ impl NodeResolution { } } -#[allow(clippy::disallowed_types)] -pub type NodeResolverRc = crate::sync::MaybeArc>; - -#[derive(Debug)] -pub struct NodeResolver { - env: TEnv, - in_npm_pkg_checker: InNpmPackageCheckerRc, - npm_pkg_folder_resolver: NpmPackageFolderResolverRc, - pkg_json_resolver: PackageJsonResolverRc, +pub trait IsBuiltInNodeModuleChecker: std::fmt::Debug { + fn is_builtin_node_module(&self, specifier: &str) -> bool; } -impl NodeResolver { +#[allow(clippy::disallowed_types)] +pub type NodeResolverRc = + crate::sync::MaybeArc>; + +#[derive(Debug)] +pub struct NodeResolver< + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead, +> { + in_npm_pkg_checker: InNpmPackageCheckerRc, + is_built_in_node_module_checker: TIsBuiltInNodeModuleChecker, + npm_pkg_folder_resolver: NpmPackageFolderResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + sys: TSys, +} + +impl< + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TSys: FsCanonicalize + FsMetadata + FsRead, + > NodeResolver +{ pub fn new( - env: TEnv, in_npm_pkg_checker: InNpmPackageCheckerRc, + is_built_in_node_module_checker: TIsBuiltInNodeModuleChecker, npm_pkg_folder_resolver: NpmPackageFolderResolverRc, - pkg_json_resolver: PackageJsonResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + sys: TSys, ) -> Self { Self { - env, in_npm_pkg_checker, + is_built_in_node_module_checker, npm_pkg_folder_resolver, pkg_json_resolver, + sys, } } @@ -140,7 +159,10 @@ impl NodeResolver { // Note: if we are here, then the referrer is an esm module // TODO(bartlomieju): skipped "policy" part as we don't plan to support it - if self.env.is_builtin_node_module(specifier) { + if self + .is_built_in_node_module_checker + .is_builtin_node_module(specifier) + { return Ok(NodeResolution::BuiltIn(specifier.to_string())); } @@ -282,32 +304,25 @@ impl NodeResolver { p_str.to_string() }; - let (is_dir, is_file) = if let Ok(stats) = self.env.stat_sync(Path::new(&p)) - { - (stats.is_dir, stats.is_file) - } else { - (false, false) - }; - if is_dir { - return Err( + let maybe_file_type = self.sys.fs_metadata(p).map(|m| m.file_type()); + match maybe_file_type { + Ok(FileType::Dir) => Err( UnsupportedDirImportError { dir_url: resolved.clone(), maybe_referrer: maybe_referrer.map(ToOwned::to_owned), } .into(), - ); - } else if !is_file { - return Err( + ), + Ok(FileType::File) => Ok(resolved), + _ => Err( ModuleNotFoundError { specifier: resolved, maybe_referrer: maybe_referrer.map(ToOwned::to_owned), typ: "module", } .into(), - ); + ), } - - Ok(resolved) } pub fn 
resolve_package_subpath_from_deno_module( @@ -318,6 +333,8 @@ impl NodeResolver { resolution_mode: ResolutionMode, resolution_kind: NodeResolutionKind, ) -> Result { + // todo(dsherret): don't allocate a string here (maybe use an + // enum that says the subpath is not prefixed with a ./) let package_subpath = package_subpath .map(|s| format!("./{s}")) .unwrap_or_else(|| ".".to_string()); @@ -395,8 +412,8 @@ impl NodeResolver { maybe_referrer: Option<&Url>, resolution_mode: ResolutionMode, ) -> Result { - fn probe_extensions( - fs: &TEnv, + fn probe_extensions( + sys: &TSys, path: &Path, lowercase_path: &str, resolution_mode: ResolutionMode, @@ -405,20 +422,20 @@ impl NodeResolver { let mut searched_for_d_cts = false; if lowercase_path.ends_with(".mjs") { let d_mts_path = with_known_extension(path, "d.mts"); - if fs.exists_sync(&d_mts_path) { + if sys.fs_exists_no_err(&d_mts_path) { return Some(d_mts_path); } searched_for_d_mts = true; } else if lowercase_path.ends_with(".cjs") { let d_cts_path = with_known_extension(path, "d.cts"); - if fs.exists_sync(&d_cts_path) { + if sys.fs_exists_no_err(&d_cts_path) { return Some(d_cts_path); } searched_for_d_cts = true; } let dts_path = with_known_extension(path, "d.ts"); - if fs.exists_sync(&dts_path) { + if sys.fs_exists_no_err(&dts_path) { return Some(dts_path); } @@ -432,7 +449,7 @@ impl NodeResolver { _ => None, // already searched above }; if let Some(specific_dts_path) = specific_dts_path { - if fs.exists_sync(&specific_dts_path) { + if sys.fs_exists_no_err(&specific_dts_path) { return Some(specific_dts_path); } } @@ -447,11 +464,11 @@ impl NodeResolver { return Ok(url_from_file_path(path).unwrap()); } if let Some(path) = - probe_extensions(&self.env, path, &lowercase_path, resolution_mode) + probe_extensions(&self.sys, path, &lowercase_path, resolution_mode) { return Ok(url_from_file_path(&path).unwrap()); } - if self.env.is_dir_sync(path) { + if self.sys.fs_is_dir_no_err(path) { let resolution_result = self.resolve_package_dir_subpath( path, /* sub path */ ".", @@ -465,7 +482,7 @@ impl NodeResolver { } let index_path = path.join("index.js"); if let Some(path) = probe_extensions( - &self.env, + &self.sys, &index_path, &index_path.to_string_lossy().to_lowercase(), resolution_mode, @@ -669,7 +686,10 @@ impl NodeResolver { return match result { Ok(url) => Ok(url), Err(err) => { - if self.env.is_builtin_node_module(target) { + if self + .is_built_in_node_module_checker + .is_builtin_node_module(target) + { Ok(Url::parse(&format!("node:{}", target)).unwrap()) } else { Err(err) @@ -1351,7 +1371,7 @@ impl NodeResolver { if let Some(main) = maybe_main { let guess = package_json.path.parent().unwrap().join(main).clean(); - if self.env.is_file_sync(&guess) { + if self.sys.fs_is_file_no_err(&guess) { return Ok(url_from_file_path(&guess).unwrap()); } @@ -1380,7 +1400,7 @@ impl NodeResolver { .unwrap() .join(format!("{main}{ending}")) .clean(); - if self.env.is_file_sync(&guess) { + if self.sys.fs_is_file_no_err(&guess) { // TODO(bartlomieju): emitLegacyIndexDeprecation() return Ok(url_from_file_path(&guess).unwrap()); } @@ -1415,7 +1435,7 @@ impl NodeResolver { }; for index_file_name in index_file_names { let guess = directory.join(index_file_name).clean(); - if self.env.is_file_sync(&guess) { + if self.sys.fs_is_file_no_err(&guess) { // TODO(bartlomieju): emitLegacyIndexDeprecation() return Ok(url_from_file_path(&guess).unwrap()); } @@ -1452,9 +1472,7 @@ impl NodeResolver { { // Specifiers in the node_modules directory are canonicalized // so 
canonicalize then check if it's in the node_modules directory. - let specifier = resolve_specifier_into_node_modules(specifier, &|path| { - self.env.realpath_sync(path) - }); + let specifier = resolve_specifier_into_node_modules(&self.sys, specifier); return Some(specifier); } @@ -1715,16 +1733,15 @@ pub fn parse_npm_pkg_name( /// not be fully resolved at the time deno_graph is analyzing it /// because the node_modules folder might not exist at that time. pub fn resolve_specifier_into_node_modules( + sys: &impl FsCanonicalize, specifier: &Url, - canonicalize: &impl Fn(&Path) -> std::io::Result<PathBuf>, ) -> Url { deno_path_util::url_to_file_path(specifier) .ok() // this path might not exist at the time the graph is being created // because the node_modules folder might not yet exist .and_then(|path| { - deno_path_util::canonicalize_path_maybe_not_exists(&path, canonicalize) .ok() + deno_path_util::fs::canonicalize_path_maybe_not_exists(sys, &path).ok() }) .and_then(|path| deno_path_util::url_from_file_path(&path).ok()) .unwrap_or_else(|| specifier.clone()) diff --git a/resolvers/npm_cache/Cargo.toml b/resolvers/npm_cache/Cargo.toml index a0a106c89b..48d0a32437 100644 --- a/resolvers/npm_cache/Cargo.toml +++ b/resolvers/npm_cache/Cargo.toml @@ -23,7 +23,9 @@ async-trait.workspace = true base64.workspace = true boxed_error.workspace = true deno_cache_dir.workspace = true +deno_error.workspace = true deno_npm.workspace = true +deno_path_util.workspace = true deno_semver.workspace = true deno_unsync = { workspace = true, features = ["tokio"] } faster-hex.workspace = true @@ -36,6 +38,7 @@ percent-encoding.workspace = true rand.workspace = true ring.workspace = true serde_json.workspace = true +sys_traits.workspace = true tar.workspace = true tempfile = "3.4.0" thiserror.workspace = true diff --git a/resolvers/npm_cache/fs_util.rs b/resolvers/npm_cache/fs_util.rs new file mode 100644 index 0000000000..ed123f085c --- /dev/null +++ b/resolvers/npm_cache/fs_util.rs @@ -0,0 +1,99 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use anyhow::Context; +use anyhow::Error as AnyError; +use std::io::ErrorKind; +use std::path::Path; +use std::time::Duration; +use sys_traits::FsCreateDirAll; +use sys_traits::FsDirEntry; +use sys_traits::FsHardLink; +use sys_traits::FsReadDir; +use sys_traits::FsRemoveFile; +use sys_traits::ThreadSleep; + +/// Hardlinks the files in one directory to another directory. +/// +/// Note: Does not handle symlinks. +pub fn hard_link_dir_recursive< + TSys: FsCreateDirAll + FsHardLink + FsReadDir + FsRemoveFile + ThreadSleep, +>( + sys: &TSys, + from: &Path, + to: &Path, +) -> Result<(), AnyError> { + sys + .fs_create_dir_all(to) + .with_context(|| format!("Creating {}", to.display()))?; + let read_dir = sys + .fs_read_dir(from) + .with_context(|| format!("Reading {}", from.display()))?; + + for entry in read_dir { + let entry = entry?; + let file_type = entry.file_type()?; + let new_from = from.join(entry.file_name()); + let new_to = to.join(entry.file_name()); + + if file_type.is_dir() { + hard_link_dir_recursive(sys, &new_from, &new_to).with_context(|| { + format!("Dir {} to {}", new_from.display(), new_to.display()) + })?; + } else if file_type.is_file() { + // note: chance for race conditions here between attempting to create, + // then removing, then attempting to create. There doesn't seem to be + // a way to hard link with overwriting in Rust, but maybe there is some + // way with platform specific code.
The workaround here is to handle + // scenarios where something else might create or remove files. + if let Err(err) = sys.fs_hard_link(&new_from, &new_to) { + if err.kind() == ErrorKind::AlreadyExists { + if let Err(err) = sys.fs_remove_file(&new_to) { + if err.kind() == ErrorKind::NotFound { + // Assume another process/thread created this hard link to the file we are wanting + // to remove then sleep a little bit to let the other process/thread move ahead + // faster to reduce contention. + sys.thread_sleep(Duration::from_millis(10)); + } else { + return Err(err).with_context(|| { + format!( + "Removing file to hard link {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + + // Always attempt to recreate the hardlink. In contention scenarios, the other process + // might have been killed or exited after removing the file, but before creating the hardlink + if let Err(err) = sys.fs_hard_link(&new_from, &new_to) { + // Assume another process/thread created this hard link to the file we are wanting + // to now create then sleep a little bit to let the other process/thread move ahead + // faster to reduce contention. + if err.kind() == ErrorKind::AlreadyExists { + sys.thread_sleep(Duration::from_millis(10)); + } else { + return Err(err).with_context(|| { + format!( + "Hard linking {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + } else { + return Err(err).with_context(|| { + format!( + "Hard linking {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + } + } + + Ok(()) +} diff --git a/resolvers/npm_cache/lib.rs b/resolvers/npm_cache/lib.rs index c16c29aaf2..e681fa71ac 100644 --- a/resolvers/npm_cache/lib.rs +++ b/resolvers/npm_cache/lib.rs @@ -14,19 +14,32 @@ use deno_cache_dir::npm::NpmCacheDir; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; use deno_npm::NpmPackageCacheFolderId; +use deno_path_util::fs::atomic_write_file_with_retries; use deno_semver::package::PackageNv; +use deno_semver::StackString; use deno_semver::Version; use http::HeaderName; use http::HeaderValue; use http::StatusCode; use parking_lot::Mutex; +use sys_traits::FsCreateDirAll; +use sys_traits::FsHardLink; +use sys_traits::FsMetadata; +use sys_traits::FsOpen; +use sys_traits::FsReadDir; +use sys_traits::FsRemoveFile; +use sys_traits::FsRename; +use sys_traits::SystemRandom; +use sys_traits::ThreadSleep; use url::Url; +mod fs_util; mod registry_info; mod remote; mod tarball; mod tarball_extract; +pub use fs_util::hard_link_dir_recursive; pub use registry_info::RegistryInfoProvider; pub use tarball::TarballCache; @@ -54,18 +67,7 @@ impl std::fmt::Display for DownloadError { } #[async_trait::async_trait(?Send)] -pub trait NpmCacheEnv: Send + Sync + 'static { - fn exists(&self, path: &Path) -> bool; - fn hard_link_dir_recursive( - &self, - from: &Path, - to: &Path, - ) -> Result<(), AnyError>; - fn atomic_write_file_with_retries( - &self, - file_path: &Path, - data: &[u8], - ) -> std::io::Result<()>; +pub trait NpmCacheHttpClient: Send + Sync + 'static { async fn download_with_retries_on_any_tokio_runtime( &self, url: Url, @@ -125,27 +127,48 @@ impl NpmCacheSetting { /// Stores a single copy of npm packages in a cache. 
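The retry logic in hard_link_dir_recursive above boils down to one file-level dance: link, and on AlreadyExists remove and link again, backing off briefly whenever another process wins a race. A condensed sketch of just that dance, using std::fs directly for brevity rather than the sys_traits bounds the real helper takes:

use std::io::ErrorKind;
use std::path::Path;
use std::time::Duration;

fn hard_link_with_overwrite(from: &Path, to: &Path) -> std::io::Result<()> {
  match std::fs::hard_link(from, to) {
    Ok(()) => Ok(()),
    Err(err) if err.kind() == ErrorKind::AlreadyExists => {
      if let Err(err) = std::fs::remove_file(to) {
        if err.kind() != ErrorKind::NotFound {
          return Err(err);
        }
        // another process removed the file first; back off a little
        std::thread::sleep(Duration::from_millis(10));
      }
      // always retry: the other process may have exited between
      // removing the file and re-creating the link
      match std::fs::hard_link(from, to) {
        Err(err) if err.kind() == ErrorKind::AlreadyExists => {
          std::thread::sleep(Duration::from_millis(10));
          Ok(())
        }
        other => other,
      }
    }
    Err(err) => Err(err),
  }
}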
#[derive(Debug)] -pub struct NpmCache { - env: Arc, +pub struct NpmCache< + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsReadDir + + FsRemoveFile + + FsRename + + ThreadSleep + + SystemRandom, +> { cache_dir: Arc, + sys: TSys, cache_setting: NpmCacheSetting, npmrc: Arc, previously_reloaded_packages: Mutex>, } -impl NpmCache { +impl< + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsReadDir + + FsRemoveFile + + FsRename + + ThreadSleep + + SystemRandom, + > NpmCache +{ pub fn new( cache_dir: Arc, + sys: TSys, cache_setting: NpmCacheSetting, - env: Arc, npmrc: Arc, ) -> Self { Self { cache_dir, + sys, cache_setting, - env, - previously_reloaded_packages: Default::default(), npmrc, + previously_reloaded_packages: Default::default(), } } @@ -210,9 +233,11 @@ impl NpmCache { // it seems Windows does an "AccessDenied" error when moving a // directory with hard links, so that's why this solution is done with_folder_sync_lock(&folder_id.nv, &package_folder, || { - self - .env - .hard_link_dir_recursive(&original_package_folder, &package_folder) + hard_link_dir_recursive( + &self.sys, + &original_package_folder, + &package_folder, + ) })?; Ok(()) } @@ -260,7 +285,7 @@ impl NpmCache { .and_then(|cache_id| { Some(NpmPackageCacheFolderId { nv: PackageNv { - name: cache_id.name, + name: StackString::from_string(cache_id.name), version: Version::parse_from_npm(&cache_id.version).ok()?, }, copy_index: cache_id.copy_index, @@ -289,9 +314,12 @@ impl NpmCache { ) -> Result<(), AnyError> { let file_cache_path = self.get_registry_package_info_file_cache_path(name); let file_text = serde_json::to_string(&package_info)?; - self - .env - .atomic_write_file_with_retries(&file_cache_path, file_text.as_bytes())?; + atomic_write_file_with_retries( + &self.sys, + &file_cache_path, + file_text.as_bytes(), + 0o644, + )?; Ok(()) } @@ -303,6 +331,7 @@ impl NpmCache { const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock"; +// todo(dsherret): use `sys` here instead of `std::fs`. 
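Since the body of with_folder_sync_lock is not shown in this hunk, here is a hypothetical sketch of the shape such a helper can take, inferred only from the .deno_sync_lock filename above: a marker file guards the folder while it is being populated, so an interrupted process leaves the marker behind as a signal that the contents are incomplete. This is an assumption about the general pattern, not the function's actual body:

use std::path::Path;

fn with_sync_lock_sketch(
  output_folder: &Path,
  action: impl FnOnce() -> std::io::Result<()>,
) -> std::io::Result<()> {
  std::fs::create_dir_all(output_folder)?;
  // hypothetical sketch: marker file guards the folder while populating
  let lock_file = output_folder.join(".deno_sync_lock");
  std::fs::write(&lock_file, "")?;
  action()?;
  // only remove the marker once the folder is fully populated
  std::fs::remove_file(&lock_file)
}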
fn with_folder_sync_lock( package: &PackageNv, output_folder: &Path, diff --git a/resolvers/npm_cache/registry_info.rs b/resolvers/npm_cache/registry_info.rs index 543ddadc5a..57e188200d 100644 --- a/resolvers/npm_cache/registry_info.rs +++ b/resolvers/npm_cache/registry_info.rs @@ -18,16 +18,51 @@ use deno_unsync::sync::MultiRuntimeAsyncValueCreator; use futures::future::LocalBoxFuture; use futures::FutureExt; use parking_lot::Mutex; +use sys_traits::FsCreateDirAll; +use sys_traits::FsHardLink; +use sys_traits::FsMetadata; +use sys_traits::FsOpen; +use sys_traits::FsReadDir; +use sys_traits::FsRemoveFile; +use sys_traits::FsRename; +use sys_traits::SystemRandom; +use sys_traits::ThreadSleep; +use thiserror::Error; use url::Url; use crate::remote::maybe_auth_header_for_npm_registry; use crate::NpmCache; -use crate::NpmCacheEnv; +use crate::NpmCacheHttpClient; use crate::NpmCacheSetting; type LoadResult = Result>; type LoadFuture = LocalBoxFuture<'static, LoadResult>; +#[derive(Debug, Error)] +#[error(transparent)] +pub struct AnyhowJsError(pub AnyError); + +impl deno_error::JsErrorClass for AnyhowJsError { + fn get_class(&self) -> &'static str { + "generic" + } + + fn get_message(&self) -> std::borrow::Cow<'static, str> { + self.0.to_string().into() + } + + fn get_additional_properties( + &self, + ) -> Option< + Vec<( + std::borrow::Cow<'static, str>, + std::borrow::Cow<'static, str>, + )>, + > { + None + } +} + #[derive(Debug, Clone)] enum FutureResult { PackageNotExists, @@ -96,25 +131,54 @@ impl MemoryCache { /// /// This is shared amongst all the workers. #[derive(Debug)] -pub struct RegistryInfoProvider { +pub struct RegistryInfoProvider< + THttpClient: NpmCacheHttpClient, + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsReadDir + + FsRemoveFile + + FsRename + + ThreadSleep + + SystemRandom + + Send + + Sync + + 'static, +> { // todo(#27198): remove this - cache: Arc>, - env: Arc, + cache: Arc>, + http_client: Arc, npmrc: Arc, force_reload_flag: AtomicFlag, memory_cache: Mutex, previously_loaded_packages: Mutex>, } -impl RegistryInfoProvider { +impl< + THttpClient: NpmCacheHttpClient, + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsReadDir + + FsRemoveFile + + FsRename + + ThreadSleep + + SystemRandom + + Send + + Sync + + 'static, + > RegistryInfoProvider +{ pub fn new( - cache: Arc>, - env: Arc, + cache: Arc>, + http_client: Arc, npmrc: Arc, ) -> Self { Self { cache, - env, + http_client, npmrc, force_reload_flag: AtomicFlag::lowered(), memory_cache: Default::default(), @@ -144,7 +208,9 @@ impl RegistryInfoProvider { } } - pub fn as_npm_registry_api(self: &Arc) -> NpmRegistryApiAdapter { + pub fn as_npm_registry_api( + self: &Arc, + ) -> NpmRegistryApiAdapter { NpmRegistryApiAdapter(self.clone()) } @@ -157,9 +223,9 @@ impl RegistryInfoProvider { Ok(None) => Err(NpmRegistryPackageInfoLoadError::PackageNotExists { package_name: name.to_string(), }), - Err(err) => { - Err(NpmRegistryPackageInfoLoadError::LoadError(Arc::new(err))) - } + Err(err) => Err(NpmRegistryPackageInfoLoadError::LoadError(Arc::new( + AnyhowJsError(err), + ))), } } @@ -315,7 +381,7 @@ impl RegistryInfoProvider { downloader.previously_loaded_packages.lock().insert(name.to_string()); let maybe_bytes = downloader - .env + .http_client .download_with_retries_on_any_tokio_runtime( package_url, maybe_auth_header, @@ -352,12 +418,39 @@ impl RegistryInfoProvider { } } -pub struct NpmRegistryApiAdapter( - Arc>, -); +pub struct NpmRegistryApiAdapter< + THttpClient: 
NpmCacheHttpClient, + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsReadDir + + FsRemoveFile + + FsRename + + ThreadSleep + + SystemRandom + + Send + + Sync + + 'static, +>(Arc>); #[async_trait(?Send)] -impl NpmRegistryApi for NpmRegistryApiAdapter { +impl< + THttpClient: NpmCacheHttpClient, + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsReadDir + + FsRemoveFile + + FsRename + + ThreadSleep + + SystemRandom + + Send + + Sync + + 'static, + > NpmRegistryApi for NpmRegistryApiAdapter +{ async fn package_info( &self, name: &str, diff --git a/resolvers/npm_cache/tarball.rs b/resolvers/npm_cache/tarball.rs index 5c8e460fd6..3a7e9df8a9 100644 --- a/resolvers/npm_cache/tarball.rs +++ b/resolvers/npm_cache/tarball.rs @@ -15,13 +15,22 @@ use futures::future::LocalBoxFuture; use futures::FutureExt; use http::StatusCode; use parking_lot::Mutex; +use sys_traits::FsCreateDirAll; +use sys_traits::FsHardLink; +use sys_traits::FsMetadata; +use sys_traits::FsOpen; +use sys_traits::FsReadDir; +use sys_traits::FsRemoveFile; +use sys_traits::FsRename; +use sys_traits::SystemRandom; +use sys_traits::ThreadSleep; use url::Url; use crate::remote::maybe_auth_header_for_npm_registry; use crate::tarball_extract::verify_and_extract_tarball; use crate::tarball_extract::TarballExtractionMode; use crate::NpmCache; -use crate::NpmCacheEnv; +use crate::NpmCacheHttpClient; use crate::NpmCacheSetting; type LoadResult = Result<(), Arc>; @@ -42,22 +51,54 @@ enum MemoryCacheItem { /// /// This is shared amongst all the workers. #[derive(Debug)] -pub struct TarballCache { - cache: Arc>, - env: Arc, +pub struct TarballCache< + THttpClient: NpmCacheHttpClient, + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsRemoveFile + + FsReadDir + + FsRename + + ThreadSleep + + SystemRandom + + Send + + Sync + + 'static, +> { + cache: Arc>, + http_client: Arc, + sys: TSys, npmrc: Arc, memory_cache: Mutex>, } -impl TarballCache { +impl< + THttpClient: NpmCacheHttpClient, + TSys: FsCreateDirAll + + FsHardLink + + FsMetadata + + FsOpen + + FsRemoveFile + + FsReadDir + + FsRename + + ThreadSleep + + SystemRandom + + Send + + Sync + + 'static, + > TarballCache +{ pub fn new( - cache: Arc>, - env: Arc, + cache: Arc>, + http_client: Arc, + sys: TSys, npmrc: Arc, ) -> Self { Self { cache, - env, + http_client, + sys, npmrc, memory_cache: Default::default(), } @@ -131,7 +172,7 @@ impl TarballCache { let package_folder = tarball_cache.cache.package_folder_for_nv_and_url(&package_nv, registry_url); let should_use_cache = tarball_cache.cache.should_use_cache_for_package(&package_nv); - let package_folder_exists = tarball_cache.env.exists(&package_folder); + let package_folder_exists = tarball_cache.sys.fs_exists_no_err(&package_folder); if should_use_cache && package_folder_exists { return Ok(()); } else if tarball_cache.cache.cache_setting() == &NpmCacheSetting::Only { @@ -156,7 +197,7 @@ impl TarballCache { tarball_cache.npmrc.tarball_config(&tarball_uri); let maybe_auth_header = maybe_registry_config.and_then(|c| maybe_auth_header_for_npm_registry(c).ok()?); - let result = tarball_cache.env + let result = tarball_cache.http_client .download_with_retries_on_any_tokio_runtime(tarball_uri, maybe_auth_header) .await; let maybe_bytes = match result { diff --git a/resolvers/npm_cache/tarball_extract.rs b/resolvers/npm_cache/tarball_extract.rs index c4c614b35f..affe93eaa4 100644 --- a/resolvers/npm_cache/tarball_extract.rs +++ b/resolvers/npm_cache/tarball_extract.rs @@ -236,7 +236,7 @@ mod 
test { #[test] pub fn test_verify_tarball() { let package = PackageNv { - name: "package".to_string(), + name: "package".into(), version: Version::parse_from_npm("1.0.0").unwrap(), }; let actual_checksum = diff --git a/runtime/fmt_errors.rs b/runtime/fmt_errors.rs index 6f120b5d46..3c60c3a3d7 100644 --- a/runtime/fmt_errors.rs +++ b/runtime/fmt_errors.rs @@ -422,6 +422,20 @@ fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec<FixSuggestion> { "Run again with `--unstable-webgpu` flag to enable this API.", ), ]; + } else if msg.contains("listenQuic is not a function") { + return vec![ + FixSuggestion::info("listenQuic is an unstable API."), + FixSuggestion::hint( + "Run again with `--unstable-net` flag to enable this API.", + ), + ]; + } else if msg.contains("connectQuic is not a function") { + return vec![ + FixSuggestion::info("connectQuic is an unstable API."), + FixSuggestion::hint( + "Run again with `--unstable-net` flag to enable this API.", + ), + ]; // Try to capture errors like: // ``` // Uncaught Error: Cannot find module '../build/Release/canvas.node' diff --git a/runtime/js/90_deno_ns.js b/runtime/js/90_deno_ns.js index a510ee33c4..5511649279 100644 --- a/runtime/js/90_deno_ns.js +++ b/runtime/js/90_deno_ns.js @@ -13,6 +13,7 @@ import * as console from "ext:deno_console/01_console.js"; import * as ffi from "ext:deno_ffi/00_ffi.js"; import * as net from "ext:deno_net/01_net.js"; import * as tls from "ext:deno_net/02_tls.js"; +import * as quic from "ext:deno_net/03_quic.js"; import * as serve from "ext:deno_http/00_serve.ts"; import * as http from "ext:deno_http/01_http.js"; import * as websocket from "ext:deno_http/02_websocket.ts"; @@ -174,6 +175,15 @@ denoNsUnstableById[unstableIds.net] = { op_net_listen_udp, op_net_listen_unixpacket, ), + + connectQuic: quic.connectQuic, + listenQuic: quic.listenQuic, + QuicBidirectionalStream: quic.QuicBidirectionalStream, + QuicConn: quic.QuicConn, + QuicListener: quic.QuicListener, + QuicReceiveStream: quic.QuicReceiveStream, + QuicSendStream: quic.QuicSendStream, + QuicIncoming: quic.QuicIncoming, }; // denoNsUnstableById[unstableIds.unsafeProto] = { __proto__: null } diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 19432745d4..a11444bc36 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -170,12 +170,14 @@ function postMessage(message, transferOrOptions = { __proto__: null }) { let isClosing = false; let globalDispatchEvent; +let closeOnIdle; function hasMessageEventListener() { // the function name is kind of a misnomer, but we want to behave // as if we have message event listeners if a node message port is explicitly // refed (and the inverse as well) - return event.listenerCount(globalThis, "message") > 0 || + return (event.listenerCount(globalThis, "message") > 0 && + !globalThis[messagePort.unrefParentPort]) || messagePort.refedMessagePortsCount > 0; } @@ -188,7 +190,10 @@ async function pollForMessages() { } while (!isClosing) { const recvMessage = op_worker_recv_message(); - if (globalThis[messagePort.unrefPollForMessages] === true) { + // In a Node.js worker, unref() the op promise to prevent it from + // keeping the event loop alive. This avoids the need to explicitly + // call self.close() or worker.terminate().
+ if (closeOnIdle) { core.unrefOpPromise(recvMessage); } const data = await recvMessage; @@ -521,6 +526,9 @@ const NOT_IMPORTED_OPS = [ // Used in jupyter API "op_base64_encode", + // Used in the lint API + "op_lint_create_serialized_ast", + // Related to `Deno.test()` API "op_test_event_step_result_failed", "op_test_event_step_result_ignored", @@ -915,6 +923,7 @@ function bootstrapWorkerRuntime( 6: argv0, 7: nodeDebug, 13: otelConfig, + 14: closeOnIdle_, } = runtimeOptions; performance.setTimeOrigin(); @@ -967,6 +976,7 @@ function bootstrapWorkerRuntime( globalThis.pollForMessages = pollForMessages; globalThis.hasMessageEventListener = hasMessageEventListener; + closeOnIdle = closeOnIdle_; for (let i = 0; i <= unstableFeatures.length; i++) { const id = unstableFeatures[i]; diff --git a/runtime/permissions/lib.rs b/runtime/permissions/lib.rs index bbd0301db4..1c5fb36f93 100644 --- a/runtime/permissions/lib.rs +++ b/runtime/permissions/lib.rs @@ -183,7 +183,7 @@ impl PermissionState { PermissionState::Prompt if prompt => { let msg = { let info = info(); - StringBuilder::build(|builder| { + StringBuilder::::build(|builder| { builder.append(name); builder.append(" access"); if let Some(info) = &info { @@ -498,7 +498,7 @@ impl UnaryPermission { } let maybe_formatted_display_name = desc.map(|d| format_display_name(d.display_name())); - let message = StringBuilder::build(|builder| { + let message = StringBuilder::::build(|builder| { builder.append(TQuery::flag_name()); builder.append(" access"); if let Some(display_name) = &maybe_formatted_display_name { diff --git a/runtime/sys_info.rs b/runtime/sys_info.rs index f99cfc99f9..99bfcfe103 100644 --- a/runtime/sys_info.rs +++ b/runtime/sys_info.rs @@ -295,11 +295,9 @@ pub fn mem_info() -> Option { // TODO(@littledivy): Put this in a once_cell let page_size = libc::sysconf(libc::_SC_PAGESIZE) as u64; mem_info.available = - (stat.free_count as u64 + stat.inactive_count as u64) * page_size - / 1024; + (stat.free_count as u64 + stat.inactive_count as u64) * page_size; mem_info.free = - (stat.free_count as u64 - stat.speculative_count as u64) * page_size - / 1024; + (stat.free_count as u64 - stat.speculative_count as u64) * page_size; } } } diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index e3a69b39c0..faf4f3fc52 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -58,7 +58,6 @@ use std::task::Poll; use crate::inspector_server::InspectorServer; use crate::ops; use crate::ops::process::NpmProcessStateProviderRc; -use crate::ops::worker_host::WorkersTable; use crate::shared::maybe_transpile_source; use crate::shared::runtime; use crate::tokio_util::create_and_run_current_thread; @@ -385,7 +384,6 @@ pub struct WebWorker { pub js_runtime: JsRuntime, pub name: String, close_on_idle: bool, - has_executed_main_module: bool, internal_handle: WebWorkerInternalHandle, pub worker_type: WebWorkerType, pub main_module: ModuleSpecifier, @@ -658,7 +656,6 @@ impl WebWorker { has_message_event_listener_fn: None, bootstrap_fn_global: Some(bootstrap_fn_global), close_on_idle: options.close_on_idle, - has_executed_main_module: false, maybe_worker_metadata: options.maybe_worker_metadata, }, external_handle, @@ -799,7 +796,6 @@ impl WebWorker { maybe_result = &mut receiver => { debug!("received worker module evaluate {:#?}", maybe_result); - self.has_executed_main_module = true; maybe_result } @@ -837,6 +833,9 @@ impl WebWorker { } if self.close_on_idle { + if self.has_message_event_listener() { + return Poll::Pending; + } return 
Poll::Ready(Ok(())); } @@ -851,22 +850,7 @@ impl WebWorker { Poll::Ready(Ok(())) } } - Poll::Pending => { - // This is special code path for workers created from `node:worker_threads` - // module that have different semantics than Web workers. - // We want the worker thread to terminate automatically if we've done executing - // Top-Level await, there are no child workers spawned by that workers - // and there's no "message" event listener. - if self.close_on_idle - && self.has_executed_main_module - && !self.has_child_workers() - && !self.has_message_event_listener() - { - Poll::Ready(Ok(())) - } else { - Poll::Pending - } - } + Poll::Pending => Poll::Pending, } } @@ -904,15 +888,6 @@ impl WebWorker { None => false, } } - - fn has_child_workers(&mut self) -> bool { - !self - .js_runtime - .op_state() - .borrow() - .borrow::() - .is_empty() - } } fn print_worker_error( diff --git a/runtime/worker_bootstrap.rs b/runtime/worker_bootstrap.rs index 2020c2bc8d..8364fe0d2b 100644 --- a/runtime/worker_bootstrap.rs +++ b/runtime/worker_bootstrap.rs @@ -120,6 +120,7 @@ pub struct BootstrapOptions { pub serve_port: Option, pub serve_host: Option, pub otel_config: OtelConfig, + pub close_on_idle: bool, } impl Default for BootstrapOptions { @@ -155,6 +156,7 @@ impl Default for BootstrapOptions { serve_port: Default::default(), serve_host: Default::default(), otel_config: Default::default(), + close_on_idle: false, } } } @@ -198,6 +200,8 @@ struct BootstrapV8<'a>( Option, // OTEL config Box<[u8]>, + // close on idle + bool, ); impl BootstrapOptions { @@ -225,6 +229,7 @@ impl BootstrapOptions { serve_is_main, serve_worker_count, self.otel_config.as_v8(), + self.close_on_idle, ); bootstrap.serialize(ser).unwrap() diff --git a/tests/Cargo.toml b/tests/Cargo.toml index fa51d7b77b..1300066c64 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -60,6 +60,7 @@ pretty_assertions.workspace = true regex.workspace = true reqwest.workspace = true serde.workspace = true +sys_traits = { workspace = true, features = ["real", "getrandom", "libc", "winapi"] } test_util.workspace = true tokio.workspace = true tower-lsp.workspace = true diff --git a/tests/integration/check_tests.rs b/tests/integration/check_tests.rs index b98d719fca..a1fdf83403 100644 --- a/tests/integration/check_tests.rs +++ b/tests/integration/check_tests.rs @@ -218,7 +218,7 @@ fn npm_module_check_then_error() { "npm:@denotest/breaking-change-between-versions", ) .unwrap(), - "1.0.0".to_string(), + "1.0.0".into(), ); lockfile_path.write(lockfile.as_json_string()); temp_dir.write( @@ -236,7 +236,7 @@ fn npm_module_check_then_error() { "npm:@denotest/breaking-change-between-versions", ) .unwrap(), - "2.0.0".to_string(), + "2.0.0".into(), ); lockfile_path.write(lockfile.as_json_string()); diff --git a/tests/integration/compile_tests.rs b/tests/integration/compile_tests.rs index a69e873ab4..a715233933 100644 --- a/tests/integration/compile_tests.rs +++ b/tests/integration/compile_tests.rs @@ -2,7 +2,6 @@ use deno_core::serde_json; use test_util as util; -use util::assert_contains; use util::assert_not_contains; use util::testdata_path; use util::TestContext; @@ -90,78 +89,6 @@ fn standalone_args() { .assert_exit_code(0); } -#[test] -fn standalone_error() { - let context = TestContextBuilder::new().build(); - let dir = context.temp_dir(); - let exe = if cfg!(windows) { - dir.path().join("error.exe") - } else { - dir.path().join("error") - }; - context - .new_command() - .args_vec([ - "compile", - "--output", - &exe.to_string_lossy(), - 
"./compile/standalone_error.ts", - ]) - .run() - .skip_output_check() - .assert_exit_code(0); - - let output = context.new_command().name(&exe).split_output().run(); - output.assert_exit_code(1); - output.assert_stdout_matches_text(""); - let stderr = output.stderr(); - // On Windows, we cannot assert the file path (because '\'). - // Instead we just check for relevant output. - assert_contains!(stderr, "error: Uncaught (in promise) Error: boom!"); - assert_contains!(stderr, "\n at boom (file://"); - assert_contains!(stderr, "standalone_error.ts:2:9"); - assert_contains!(stderr, "at foo (file://"); - assert_contains!(stderr, "standalone_error.ts:5:3"); - assert_contains!(stderr, "standalone_error.ts:7:1"); -} - -#[test] -fn standalone_error_module_with_imports() { - let context = TestContextBuilder::new().build(); - let dir = context.temp_dir(); - let exe = if cfg!(windows) { - dir.path().join("error.exe") - } else { - dir.path().join("error") - }; - context - .new_command() - .args_vec([ - "compile", - "--output", - &exe.to_string_lossy(), - "./compile/standalone_error_module_with_imports_1.ts", - ]) - .run() - .skip_output_check() - .assert_exit_code(0); - - let output = context - .new_command() - .name(&exe) - .env("NO_COLOR", "1") - .split_output() - .run(); - output.assert_stdout_matches_text("hello\n"); - let stderr = output.stderr(); - // On Windows, we cannot assert the file path (because '\'). - // Instead we just check for relevant output. - assert_contains!(stderr, "error: Uncaught (in promise) Error: boom!"); - assert_contains!(stderr, "\n at file://"); - assert_contains!(stderr, "standalone_error_module_with_imports_2.ts:2:7"); - output.assert_exit_code(1); -} - #[test] fn standalone_load_datauri() { let context = TestContextBuilder::new().build(); diff --git a/tests/integration/js_unit_tests.rs b/tests/integration/js_unit_tests.rs index 577ca043ca..afb97a3458 100644 --- a/tests/integration/js_unit_tests.rs +++ b/tests/integration/js_unit_tests.rs @@ -52,6 +52,8 @@ util::unit_test_factory!( kv_queue_test, kv_queue_undelivered_test, link_test, + lint_selectors_test, + lint_plugin_test, make_temp_test, message_channel_test, mkdir_test, @@ -66,6 +68,7 @@ util::unit_test_factory!( process_test, progressevent_test, promise_hooks_test, + quic_test, read_dir_test, read_file_test, read_link_test, diff --git a/tests/integration/jsr_tests.rs b/tests/integration/jsr_tests.rs index c4812e6bfb..d3fa5cd98f 100644 --- a/tests/integration/jsr_tests.rs +++ b/tests/integration/jsr_tests.rs @@ -159,7 +159,7 @@ console.log(version);"#, .get_mut( &JsrDepPackageReq::from_str("jsr:@denotest/no-module-graph@0.1").unwrap(), ) - .unwrap() = "0.1.0".to_string(); + .unwrap() = "0.1.0".into(); lockfile_path.write(lockfile.as_json_string()); test_context @@ -191,8 +191,8 @@ fn reload_info_not_found_cache_but_exists_remote() { Url::parse(&format!("http://127.0.0.1:4250/{}/meta.json", package)) .unwrap(); let cache = deno_cache_dir::GlobalHttpCache::new( + sys_traits::impls::RealSys, deno_dir.path().join("remote").to_path_buf(), - deno_cache_dir::TestRealDenoCacheEnv, ); let entry = cache .get(&cache.cache_item_key(&specifier).unwrap(), None) diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs index 56c060d958..825cef6247 100644 --- a/tests/integration/lsp_tests.rs +++ b/tests/integration/lsp_tests.rs @@ -6066,6 +6066,119 @@ fn lsp_jsr_code_action_missing_declaration() { ); } +#[test] +fn lsp_jsr_code_action_move_to_new_file() { + let context = TestContextBuilder::new() + 
.use_http_server() + .use_temp_cwd() + .build(); + let temp_dir = context.temp_dir(); + let file = source_file( + temp_dir.path().join("file.ts"), + r#" + import { someFunction } from "jsr:@denotest/types-file"; + export const someValue = someFunction(); + "#, + ); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + client.write_request( + "workspace/executeCommand", + json!({ + "command": "deno.cache", + "arguments": [[], file.url()], + }), + ); + client.did_open_file(&file); + let list = client + .write_request_with_res_as::>( + "textDocument/codeAction", + json!({ + "textDocument": { "uri": file.url() }, + "range": { + "start": { "line": 2, "character": 19 }, + "end": { "line": 2, "character": 28 }, + }, + "context": { "diagnostics": [] }, + }), + ) + .unwrap(); + let action = list + .iter() + .find_map(|c| match c { + lsp::CodeActionOrCommand::CodeAction(a) + if &a.title == "Move to a new file" => + { + Some(a) + } + _ => None, + }) + .unwrap(); + let res = client.write_request("codeAction/resolve", json!(action)); + assert_eq!( + res, + json!({ + "title": "Move to a new file", + "kind": "refactor.move.newFile", + "edit": { + "documentChanges": [ + { + "textDocument": { "uri": file.url(), "version": 1 }, + "edits": [ + { + "range": { + "start": { "line": 1, "character": 6 }, + "end": { "line": 2, "character": 0 }, + }, + "newText": "", + }, + { + "range": { + "start": { "line": 2, "character": 0 }, + "end": { "line": 3, "character": 4 }, + }, + "newText": "", + }, + ], + }, + { + "kind": "create", + "uri": file.url().join("someValue.ts").unwrap(), + "options": { + "ignoreIfExists": true, + }, + }, + { + "textDocument": { + "uri": file.url().join("someValue.ts").unwrap(), + "version": null, + }, + "edits": [ + { + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 0, "character": 0 }, + }, + "newText": "import { someFunction } from \"jsr:@denotest/types-file\";\n\nexport const someValue = someFunction();\n", + }, + ], + }, + ], + }, + "isPreferred": false, + "data": { + "specifier": file.url(), + "range": { + "start": { "line": 2, "character": 19 }, + "end": { "line": 2, "character": 28 }, + }, + "refactorName": "Move to a new file", + "actionName": "Move to a new file", + }, + }), + ); +} + #[test] fn lsp_code_actions_deno_cache_npm() { let context = TestContextBuilder::new().use_temp_cwd().build(); diff --git a/tests/specs/cli/otel_basic/__test__.jsonc b/tests/specs/cli/otel_basic/__test__.jsonc index e7f8d17c7a..f9826671e8 100644 --- a/tests/specs/cli/otel_basic/__test__.jsonc +++ b/tests/specs/cli/otel_basic/__test__.jsonc @@ -1,28 +1,27 @@ { - "steps": [ - { + "tests": { + "basic": { "args": "run -A main.ts basic.ts", "output": "basic.out" }, - { + "natural_exit": { "args": "run -A main.ts natural_exit.ts", "output": "natural_exit.out" }, - { + "deno_dot_exit": { "args": "run -A main.ts deno_dot_exit.ts", "output": "deno_dot_exit.out" }, - { + "uncaught": { "args": "run -A main.ts uncaught.ts", "output": "uncaught.out" }, - { + "metric": { + "envs": { + "OTEL_METRIC_EXPORT_INTERVAL": "1000" + }, "args": "run -A main.ts metric.ts", "output": "metric.out" - }, - { - "args": "run -A --unstable-otel context.ts", - "output": "" } - ] + } } diff --git a/tests/specs/cli/otel_basic/metric.out b/tests/specs/cli/otel_basic/metric.out index 26ed4a23c6..dd53734230 100644 --- a/tests/specs/cli/otel_basic/metric.out +++ b/tests/specs/cli/otel_basic/metric.out @@ -56,6 +56,31 @@ "isMonotonic": false } }, + { + "name": "gauge", + 
"description": "Example of a Gauge", + "unit": "", + "metadata": [], + "gauge": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ] + } + }, { "name": "histogram", "description": "Example of a Histogram", @@ -119,6 +144,265 @@ ], "aggregationTemporality": 2 } + }, + { + "name": "observable_counter", + "description": "Example of a ObservableCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + }, + { + "name": "observable_up_down_counter", + "description": "Example of a ObservableUpDownCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": false + } + }, + { + "name": "observable_gauge", + "description": "Example of a ObservableGauge", + "unit": "", + "metadata": [], + "gauge": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ] + } + }, + { + "name": "counter", + "description": "Example of a Counter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + }, + { + "name": "up_down_counter", + "description": "Example of a UpDownCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": -1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": false + } + }, + { + "name": "gauge", + "description": "Example of a Gauge", + "unit": "", + "metadata": [], + "gauge": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ] + } + }, + { + "name": "histogram", + "description": "Example of a Histogram", + "unit": "", + "metadata": [], + "histogram": { + "dataPoints": [ + { + "attributes": [ + { + "key": "attribute", + "value": { + "doubleValue": 1 + } + } + ], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "count": 1, + "sum": 1, + "bucketCounts": [ + 0, + 1, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "explicitBounds": [ + 0, + 5, + 10, + 25, + 50, + 75, + 100, + 250, + 500, + 750, + 1000, + 2500, + 5000, + 7500, + 10000 + ], + "exemplars": [], + "flags": 0, + "min": 1, + "max": 1 + } + ], + "aggregationTemporality": 2 + } + }, + { + "name": "observable_counter", + "description": "Example of a ObservableCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + 
"timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + }, + { + "name": "observable_up_down_counter", + "description": "Example of a ObservableUpDownCounter", + "unit": "", + "metadata": [], + "sum": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ], + "aggregationTemporality": 2, + "isMonotonic": false + } + }, + { + "name": "observable_gauge", + "description": "Example of a ObservableGauge", + "unit": "", + "metadata": [], + "gauge": { + "dataPoints": [ + { + "attributes": [], + "startTimeUnixNano": "[WILDCARD]", + "timeUnixNano": "[WILDCARD]", + "exemplars": [], + "flags": 0, + "asDouble": 1 + } + ] + } } ] } diff --git a/tests/specs/cli/otel_basic/metric.ts b/tests/specs/cli/otel_basic/metric.ts index 7d332f0432..2b472a6fb8 100644 --- a/tests/specs/cli/otel_basic/metric.ts +++ b/tests/specs/cli/otel_basic/metric.ts @@ -1,18 +1,8 @@ -import { - MeterProvider, - PeriodicExportingMetricReader, -} from "npm:@opentelemetry/sdk-metrics@1.28.0"; +import { metrics } from "npm:@opentelemetry/api@1"; -const meterProvider = new MeterProvider(); +metrics.setGlobalMeterProvider(new Deno.telemetry.MeterProvider()); -meterProvider.addMetricReader( - new PeriodicExportingMetricReader({ - exporter: new Deno.telemetry.MetricExporter(), - exportIntervalMillis: 100, - }), -); - -const meter = meterProvider.getMeter("m"); +const meter = metrics.getMeter("m"); const counter = meter.createCounter("counter", { description: "Example of a Counter", @@ -22,13 +12,82 @@ const upDownCounter = meter.createUpDownCounter("up_down_counter", { description: "Example of a UpDownCounter", }); +const gauge = meter.createGauge("gauge", { + description: "Example of a Gauge", +}); + const histogram = meter.createHistogram("histogram", { description: "Example of a Histogram", }); +const observableCounterPromise = Promise.withResolvers(); +const observableCounter = meter.createObservableCounter("observable_counter", { + description: "Example of a ObservableCounter", +}); +observableCounter.addCallback((res) => { + res.observe(1); + observableCounterPromise.resolve(); +}); + +const observableUpDownCounterPromise = Promise.withResolvers(); +const observableUpDownCounter = meter + .createObservableUpDownCounter("observable_up_down_counter", { + description: "Example of a ObservableUpDownCounter", + }); +observableUpDownCounter.addCallback((res) => { + res.observe(1); + observableUpDownCounterPromise.resolve(); +}); + +const observableGaugePromise = Promise.withResolvers(); +const observableGauge = meter.createObservableGauge("observable_gauge", { + description: "Example of a ObservableGauge", +}); +observableGauge.addCallback((res) => { + res.observe(1); + observableGaugePromise.resolve(); +}); + +const observableCounterBatch = meter.createObservableCounter( + "observable_counter_batch", + { description: "Example of a ObservableCounter, written in batch" }, +); +const observableUpDownCounterBatch = meter.createObservableUpDownCounter( + "observable_up_down_counter_batch", + { description: "Example of a ObservableUpDownCounter, written in batch" }, +); +const observableGaugeBatch = meter.createObservableGauge( + "observable_gauge_batch", + { + description: "Example of a ObservableGauge, written in batch", + }, +); + +const observableBatchPromise = Promise.withResolvers(); +meter.addBatchObservableCallback((observer) => { + 
observer.observe(observableCounter, 2); + observer.observe(observableUpDownCounter, 2); + observer.observe(observableGauge, 2); + observableBatchPromise.resolve(); +}, [ + observableCounterBatch, + observableUpDownCounterBatch, + observableGaugeBatch, +]); + const attributes = { attribute: 1 }; counter.add(1, attributes); upDownCounter.add(-1, attributes); +gauge.record(1, attributes); histogram.record(1, attributes); -await meterProvider.forceFlush(); +const timer = setTimeout(() => {}, 100000); + +await Promise.all([ + observableCounterPromise.promise, + observableUpDownCounterPromise.promise, + observableGaugePromise.promise, + observableBatchPromise.promise, +]); + +clearTimeout(timer); diff --git a/tests/specs/compile/code_cache/__test__.jsonc b/tests/specs/compile/code_cache/__test__.jsonc index 72353e27da..f1c3461adc 100644 --- a/tests/specs/compile/code_cache/__test__.jsonc +++ b/tests/specs/compile/code_cache/__test__.jsonc @@ -1,6 +1,9 @@ { "tempDir": true, "steps": [{ + "args": "run -A cleanup.ts", + "output": "[WILDCARD]" + }, { "if": "unix", "args": "compile --output using_code_cache --log-level=debug main.ts", "output": "[WILDCARD]" diff --git a/tests/specs/compile/code_cache/cleanup.ts b/tests/specs/compile/code_cache/cleanup.ts new file mode 100644 index 0000000000..d9e7c805f8 --- /dev/null +++ b/tests/specs/compile/code_cache/cleanup.ts @@ -0,0 +1,11 @@ +import { tmpdir } from "node:os"; + +// clean up the code cache file from a previous run +try { + if (Deno.build.os === "windows") { + Deno.removeSync(tmpdir() + "\\deno-compile-using_code_cache.exe.cache"); + } else { + Deno.removeSync(tmpdir() + "/deno-compile-using_code_cache.cache"); + } +} catch { +} diff --git a/tests/specs/compile/determinism/__test__.jsonc b/tests/specs/compile/determinism/__test__.jsonc index 97045744f1..b84a1fdf18 100644 --- a/tests/specs/compile/determinism/__test__.jsonc +++ b/tests/specs/compile/determinism/__test__.jsonc @@ -1,28 +1,31 @@ { "tempDir": true, "steps": [{ - "if": "unix", - "args": "compile --output main1 main.ts", + "args": "run -A setup.ts", "output": "[WILDCARD]" }, { "if": "unix", - "args": "compile --output main2 main.ts", + "args": "compile --no-config --output a/main a/main.ts", "output": "[WILDCARD]" }, { "if": "unix", - "args": "run --allow-read=. assert_equal.ts main1 main2", + "args": "compile --no-config --output b/main b/main.ts", + "output": "[WILDCARD]" + }, { + "if": "unix", + "args": "run --allow-read=. assert_equal.ts a/main b/main", "output": "Same\n" }, { "if": "windows", - "args": "compile --output main1.exe main.ts", + "args": "compile --no-config --output a/main.exe a/main.ts", "output": "[WILDCARD]" }, { "if": "windows", - "args": "compile --output main2.exe main.ts", + "args": "compile --no-config --output b/main.exe b/main.ts", "output": "[WILDCARD]" }, { "if": "windows", - "args": "run --allow-read=. assert_equal.ts main1.exe main2.exe", + "args": "run --allow-read=. 
assert_equal.ts a/main.exe b/main.exe", "output": "Same\n" }] } diff --git a/tests/specs/compile/determinism/setup.ts b/tests/specs/compile/determinism/setup.ts new file mode 100644 index 0000000000..8bb5753079 --- /dev/null +++ b/tests/specs/compile/determinism/setup.ts @@ -0,0 +1,10 @@ +// for setup, we create two directories with the same file in each +// and then when compiling we ensure this directory name has no +// effect on the output +makeCopyDir("a"); +makeCopyDir("b"); + +function makeCopyDir(dirName) { + Deno.mkdirSync(dirName); + Deno.copyFileSync("main.ts", `${dirName}/main.ts`); +} diff --git a/tests/specs/compile/error/local/__test__.jsonc b/tests/specs/compile/error/local/__test__.jsonc new file mode 100644 index 0000000000..8d6a015a51 --- /dev/null +++ b/tests/specs/compile/error/local/__test__.jsonc @@ -0,0 +1,24 @@ +{ + "tempDir": true, + "steps": [{ + "if": "unix", + "args": "compile --output main standalone_error.ts", + "output": "[WILDCARD]" + }, { + "if": "unix", + "commandName": "./main", + "args": [], + "output": "output.out", + "exitCode": 1 + }, { + "if": "windows", + "args": "compile --output main.exe standalone_error.ts", + "output": "[WILDCARD]" + }, { + "if": "windows", + "commandName": "./main.exe", + "args": [], + "output": "output.out", + "exitCode": 1 + }] +} diff --git a/tests/specs/compile/error/local/output.out b/tests/specs/compile/error/local/output.out new file mode 100644 index 0000000000..b346734ae6 --- /dev/null +++ b/tests/specs/compile/error/local/output.out @@ -0,0 +1,6 @@ +error: Uncaught (in promise) Error: boom! + throw new Error("boom!"); + ^ + at boom (file:///[WILDLINE]standalone_error.ts:2:9) + at foo (file:///[WILDLINE]standalone_error.ts:6:3) + at file:///[WILDLINE]standalone_error.ts:9:1 diff --git a/tests/testdata/compile/standalone_error.ts b/tests/specs/compile/error/local/standalone_error.ts similarity index 100% rename from tests/testdata/compile/standalone_error.ts rename to tests/specs/compile/error/local/standalone_error.ts diff --git a/tests/specs/compile/error/remote/__test__.jsonc b/tests/specs/compile/error/remote/__test__.jsonc new file mode 100644 index 0000000000..9ad9091ec6 --- /dev/null +++ b/tests/specs/compile/error/remote/__test__.jsonc @@ -0,0 +1,24 @@ +{ + "tempDir": true, + "steps": [{ + "if": "unix", + "args": "compile -A --output main main.ts", + "output": "[WILDCARD]" + }, { + "if": "unix", + "commandName": "./main", + "args": [], + "output": "output.out", + "exitCode": 1 + }, { + "if": "windows", + "args": "compile -A --output main.exe main.ts", + "output": "[WILDCARD]" + }, { + "if": "windows", + "commandName": "./main.exe", + "args": [], + "output": "output.out", + "exitCode": 1 + }] +} diff --git a/tests/specs/compile/error/remote/main.ts b/tests/specs/compile/error/remote/main.ts new file mode 100644 index 0000000000..7a27276dd8 --- /dev/null +++ b/tests/specs/compile/error/remote/main.ts @@ -0,0 +1 @@ +import "http://localhost:4545/compile/standalone_error_module_with_imports_1.ts"; diff --git a/tests/specs/compile/error/remote/output.out b/tests/specs/compile/error/remote/output.out new file mode 100644 index 0000000000..3e23694c16 --- /dev/null +++ b/tests/specs/compile/error/remote/output.out @@ -0,0 +1,5 @@ +hello +error: Uncaught (in promise) Error: boom! 
+throw new Error(value); + ^ + at http://localhost:4545/compile/standalone_error_module_with_imports_2.ts:7:7 diff --git a/tests/specs/node/cjs_key_escaped_whitespace/__test__.jsonc b/tests/specs/node/cjs_key_escaped_whitespace/__test__.jsonc new file mode 100644 index 0000000000..ebaae5bfd6 --- /dev/null +++ b/tests/specs/node/cjs_key_escaped_whitespace/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run -A main.js", + "output": "output.out" +} diff --git a/tests/specs/node/cjs_key_escaped_whitespace/main.js b/tests/specs/node/cjs_key_escaped_whitespace/main.js new file mode 100644 index 0000000000..9d4f2ee26c --- /dev/null +++ b/tests/specs/node/cjs_key_escaped_whitespace/main.js @@ -0,0 +1,2 @@ +const bang = await import("./module.cjs"); +console.log("imported:", bang); diff --git a/tests/specs/node/cjs_key_escaped_whitespace/module.cjs b/tests/specs/node/cjs_key_escaped_whitespace/module.cjs new file mode 100644 index 0000000000..5accc6196a --- /dev/null +++ b/tests/specs/node/cjs_key_escaped_whitespace/module.cjs @@ -0,0 +1,6 @@ +module.exports = { + "\nx": "test", + "\ty": "test", + "\rz": "test", + '"a': "test", +}; diff --git a/tests/specs/node/cjs_key_escaped_whitespace/output.out b/tests/specs/node/cjs_key_escaped_whitespace/output.out new file mode 100644 index 0000000000..49e92abdec --- /dev/null +++ b/tests/specs/node/cjs_key_escaped_whitespace/output.out @@ -0,0 +1,7 @@ +imported: [Module: null prototype] { + "\ty": "test", + "\nx": "test", + "\rz": "test", + '"a': "test", + default: { "\nx": "test", "\ty": "test", "\rz": "test", '"a': "test" } +} diff --git a/tests/specs/permission/allow_import_worker/denied.out b/tests/specs/permission/allow_import_worker/denied.out index 6e4dcaee09..af44ae21ee 100644 --- a/tests/specs/permission/allow_import_worker/denied.out +++ b/tests/specs/permission/allow_import_worker/denied.out @@ -3,5 +3,4 @@ await import(specifier); ^ at async file:///[WILDLINE] error: Uncaught (in promise) Error: Unhandled error in child worker. 
- at [WILDLINE] - at [WILDLINE] \ No newline at end of file + at [WILDCARD] \ No newline at end of file diff --git a/tests/specs/task/dependencies/__test__.jsonc b/tests/specs/task/dependencies/__test__.jsonc index 84c98f11a4..c9032153b3 100644 --- a/tests/specs/task/dependencies/__test__.jsonc +++ b/tests/specs/task/dependencies/__test__.jsonc @@ -61,6 +61,18 @@ "cwd": "arg_task_with_deps", "args": "task a a", "output": "./arg_task_with_deps.out" + }, + "no_command": { + "cwd": "no_command", + "args": "task a", + "output": "./no_command.out", + "exitCode": 0 + }, + "no_command_list": { + "cwd": "no_command", + "args": "task", + "output": "./no_command_list.out", + "exitCode": 0 } } } diff --git a/tests/specs/task/dependencies/no_command.out b/tests/specs/task/dependencies/no_command.out new file mode 100644 index 0000000000..521b3541df --- /dev/null +++ b/tests/specs/task/dependencies/no_command.out @@ -0,0 +1,5 @@ +Task b echo 'b' +b +Task c echo 'c' +c +Task a (no command) diff --git a/tests/specs/task/dependencies/no_command/deno.json b/tests/specs/task/dependencies/no_command/deno.json new file mode 100644 index 0000000000..5588365a92 --- /dev/null +++ b/tests/specs/task/dependencies/no_command/deno.json @@ -0,0 +1,13 @@ +{ + "tasks": { + "a": { + "dependencies": ["b", "c"] + }, + "b": { + "command": "echo 'b'" + }, + "c": { + "command": "echo 'c'" + } + } +} diff --git a/tests/specs/task/dependencies/no_command_list.out b/tests/specs/task/dependencies/no_command_list.out new file mode 100644 index 0000000000..3d58c1cb06 --- /dev/null +++ b/tests/specs/task/dependencies/no_command_list.out @@ -0,0 +1,7 @@ +Available tasks: +- a + depends on: b, c +- b + echo 'b' +- c + echo 'c' diff --git a/tests/testdata/compile/standalone_error_module_with_imports_2.ts b/tests/testdata/compile/standalone_error_module_with_imports_2.ts index ef052b512e..c83d7ceea6 100644 --- a/tests/testdata/compile/standalone_error_module_with_imports_2.ts +++ b/tests/testdata/compile/standalone_error_module_with_imports_2.ts @@ -1,2 +1,7 @@ +// file has blank lines to make the input line +// different than the output console.log("hello"); -throw new Error("boom!"); + +const value: string = "boom!"; + +throw new Error(value); diff --git a/tests/unit/body_test.ts b/tests/unit/body_test.ts index 18cdb22be0..fb51fd0076 100644 --- a/tests/unit/body_test.ts +++ b/tests/unit/body_test.ts @@ -1,5 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -import { assert, assertEquals } from "./test_util.ts"; +import { assert, assertEquals, assertRejects } from "./test_util.ts"; // just a hack to get a body object // deno-lint-ignore no-explicit-any @@ -187,3 +187,14 @@ Deno.test( assertEquals(file.size, 1); }, ); + +Deno.test(async function bodyBadResourceError() { + const file = await Deno.open("README.md"); + file.close(); + const body = buildBody(file.readable); + await assertRejects( + () => body.arrayBuffer(), + Deno.errors.BadResource, + "Cannot read body as underlying resource unavailable", + ); +}); diff --git a/tests/unit/lint_plugin_test.ts b/tests/unit/lint_plugin_test.ts new file mode 100644 index 0000000000..38a7e1b091 --- /dev/null +++ b/tests/unit/lint_plugin_test.ts @@ -0,0 +1,769 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
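+// NOTE: these tests drive the lint plugin machinery through the internal
+// Deno[Deno.internal].runLintPlugin hook rather than a public entrypoint.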
+ +import { assertEquals } from "./test_util.ts"; + +// TODO(@marvinhagemeister) Remove once we land "official" types +export interface LintReportData { + // deno-lint-ignore no-explicit-any + node: any; + message: string; +} +// TODO(@marvinhagemeister) Remove once we land "official" types +interface LintContext { + id: string; +} +// TODO(@marvinhagemeister) Remove once we land "official" types +// deno-lint-ignore no-explicit-any +type LintVisitor = Record<string, (node: any) => void>; + +// TODO(@marvinhagemeister) Remove once we land "official" types +interface LintRule { + create(ctx: LintContext): LintVisitor; + destroy?(): void; +} + +// TODO(@marvinhagemeister) Remove once we land "official" types +interface LintPlugin { + name: string; + rules: Record<string, LintRule>; +} + +function runLintPlugin(plugin: LintPlugin, fileName: string, source: string) { + // deno-lint-ignore no-explicit-any + return (Deno as any)[(Deno as any).internal].runLintPlugin( + plugin, + fileName, + source, + ); +} + +function testPlugin( + source: string, + rule: LintRule, +) { + const plugin = { + name: "test-plugin", + rules: { + testRule: rule, + }, + }; + + return runLintPlugin(plugin, "source.tsx", source); +} + +interface VisitResult { + selector: string; + kind: "enter" | "exit"; + // deno-lint-ignore no-explicit-any + node: any; +} + +function testVisit( + source: string, + ...selectors: string[] +): VisitResult[] { + const result: VisitResult[] = []; + + testPlugin(source, { + create() { + const visitor: LintVisitor = {}; + + for (const s of selectors) { + visitor[s] = (node) => { + result.push({ + kind: s.endsWith(":exit") ? "exit" : "enter", + selector: s, + node, + }); + }; + } + + return visitor; + }, + }); + + return result; +} + +function testLintNode(source: string, ...selectors: string[]) { + // deno-lint-ignore no-explicit-any + const log: any[] = []; + + testPlugin(source, { + create() { + const visitor: LintVisitor = {}; + + for (const s of selectors) { + visitor[s] = (node) => { + log.push(node[Symbol.for("Deno.lint.toJsValue")]()); + }; + } + + return visitor; + }, + }); + + return log; +} + +Deno.test("Plugin - visitor enter/exit", () => { + const enter = testVisit( + "foo", + "Identifier", + ); + assertEquals(enter[0].node.type, "Identifier"); + + const exit = testVisit( + "foo", + "Identifier:exit", + ); + assertEquals(exit[0].node.type, "Identifier"); + + const both = testVisit("foo", "Identifier", "Identifier:exit"); + assertEquals(both.map((t) => t.selector), ["Identifier", "Identifier:exit"]); +}); + +Deno.test("Plugin - visitor descendant", () => { + let result = testVisit( + "if (false) foo; if (false) bar()", + "IfStatement CallExpression", + ); + assertEquals(result[0].node.type, "CallExpression"); + assertEquals(result[0].node.callee.name, "bar"); + + result = testVisit( + "if (false) foo; foo()", + "IfStatement IfStatement", + ); + assertEquals(result, []); + + result = testVisit( + "if (false) foo; foo()", + "* CallExpression", + ); + assertEquals(result[0].node.type, "CallExpression"); +}); + +Deno.test("Plugin - visitor child combinator", () => { + let result = testVisit( + "if (false) foo; if (false) { bar; }", + "IfStatement > ExpressionStatement > Identifier", + ); + assertEquals(result[0].node.name, "foo"); + + result = testVisit( + "if (false) foo; foo()", + "IfStatement IfStatement", + ); + assertEquals(result, []); +}); + +Deno.test("Plugin - visitor next sibling", () => { + const result = testVisit( + "if (false) foo; if (false) bar;", + "IfStatement + IfStatement Identifier", + ); + 
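+  // "+" is the adjacent-sibling combinator, so only the identifier in the second if-statement matches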
assertEquals(result[0].node.name, "bar"); +}); + +Deno.test("Plugin - visitor subsequent sibling", () => { + const result = testVisit( + "if (false) foo; if (false) bar; if (false) baz;", + "IfStatement ~ IfStatement Identifier", + ); + assertEquals(result.map((r) => r.node.name), ["bar", "baz"]); +}); + +Deno.test("Plugin - visitor attr", () => { + let result = testVisit( + "for (const a of b) {}", + "[await]", + ); + assertEquals(result[0].node.await, false); + + result = testVisit( + "for await (const a of b) {}", + "[await=true]", + ); + assertEquals(result[0].node.await, true); + + result = testVisit( + "for await (const a of b) {}", + "ForOfStatement[await=true]", + ); + assertEquals(result[0].node.await, true); + + result = testVisit( + "for (const a of b) {}", + "ForOfStatement[await != true]", + ); + assertEquals(result[0].node.await, false); + + result = testVisit( + "async function *foo() {}", + "FunctionDeclaration[async=true][generator=true]", + ); + assertEquals(result[0].node.type, "FunctionDeclaration"); + + result = testVisit( + "foo", + "[name='foo']", + ); + assertEquals(result[0].node.name, "foo"); +}); + +Deno.test("Plugin - visitor attr to check type", () => { + let result = testVisit( + "foo", + "Identifier[type]", + ); + assertEquals(result[0].node.type, "Identifier"); + + result = testVisit( + "foo", + "Identifier[type='Identifier']", + ); + assertEquals(result[0].node.type, "Identifier"); +}); + +Deno.test("Plugin - visitor attr non-existing", () => { + const result = testVisit( + "foo", + "[non-existing]", + ); + assertEquals(result, []); +}); + +Deno.test("Plugin - visitor attr length special case", () => { + let result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length=2]", + ); + assertEquals(result[0].node.arguments.length, 2); + + result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length>1]", + ); + assertEquals(result[0].node.arguments.length, 2); + + result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length<2]", + ); + assertEquals(result[0].node.arguments.length, 1); + + result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length<=3]", + ); + assertEquals(result[0].node.arguments.length, 1); + assertEquals(result[1].node.arguments.length, 2); + + result = testVisit( + "foo(1); foo(1, 2);", + "CallExpression[arguments.length>=1]", + ); + assertEquals(result[0].node.arguments.length, 1); + assertEquals(result[1].node.arguments.length, 2); +}); + +Deno.test("Plugin - visitor :first-child", () => { + const result = testVisit( + "{ foo; bar }", + "BlockStatement ExpressionStatement:first-child Identifier", + ); + assertEquals(result[0].node.name, "foo"); +}); + +Deno.test("Plugin - visitor :last-child", () => { + const result = testVisit( + "{ foo; bar }", + "BlockStatement ExpressionStatement:last-child Identifier", + ); + assertEquals(result[0].node.name, "bar"); +}); + +Deno.test("Plugin - visitor :nth-child", () => { + let result = testVisit( + "{ foo; bar; baz; foobar; }", + "BlockStatement ExpressionStatement:nth-child(2) Identifier", + ); + assertEquals(result[0].node.name, "bar"); + + result = testVisit( + "{ foo; bar; baz; foobar; }", + "BlockStatement ExpressionStatement:nth-child(2n) Identifier", + ); + assertEquals(result[0].node.name, "foo"); + assertEquals(result[1].node.name, "baz"); + + result = testVisit( + "{ foo; bar; baz; foobar; }", + "BlockStatement ExpressionStatement:nth-child(2n + 1) Identifier", + ); + assertEquals(result[0].node.name, "bar"); + 
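+  // note: judging by these fixtures, 2n selects the 1st/3rd statements and 2n + 1 the 2nd/4th, the reverse of CSS nth-child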
assertEquals(result[1].node.name, "foobar"); + + result = testVisit( + "{ foo; bar; baz; foobar; }", + "BlockStatement *:nth-child(2n + 1 of ExpressionStatement) Identifier", + ); + assertEquals(result[0].node.name, "bar"); + assertEquals(result[1].node.name, "foobar"); +}); + +Deno.test("Plugin - Program", () => { + const node = testLintNode("", "Program"); + assertEquals(node[0], { + type: "Program", + sourceType: "script", + range: [1, 1], + body: [], + }); +}); + +Deno.test("Plugin - BlockStatement", () => { + const node = testLintNode("{ foo; }", "BlockStatement"); + assertEquals(node[0], { + type: "BlockStatement", + range: [1, 9], + body: [{ + type: "ExpressionStatement", + range: [3, 7], + expression: { + type: "Identifier", + name: "foo", + range: [3, 6], + }, + }], + }); +}); + +Deno.test("Plugin - BreakStatement", () => { + let node = testLintNode("break;", "BreakStatement"); + assertEquals(node[0], { + type: "BreakStatement", + range: [1, 7], + label: null, + }); + + node = testLintNode("break foo;", "BreakStatement"); + assertEquals(node[0], { + type: "BreakStatement", + range: [1, 11], + label: { + type: "Identifier", + range: [7, 10], + name: "foo", + }, + }); +}); + +Deno.test("Plugin - ContinueStatement", () => { + let node = testLintNode("continue;", "ContinueStatement"); + assertEquals(node[0], { + type: "ContinueStatement", + range: [1, 10], + label: null, + }); + + node = testLintNode("continue foo;", "ContinueStatement"); + assertEquals(node[0], { + type: "ContinueStatement", + range: [1, 14], + label: { + type: "Identifier", + range: [10, 13], + name: "foo", + }, + }); +}); + +Deno.test("Plugin - DebuggerStatement", () => { + const node = testLintNode("debugger;", "DebuggerStatement"); + assertEquals(node[0], { + type: "DebuggerStatement", + range: [1, 10], + }); +}); + +Deno.test("Plugin - DoWhileStatement", () => { + const node = testLintNode("do {} while (foo);", "DoWhileStatement"); + assertEquals(node[0], { + type: "DoWhileStatement", + range: [1, 19], + test: { + type: "Identifier", + range: [14, 17], + name: "foo", + }, + body: { + type: "BlockStatement", + range: [4, 6], + body: [], + }, + }); +}); + +Deno.test("Plugin - ExpressionStatement", () => { + const node = testLintNode("foo;", "ExpressionStatement"); + assertEquals(node[0], { + type: "ExpressionStatement", + range: [1, 5], + expression: { + type: "Identifier", + range: [1, 4], + name: "foo", + }, + }); +}); + +Deno.test("Plugin - ForInStatement", () => { + const node = testLintNode("for (a in b) {}", "ForInStatement"); + assertEquals(node[0], { + type: "ForInStatement", + range: [1, 16], + left: { + type: "Identifier", + range: [6, 7], + name: "a", + }, + right: { + type: "Identifier", + range: [11, 12], + name: "b", + }, + body: { + type: "BlockStatement", + range: [14, 16], + body: [], + }, + }); +}); + +Deno.test("Plugin - ForOfStatement", () => { + let node = testLintNode("for (a of b) {}", "ForOfStatement"); + assertEquals(node[0], { + type: "ForOfStatement", + range: [1, 16], + await: false, + left: { + type: "Identifier", + range: [6, 7], + name: "a", + }, + right: { + type: "Identifier", + range: [11, 12], + name: "b", + }, + body: { + type: "BlockStatement", + range: [14, 16], + body: [], + }, + }); + + node = testLintNode("for await (a of b) {}", "ForOfStatement"); + assertEquals(node[0], { + type: "ForOfStatement", + range: [1, 22], + await: true, + left: { + type: "Identifier", + range: [12, 13], + name: "a", + }, + right: { + type: "Identifier", + range: [17, 18], + name: "b", + }, + 
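+      // offsets are shifted by the length of "await " (6 chars) relative to the plain for-of case above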
body: { + type: "BlockStatement", + range: [20, 22], + body: [], + }, + }); +}); + +Deno.test("Plugin - ForStatement", () => { + let node = testLintNode("for (;;) {}", "ForStatement"); + assertEquals(node[0], { + type: "ForStatement", + range: [1, 12], + init: null, + test: null, + update: null, + body: { + type: "BlockStatement", + range: [10, 12], + body: [], + }, + }); + + node = testLintNode("for (a; b; c) {}", "ForStatement"); + assertEquals(node[0], { + type: "ForStatement", + range: [1, 17], + init: { + type: "Identifier", + range: [6, 7], + name: "a", + }, + test: { + type: "Identifier", + range: [9, 10], + name: "b", + }, + update: { + type: "Identifier", + range: [12, 13], + name: "c", + }, + body: { + type: "BlockStatement", + range: [15, 17], + body: [], + }, + }); +}); + +Deno.test("Plugin - IfStatement", () => { + let node = testLintNode("if (foo) {}", "IfStatement"); + assertEquals(node[0], { + type: "IfStatement", + range: [1, 12], + test: { + type: "Identifier", + name: "foo", + range: [5, 8], + }, + consequent: { + type: "BlockStatement", + range: [10, 12], + body: [], + }, + alternate: null, + }); + + node = testLintNode("if (foo) {} else {}", "IfStatement"); + assertEquals(node[0], { + type: "IfStatement", + range: [1, 20], + test: { + type: "Identifier", + name: "foo", + range: [5, 8], + }, + consequent: { + type: "BlockStatement", + range: [10, 12], + body: [], + }, + alternate: { + type: "BlockStatement", + range: [18, 20], + body: [], + }, + }); +}); + +Deno.test("Plugin - LabeledStatement", () => { + const node = testLintNode("foo: {};", "LabeledStatement"); + assertEquals(node[0], { + type: "LabeledStatement", + range: [1, 8], + label: { + type: "Identifier", + name: "foo", + range: [1, 4], + }, + body: { + type: "BlockStatement", + range: [6, 8], + body: [], + }, + }); +}); + +Deno.test("Plugin - ReturnStatement", () => { + let node = testLintNode("return", "ReturnStatement"); + assertEquals(node[0], { + type: "ReturnStatement", + range: [1, 7], + argument: null, + }); + + node = testLintNode("return foo;", "ReturnStatement"); + assertEquals(node[0], { + type: "ReturnStatement", + range: [1, 12], + argument: { + type: "Identifier", + name: "foo", + range: [8, 11], + }, + }); +}); + +Deno.test("Plugin - SwitchStatement", () => { + const node = testLintNode( + `switch (foo) { + case foo: + case bar: + break; + default: + {} + }`, + "SwitchStatement", + ); + assertEquals(node[0], { + type: "SwitchStatement", + range: [1, 94], + discriminant: { + type: "Identifier", + range: [9, 12], + name: "foo", + }, + cases: [ + { + type: "SwitchCase", + range: [22, 31], + test: { + type: "Identifier", + range: [27, 30], + name: "foo", + }, + consequent: [], + }, + { + type: "SwitchCase", + range: [38, 62], + test: { + type: "Identifier", + range: [43, 46], + name: "bar", + }, + consequent: [ + { + type: "BreakStatement", + label: null, + range: [56, 62], + }, + ], + }, + { + type: "SwitchCase", + range: [69, 88], + test: null, + consequent: [ + { + type: "BlockStatement", + range: [86, 88], + body: [], + }, + ], + }, + ], + }); +}); + +Deno.test("Plugin - ThrowStatement", () => { + const node = testLintNode("throw foo;", "ThrowStatement"); + assertEquals(node[0], { + type: "ThrowStatement", + range: [1, 11], + argument: { + type: "Identifier", + range: [7, 10], + name: "foo", + }, + }); +}); + +Deno.test("Plugin - TryStatement", () => { + let node = testLintNode("try {} catch {};", "TryStatement"); + assertEquals(node[0], { + type: "TryStatement", + range: [1, 16], + block: { + 
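+      // ranges appear to be 1-based with an exclusive end, e.g. the "{}" block of "try {}" spans [5, 7]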
type: "BlockStatement", + range: [5, 7], + body: [], + }, + handler: { + type: "CatchClause", + range: [8, 16], + param: null, + body: { + type: "BlockStatement", + range: [14, 16], + body: [], + }, + }, + finalizer: null, + }); + + node = testLintNode("try {} catch (e) {};", "TryStatement"); + assertEquals(node[0], { + type: "TryStatement", + range: [1, 20], + block: { + type: "BlockStatement", + range: [5, 7], + body: [], + }, + handler: { + type: "CatchClause", + range: [8, 20], + param: { + type: "Identifier", + range: [15, 16], + name: "e", + }, + body: { + type: "BlockStatement", + range: [18, 20], + body: [], + }, + }, + finalizer: null, + }); + + node = testLintNode("try {} finally {};", "TryStatement"); + assertEquals(node[0], { + type: "TryStatement", + range: [1, 18], + block: { + type: "BlockStatement", + range: [5, 7], + body: [], + }, + handler: null, + finalizer: { + type: "BlockStatement", + range: [16, 18], + body: [], + }, + }); +}); + +Deno.test("Plugin - WhileStatement", () => { + const node = testLintNode("while (foo) {}", "WhileStatement"); + assertEquals(node[0], { + type: "WhileStatement", + range: [1, 15], + test: { + type: "Identifier", + range: [8, 11], + name: "foo", + }, + body: { + type: "BlockStatement", + range: [13, 15], + body: [], + }, + }); +}); diff --git a/tests/unit/lint_selectors_test.ts b/tests/unit/lint_selectors_test.ts new file mode 100644 index 0000000000..0909a4907a --- /dev/null +++ b/tests/unit/lint_selectors_test.ts @@ -0,0 +1,610 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +import { assertEquals } from "@std/assert/equals"; +import { + ATTR_BIN_NODE, + ATTR_EXISTS_NODE, + BinOp, + ELEM_NODE, + Lexer, + parseSelector, + PSEUDO_FIRST_CHILD, + PSEUDO_HAS, + PSEUDO_LAST_CHILD, + PSEUDO_NOT, + PSEUDO_NTH_CHILD, + RELATION_NODE, + splitSelectors, + Token, +} from "../../cli/js/40_lint_selector.js"; +import { assertThrows } from "@std/assert"; + +Deno.test("splitSelectors", () => { + assertEquals(splitSelectors("foo"), ["foo"]); + assertEquals(splitSelectors("foo, bar"), ["foo", "bar"]); + assertEquals(splitSelectors("foo:f(bar, baz)"), ["foo:f(bar, baz)"]); + assertEquals(splitSelectors("foo:f(bar, baz), foobar"), [ + "foo:f(bar, baz)", + "foobar", + ]); +}); + +interface LexState { + token: number; + value: string; +} + +function testLexer(input: string): LexState[] { + const out: LexState[] = []; + const l = new Lexer(input); + + while (l.token !== Token.EOF) { + out.push({ token: l.token, value: l.value }); + l.next(); + } + + return out; +} + +const Tags: Record = { Foo: 1, Bar: 2, FooBar: 3 }; +const Attrs: Record = { foo: 1, bar: 2, foobar: 3, attr: 4 }; +const toTag = (name: string): number => Tags[name]; +const toAttr = (name: string): number => Attrs[name]; + +const testParse = (input: string) => parseSelector(input, toTag, toAttr); + +Deno.test("Lexer - Elem", () => { + assertEquals(testLexer("Foo"), [ + { token: Token.Word, value: "Foo" }, + ]); + assertEquals(testLexer("foo-bar"), [ + { token: Token.Word, value: "foo-bar" }, + ]); + assertEquals(testLexer("foo_bar"), [ + { token: Token.Word, value: "foo_bar" }, + ]); + assertEquals(testLexer("Foo Bar Baz"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Bar" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Baz" }, + ]); + assertEquals(testLexer("Foo Bar Baz"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: 
"Bar" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Baz" }, + ]); +}); + +Deno.test("Lexer - Relation >", () => { + assertEquals(testLexer("Foo > Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: ">" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("Foo>Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: ">" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer(">Bar"), [ + { token: Token.Op, value: ">" }, + { token: Token.Word, value: "Bar" }, + ]); +}); + +Deno.test("Lexer - Relation +", () => { + assertEquals(testLexer("Foo + Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: "+" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("Foo+Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: "+" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("+Bar"), [ + { token: Token.Op, value: "+" }, + { token: Token.Word, value: "Bar" }, + ]); +}); + +Deno.test("Lexer - Relation ~", () => { + assertEquals(testLexer("Foo ~ Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: "~" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("Foo~Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Op, value: "~" }, + { token: Token.Word, value: "Bar" }, + ]); + assertEquals(testLexer("~Bar"), [ + { token: Token.Op, value: "~" }, + { token: Token.Word, value: "Bar" }, + ]); + + assertEquals(testLexer("Foo Bar ~ Bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Space, value: "" }, + { token: Token.Word, value: "Bar" }, + { token: Token.Op, value: "~" }, + { token: Token.Word, value: "Bar" }, + ]); +}); + +Deno.test("Lexer - Attr", () => { + assertEquals(testLexer("[attr]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr=1]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "=" }, + { token: Token.Word, value: "1" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr='foo']"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "=" }, + { token: Token.String, value: "foo" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr>=2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: ">=" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr<=2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "<=" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr>2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: ">" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr<2]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "<" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr!=2]"), [ + { token: Token.BracketOpen, value: "" 
}, + { token: Token.Word, value: "attr" }, + { token: Token.Op, value: "!=" }, + { token: Token.Word, value: "2" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr.foo=1]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.Dot, value: "" }, + { token: Token.Word, value: "foo" }, + { token: Token.Op, value: "=" }, + { token: Token.Word, value: "1" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("[attr] [attr]"), [ + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.BracketClose, value: "" }, + { token: Token.Space, value: "" }, + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.BracketClose, value: "" }, + ]); + assertEquals(testLexer("Foo[attr][attr2=1]"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr" }, + { token: Token.BracketClose, value: "" }, + { token: Token.BracketOpen, value: "" }, + { token: Token.Word, value: "attr2" }, + { token: Token.Op, value: "=" }, + { token: Token.Word, value: "1" }, + { token: Token.BracketClose, value: "" }, + ]); +}); + +Deno.test("Lexer - Pseudo", () => { + assertEquals(testLexer(":foo-bar"), [ + { token: Token.Colon, value: "" }, + { token: Token.Word, value: "foo-bar" }, + ]); + assertEquals(testLexer("Foo:foo-bar"), [ + { token: Token.Word, value: "Foo" }, + { token: Token.Colon, value: "" }, + { token: Token.Word, value: "foo-bar" }, + ]); + assertEquals(testLexer(":foo-bar(baz)"), [ + { token: Token.Colon, value: "" }, + { token: Token.Word, value: "foo-bar" }, + { token: Token.BraceOpen, value: "" }, + { token: Token.Word, value: "baz" }, + { token: Token.BraceClose, value: "" }, + ]); + assertEquals(testLexer(":foo-bar(2n + 1)"), [ + { token: Token.Colon, value: "" }, + { token: Token.Word, value: "foo-bar" }, + { token: Token.BraceOpen, value: "" }, + { token: Token.Word, value: "2n" }, + { token: Token.Op, value: "+" }, + { token: Token.Word, value: "1" }, + { token: Token.BraceClose, value: "" }, + ]); +}); + +Deno.test("Parser - Elem", () => { + assertEquals(testParse("Foo"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + ]]); +}); + +Deno.test("Parser - Relation (descendant)", () => { + assertEquals(testParse("Foo Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { + type: RELATION_NODE, + op: BinOp.Space, + }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); +}); + +Deno.test("Parser - Relation", () => { + assertEquals(testParse("Foo > Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { + type: RELATION_NODE, + op: BinOp.Greater, + }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); + + assertEquals(testParse("Foo ~ Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { + type: RELATION_NODE, + op: BinOp.Tilde, + }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); + + assertEquals(testParse("Foo + Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { + type: RELATION_NODE, + op: BinOp.Plus, + }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); +}); + +Deno.test("Parser - Attr", () => { + assertEquals(testParse("[foo]"), [[ + { + type: ATTR_EXISTS_NODE, + prop: [1], + }, + ]]); + + assertEquals(testParse("[foo][bar]"), [[ + { + type: ATTR_EXISTS_NODE, + prop: [1], + }, + { + type: ATTR_EXISTS_NODE, + 
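+      // prop ids come from the toAttr mapping above (foo -> 1, bar -> 2)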
prop: [2], + }, + ]]); + + assertEquals(testParse("[foo=1]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: 1, + }, + ]]); + assertEquals(testParse("[foo=true]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: true, + }, + ]]); + assertEquals(testParse("[foo=false]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: false, + }, + ]]); + assertEquals(testParse("[foo=null]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: null, + }, + ]]); + assertEquals(testParse("[foo='str']"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: "str", + }, + ]]); + assertEquals(testParse('[foo="str"]'), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: "str", + }, + ]]); + assertEquals(testParse("[foo=/str/]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: /str/, + }, + ]]); + assertEquals(testParse("[foo=/str/g]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1], + value: /str/g, + }, + ]]); +}); + +Deno.test("Parser - Attr nested", () => { + assertEquals(testParse("[foo.bar]"), [[ + { + type: ATTR_EXISTS_NODE, + prop: [1, 2], + }, + ]]); + + assertEquals(testParse("[foo.bar = 2]"), [[ + { + type: ATTR_BIN_NODE, + op: BinOp.Equal, + prop: [1, 2], + value: 2, + }, + ]]); +}); + +Deno.test("Parser - Pseudo no value", () => { + assertEquals(testParse(":first-child"), [[ + { + type: PSEUDO_FIRST_CHILD, + }, + ]]); + assertEquals(testParse(":last-child"), [[ + { + type: PSEUDO_LAST_CHILD, + }, + ]]); +}); + +Deno.test("Parser - Pseudo nth-child", () => { + assertEquals(testParse(":nth-child(2)"), [[ + { + type: PSEUDO_NTH_CHILD, + of: null, + op: null, + step: 0, + stepOffset: 1, + repeat: false, + }, + ]]); + assertEquals(testParse(":nth-child(2n)"), [[ + { + type: PSEUDO_NTH_CHILD, + of: null, + op: null, + step: 2, + stepOffset: 0, + repeat: true, + }, + ]]); + assertEquals(testParse(":nth-child(-2n)"), [[ + { + type: PSEUDO_NTH_CHILD, + of: null, + op: null, + step: -2, + stepOffset: 0, + repeat: true, + }, + ]]); + assertEquals(testParse(":nth-child(2n + 1)"), [[ + { + type: PSEUDO_NTH_CHILD, + of: null, + op: "+", + step: 2, + stepOffset: 1, + repeat: true, + }, + ]]); + assertEquals(testParse(":nth-child(2n + 1 of Foo[attr])"), [[ + { + type: PSEUDO_NTH_CHILD, + of: [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { type: ATTR_EXISTS_NODE, prop: [4] }, + ], + op: "+", + step: 2, + stepOffset: 1, + repeat: true, + }, + ]]); + + // Invalid selectors + assertThrows(() => testParse(":nth-child(2n + 1 of Foo[attr], Bar)")); + assertThrows(() => testParse(":nth-child(2n - 1 foo)")); +}); + +Deno.test("Parser - Pseudo has/is/where", () => { + assertEquals(testParse(":has(Foo:has(Foo), Bar)"), [[ + { + type: PSEUDO_HAS, + selectors: [ + [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { + type: PSEUDO_HAS, + selectors: [ + [{ type: ELEM_NODE, elem: 1, wildcard: false }], + ], + }, + ], + [ + { type: ELEM_NODE, elem: 2, wildcard: false }, + ], + ], + }, + ]]); + assertEquals(testParse(":where(Foo:where(Foo), Bar)"), [[ + { + type: PSEUDO_HAS, + selectors: [ + [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { + type: PSEUDO_HAS, + selectors: [ + [{ type: ELEM_NODE, elem: 1, wildcard: false }], + ], + }, + ], + [ + { type: ELEM_NODE, elem: 2, wildcard: false }, + ], + ], + }, + ]]); + assertEquals(testParse(":is(Foo:is(Foo), Bar)"), [[ + { + type: PSEUDO_HAS, + selectors: [ + [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { 
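+      // per these fixtures, :is() and :where() lower to the same PSEUDO_HAS node used for :has()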
+ type: PSEUDO_HAS, + selectors: [ + [{ type: ELEM_NODE, elem: 1, wildcard: false }], + ], + }, + ], + [ + { type: ELEM_NODE, elem: 2, wildcard: false }, + ], + ], + }, + ]]); +}); + +Deno.test("Parser - Pseudo not", () => { + assertEquals(testParse(":not(Foo:not(Foo), Bar)"), [[ + { + type: PSEUDO_NOT, + selectors: [ + [ + { type: ELEM_NODE, elem: 1, wildcard: false }, + { + type: PSEUDO_NOT, + selectors: [ + [{ type: ELEM_NODE, elem: 1, wildcard: false }], + ], + }, + ], + [ + { type: ELEM_NODE, elem: 2, wildcard: false }, + ], + ], + }, + ]]); +}); + +Deno.test("Parser - mixed", () => { + assertEquals(testParse("Foo[foo=true] Bar"), [[ + { + type: ELEM_NODE, + elem: 1, + wildcard: false, + }, + { type: ATTR_BIN_NODE, op: BinOp.Equal, prop: [1], value: true }, + { type: RELATION_NODE, op: BinOp.Space }, + { + type: ELEM_NODE, + elem: 2, + wildcard: false, + }, + ]]); +}); diff --git a/tests/unit/ops_test.ts b/tests/unit/ops_test.ts index 6de55f8b66..631e5c5736 100644 --- a/tests/unit/ops_test.ts +++ b/tests/unit/ops_test.ts @@ -1,6 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -const EXPECTED_OP_COUNT = 12; +const EXPECTED_OP_COUNT = 13; Deno.test(function checkExposedOps() { // @ts-ignore TS doesn't allow to index with symbol diff --git a/tests/unit/quic_test.ts b/tests/unit/quic_test.ts new file mode 100644 index 0000000000..f5423327de --- /dev/null +++ b/tests/unit/quic_test.ts @@ -0,0 +1,172 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +import { assertEquals } from "./test_util.ts"; + +const cert = Deno.readTextFileSync("tests/testdata/tls/localhost.crt"); +const key = Deno.readTextFileSync("tests/testdata/tls/localhost.key"); +const caCerts = [Deno.readTextFileSync("tests/testdata/tls/RootCA.pem")]; + +async function pair(opt?: Deno.QuicTransportOptions): Promise< + [Deno.QuicConn, Deno.QuicConn, Deno.QuicListener] +> { + const listener = await Deno.listenQuic({ + hostname: "localhost", + port: 0, + cert, + key, + alpnProtocols: ["deno-test"], + ...opt, + }); + + const [server, client] = await Promise.all([ + listener.accept(), + Deno.connectQuic({ + hostname: "localhost", + port: listener.addr.port, + caCerts, + alpnProtocols: ["deno-test"], + ...opt, + }), + ]); + + assertEquals(server.protocol, "deno-test"); + assertEquals(client.protocol, "deno-test"); + assertEquals(client.remoteAddr, listener.addr); + + return [server, client, listener]; +} + +Deno.test("bidirectional stream", async () => { + const [server, client, listener] = await pair(); + + const encoded = (new TextEncoder()).encode("hi!"); + + { + const bi = await server.createBidirectionalStream({ sendOrder: 42 }); + assertEquals(bi.writable.sendOrder, 42); + bi.writable.sendOrder = 0; + assertEquals(bi.writable.sendOrder, 0); + await bi.writable.getWriter().write(encoded); + } + + { + const { value: bi } = await client.incomingBidirectionalStreams + .getReader() + .read(); + const { value: data } = await bi!.readable.getReader().read(); + assertEquals(data, encoded); + } + + listener.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("unidirectional stream", async () => { + const [server, client, listener] = await pair(); + + const encoded = (new TextEncoder()).encode("hi!"); + + { + const uni = await server.createUnidirectionalStream({ sendOrder: 42 }); + assertEquals(uni.sendOrder, 42); + uni.sendOrder = 0; + assertEquals(uni.sendOrder, 0); + await uni.getWriter().write(encoded); + } + + { + const 
{ value: uni } = await client.incomingUnidirectionalStreams + .getReader() + .read(); + const { value: data } = await uni!.getReader().read(); + assertEquals(data, encoded); + } + + listener.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("datagrams", async () => { + const [server, client, listener] = await pair(); + + const encoded = (new TextEncoder()).encode("hi!"); + + await server.sendDatagram(encoded); + + const data = await client.readDatagram(); + assertEquals(data, encoded); + + listener.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("closing", async () => { + const [server, client, listener] = await pair(); + + server.close({ closeCode: 42, reason: "hi!" }); + + assertEquals(await client.closed, { closeCode: 42, reason: "hi!" }); + + listener.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("max concurrent streams", async () => { + const [server, client, listener] = await pair({ + maxConcurrentBidirectionalStreams: 1, + maxConcurrentUnidirectionalStreams: 1, + }); + + { + await server.createBidirectionalStream(); + await server.createBidirectionalStream() + .then(() => { + throw new Error("expected failure"); + }, () => { + // success! + }); + } + + { + await server.createUnidirectionalStream(); + await server.createUnidirectionalStream() + .then(() => { + throw new Error("expected failure"); + }, () => { + // success! + }); + } + + listener.close({ closeCode: 0, reason: "" }); + server.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); + +Deno.test("incoming", async () => { + const listener = await Deno.listenQuic({ + hostname: "localhost", + port: 0, + cert, + key, + alpnProtocols: ["deno-test"], + }); + + const connect = () => + Deno.connectQuic({ + hostname: "localhost", + port: listener.addr.port, + caCerts, + alpnProtocols: ["deno-test"], + }); + + const c1p = connect(); + const i1 = await listener.incoming(); + const server = await i1.accept(); + const client = await c1p; + + assertEquals(server.protocol, "deno-test"); + assertEquals(client.protocol, "deno-test"); + assertEquals(client.remoteAddr, listener.addr); + + listener.close({ closeCode: 0, reason: "" }); + client.close({ closeCode: 0, reason: "" }); +}); diff --git a/tests/unit_node/_fs/_fs_handle_test.ts b/tests/unit_node/_fs/_fs_handle_test.ts index e26b82aa06..84d72c0745 100644 --- a/tests/unit_node/_fs/_fs_handle_test.ts +++ b/tests/unit_node/_fs/_fs_handle_test.ts @@ -117,3 +117,85 @@ Deno.test("[node/fs filehandle.writeFile] Write to file", async function () { assertEquals(decoder.decode(data), "hello world"); }); + +Deno.test( + "[node/fs filehandle.truncate] Truncate file with length", + async function () { + const tempFile: string = await Deno.makeTempFile(); + const fileHandle = await fs.open(tempFile, "w+"); + + await fileHandle.writeFile("hello world"); + + await fileHandle.truncate(5); + + const data = Deno.readFileSync(tempFile); + await Deno.remove(tempFile); + await fileHandle.close(); + + assertEquals(decoder.decode(data), "hello"); + }, +); + +Deno.test( + "[node/fs filehandle.truncate] Truncate file without length", + async function () { + const tempFile: string = await Deno.makeTempFile(); + const fileHandle = await fs.open(tempFile, "w+"); + + await fileHandle.writeFile("hello world"); + + await fileHandle.truncate(); + + const data = Deno.readFileSync(tempFile); + await Deno.remove(tempFile); + await fileHandle.close(); + + 
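+    // truncate() with no argument defaults to length 0, leaving an empty file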
assertEquals(decoder.decode(data), ""); + }, +); + +Deno.test( + "[node/fs filehandle.truncate] Truncate file with extension", + async function () { + const tempFile: string = await Deno.makeTempFile(); + const fileHandle = await fs.open(tempFile, "w+"); + + await fileHandle.writeFile("hi"); + + await fileHandle.truncate(5); + + const data = Deno.readFileSync(tempFile); + await Deno.remove(tempFile); + await fileHandle.close(); + + const expected = new Uint8Array(5); + expected.set(new TextEncoder().encode("hi")); + + assertEquals(data, expected); + assertEquals(data.length, 5); + assertEquals(decoder.decode(data.subarray(0, 2)), "hi"); + // Verify null bytes + assertEquals(data[2], 0); + assertEquals(data[3], 0); + assertEquals(data[4], 0); + }, +); + +Deno.test( + "[node/fs filehandle.truncate] Truncate file with negative length", + async function () { + const tempFile: string = await Deno.makeTempFile(); + const fileHandle = await fs.open(tempFile, "w+"); + + await fileHandle.writeFile("hello world"); + + await fileHandle.truncate(-1); + + const data = Deno.readFileSync(tempFile); + await Deno.remove(tempFile); + await fileHandle.close(); + + assertEquals(decoder.decode(data), ""); + assertEquals(data.length, 0); + }, +); diff --git a/tests/unit_node/crypto/crypto_cipher_gcm_test.ts b/tests/unit_node/crypto/crypto_cipher_gcm_test.ts index b379a43696..16f6f56a9c 100644 --- a/tests/unit_node/crypto/crypto_cipher_gcm_test.ts +++ b/tests/unit_node/crypto/crypto_cipher_gcm_test.ts @@ -119,3 +119,23 @@ Deno.test({ ); }, }); + +// Issue #27441 +// https://github.com/denoland/deno/issues/27441 +Deno.test({ + name: "aes-256-gcm supports IV of non standard length", + fn() { + const decipher = crypto.createDecipheriv( + "aes-256-gcm", + Buffer.from("eYLEiLFQnpjYksWTiKpwv2sKhw+WJb5Fo/aY2YqXswc=", "base64"), + Buffer.from("k5oP3kb8tTbZaL3PxbFWN8ToOb8vfv2b1EuPz1LbmYU=", "base64"), // 256 bits IV + ); + const decrypted = decipher.update( + "s0/KBsFec29XLrGbAnLiNA==", + "base64", + "utf-8", + ); + assertEquals(decrypted, "this is a secret"); + decipher.final(); + }, +}); diff --git a/tests/unit_node/crypto/crypto_cipher_test.ts b/tests/unit_node/crypto/crypto_cipher_test.ts index 65a5b29eeb..e40625c5a4 100644 --- a/tests/unit_node/crypto/crypto_cipher_test.ts +++ b/tests/unit_node/crypto/crypto_cipher_test.ts @@ -361,6 +361,19 @@ Deno.test({ name: "getCiphers", fn() { assertEquals(crypto.getCiphers().includes("aes-128-cbc"), true); + + const getZeroKey = (cipher: string) => zeros(+cipher.match(/\d+/)![0] / 8); + const getZeroIv = (cipher: string) => { + if (cipher.includes("gcm") || cipher.includes("ecb")) { + return zeros(12); + } + return zeros(16); + }; + + for (const cipher of crypto.getCiphers()) { + crypto.createCipheriv(cipher, getZeroKey(cipher), getZeroIv(cipher)) + .final(); + } }, }); diff --git a/tests/unit_node/crypto/crypto_misc_test.ts b/tests/unit_node/crypto/crypto_misc_test.ts index 007009339d..9f72683398 100644 --- a/tests/unit_node/crypto/crypto_misc_test.ts +++ b/tests/unit_node/crypto/crypto_misc_test.ts @@ -1,7 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
import { randomFillSync, randomUUID, timingSafeEqual } from "node:crypto"; import { Buffer } from "node:buffer"; -import { assert, assertEquals } from "../../unit/test_util.ts"; +import { assert, assertEquals, assertThrows } from "../../unit/test_util.ts"; import { assertNotEquals } from "@std/assert"; Deno.test("[node/crypto.getRandomUUID] works the same way as Web Crypto API", () => { @@ -36,3 +36,10 @@ Deno.test("[node/crypto.timingSafeEqual] compares equal Buffer with different by assert(timingSafeEqual(a, b)); }); + +Deno.test("[node/crypto.timingSafeEqual] RangeError on Buffer with different byteLength", () => { + const a = Buffer.from([212, 213]); + const b = Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 212, 213, 0]); + + assertThrows(() => timingSafeEqual(a, b), RangeError); +}); diff --git a/tests/unit_node/crypto/gcmEncryptExtIV128.json b/tests/unit_node/crypto/gcmEncryptExtIV128.json index 64896642d4..f0b4bca1f1 100644 --- a/tests/unit_node/crypto/gcmEncryptExtIV128.json +++ b/tests/unit_node/crypto/gcmEncryptExtIV128.json @@ -51373,5 +51373,322 @@ 102, 238 ] + }, + { + "key": [ + 131, + 249, + 217, + 125, + 74, + 183, + 89, + 253, + 220, + 195, + 239, + 84, + 160, + 226, + 168, + 236 + ], + "nonce": [ + 207 + ], + "aad": [ + 109, + 212, + 158, + 174, + 180, + 16, + 61, + 172, + 143, + 151, + 227, + 35, + 73, + 70, + 221, + 45 + ], + "plaintext": [ + 119, + 230, + 50, + 156, + 249, + 66, + 79, + 113, + 200, + 8, + 223, + 145, + 112, + 191, + 210, + 152 + ], + "ciphertext": [ + 80, + 222, + 134, + 167, + 169, + 42, + 138, + 94, + 163, + 61, + 181, + 105, + 107, + 150, + 205, + 119 + ], + "tag": [ + 170, + 24, + 30, + 132, + 188, + 139, + 75, + 245, + 166, + 137, + 39, + 196, + 9, + 212, + 34, + 203 + ] + }, + { + "key": [ + 202, + 145, + 226, + 65, + 68, + 9, + 164, + 57, + 176, + 101, + 115, + 215, + 114, + 249, + 10, + 251 + ], + "nonce": [ + 23, + 112, + 8, + 249, + 32, + 160, + 97, + 105, + 204, + 223, + 117, + 58, + 51, + 133, + 83, + 254, + 253, + 70, + 132, + 88, + 105, + 201, + 36, + 77, + 164, + 73, + 151, + 248, + 61, + 76, + 232, + 5, + 161, + 135, + 7, + 200, + 77, + 17, + 79, + 156, + 104, + 66, + 123, + 34, + 132, + 21, + 145, + 230, + 202, + 236, + 245, + 195, + 231, + 42, + 37, + 22, + 122, + 168, + 96, + 197, + 27, + 220, + 26, + 165, + 109, + 205, + 105, + 242, + 154, + 47, + 53, + 231, + 10, + 50, + 43, + 158, + 186, + 9, + 42, + 152, + 214, + 106, + 149, + 107, + 77, + 41, + 67, + 131, + 160, + 235, + 171, + 38, + 247, + 196, + 223, + 26, + 93, + 64, + 96, + 223, + 196, + 90, + 20, + 21, + 81, + 0, + 234, + 125, + 158, + 50, + 222, + 187, + 101, + 55, + 64, + 107, + 117, + 114, + 145, + 113, + 5, + 5, + 20, + 46, + 118, + 89, + 252, + 119 + ], + "aad": [ + 191, + 235, + 21, + 252, + 247, + 177, + 95, + 14, + 20, + 192, + 68, + 57, + 182, + 121, + 80, + 189 + ], + "plaintext": [ + 40, + 0, + 62, + 48, + 196, + 164, + 202, + 158, + 65, + 170, + 254, + 250, + 193, + 225, + 195, + 222 + ], + "ciphertext": [ + 0, + 228, + 114, + 151, + 31, + 58, + 119, + 112, + 170, + 113, + 88, + 253, + 146, + 241, + 123, + 183 + ], + "tag": [ + 22, + 102, + 27, + 133, + 235, + 81, + 100, + 108, + 148, + 207, + 43, + 228, + 228, + 45, + 122, + 142 + ] } ] diff --git a/tests/unit_node/crypto/gcmEncryptExtIV256.json b/tests/unit_node/crypto/gcmEncryptExtIV256.json index cb8ba30869..808c47ec6e 100644 --- a/tests/unit_node/crypto/gcmEncryptExtIV256.json +++ b/tests/unit_node/crypto/gcmEncryptExtIV256.json @@ -57373,5 +57373,354 @@ 246, 57 ] + }, + { + "key": [ + 187, + 70, + 53, + 215, + 102, 
+ 221, + 14, + 74, + 112, + 25, + 209, + 114, + 76, + 115, + 110, + 31, + 44, + 1, + 106, + 249, + 226, + 158, + 125, + 58, + 162, + 192, + 222, + 35, + 231, + 128, + 175, + 38 + ], + "nonce": [ + 171 + ], + "aad": [ + 15, + 133, + 199, + 219, + 235, + 103, + 75, + 122, + 112, + 195, + 81, + 37, + 211, + 97, + 147, + 80 + ], + "plaintext": [ + 208, + 92, + 232, + 120, + 217, + 70, + 98, + 209, + 82, + 11, + 24, + 75, + 75, + 239, + 60, + 69 + ], + "ciphertext": [ + 81, + 186, + 162, + 106, + 106, + 113, + 156, + 22, + 0, + 100, + 95, + 243, + 191, + 223, + 165, + 59 + ], + "tag": [ + 107, + 213, + 78, + 81, + 132, + 235, + 48, + 9, + 52, + 179, + 146, + 195, + 43, + 124, + 26, + 110 + ] + }, + { + "key": [ + 252, + 188, + 126, + 182, + 39, + 22, + 220, + 127, + 121, + 43, + 97, + 148, + 210, + 109, + 109, + 86, + 158, + 174, + 224, + 122, + 157, + 60, + 55, + 202, + 66, + 133, + 64, + 144, + 102, + 30, + 24, + 69 + ], + "nonce": [ + 76, + 140, + 70, + 36, + 39, + 155, + 35, + 180, + 149, + 199, + 136, + 132, + 76, + 118, + 210, + 37, + 235, + 242, + 56, + 38, + 89, + 156, + 62, + 28, + 244, + 219, + 29, + 162, + 214, + 90, + 127, + 117, + 68, + 216, + 232, + 111, + 204, + 51, + 251, + 17, + 61, + 49, + 116, + 184, + 199, + 144, + 49, + 34, + 203, + 89, + 103, + 246, + 16, + 115, + 130, + 204, + 90, + 198, + 231, + 160, + 228, + 202, + 79, + 8, + 222, + 62, + 145, + 29, + 72, + 62, + 104, + 37, + 61, + 63, + 136, + 108, + 254, + 52, + 155, + 249, + 50, + 153, + 162, + 142, + 102, + 91, + 192, + 150, + 165, + 28, + 232, + 76, + 230, + 148, + 11, + 52, + 160, + 55, + 114, + 36, + 131, + 185, + 106, + 123, + 37, + 80, + 127, + 90, + 4, + 100, + 60, + 103, + 48, + 250, + 170, + 182, + 24, + 230, + 35, + 26, + 114, + 119, + 20, + 214, + 243, + 102, + 250, + 155 + ], + "aad": [ + 60, + 24, + 42, + 241, + 156, + 70, + 255, + 74, + 203, + 218, + 206, + 207, + 112, + 180, + 47, + 181 + ], + "plaintext": [ + 34, + 20, + 79, + 193, + 47, + 123, + 197, + 82, + 43, + 136, + 183, + 108, + 141, + 237, + 28, + 118 + ], + "ciphertext": [ + 200, + 217, + 129, + 7, + 192, + 203, + 60, + 15, + 210, + 24, + 154, + 233, + 114, + 128, + 213, + 98 + ], + "tag": [ + 41, + 6, + 119, + 35, + 48, + 236, + 217, + 163, + 184, + 168, + 40, + 118, + 164, + 235, + 222, + 234 + ] } ] diff --git a/tests/unit_node/http_test.ts b/tests/unit_node/http_test.ts index e6c36eea19..f30a4a20a3 100644 --- a/tests/unit_node/http_test.ts +++ b/tests/unit_node/http_test.ts @@ -10,6 +10,7 @@ import http, { } from "node:http"; import url from "node:url"; import https from "node:https"; +import zlib from "node:zlib"; import net, { Socket } from "node:net"; import fs from "node:fs"; import { text } from "node:stream/consumers"; @@ -1823,3 +1824,60 @@ Deno.test("[node/http] ServerResponse socket", async () => { await promise; }); + +Deno.test("[node/http] decompress brotli response", { + permissions: { net: true }, +}, async () => { + let received = false; + const ac = new AbortController(); + const server = Deno.serve({ port: 5928, signal: ac.signal }, (_req) => { + received = true; + return Response.json([ + ["accept-language", "*"], + ["host", "localhost:3000"], + ["user-agent", "Deno/2.1.1"], + ], {}); + }); + const { promise, resolve, reject } = Promise.withResolvers(); + let body = ""; + + const request = http.get( + "http://localhost:5928/", + { + headers: { + "accept-encoding": "gzip, deflate, br, zstd", + }, + }, + (resp) => { + const decompress = zlib.createBrotliDecompress(); + resp.on("data", (chunk) => { + 
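+      // node:http does not decompress response bodies itself, so each compressed chunk is fed through the brotli decompressor by hand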
decompress.write(chunk); + }); + + resp.on("end", () => { + decompress.end(); + }); + + decompress.on("data", (chunk) => { + body += chunk; + }); + + decompress.on("end", () => { + resolve(); + }); + }, + ); + request.on("error", reject); + request.end(() => { + assert(received); + }); + + await promise; + ac.abort(); + await server.finished; + + assertEquals(JSON.parse(body), [["accept-language", "*"], [ + "host", + "localhost:3000", + ], ["user-agent", "Deno/2.1.1"]]); +}); diff --git a/tests/unit_node/worker_threads_test.ts b/tests/unit_node/worker_threads_test.ts index 808fd6116e..5f38d51d4d 100644 --- a/tests/unit_node/worker_threads_test.ts +++ b/tests/unit_node/worker_threads_test.ts @@ -841,3 +841,26 @@ Deno.test({ assertEquals(result, true); }, }); + +Deno.test("[node/worker_threads] Worker runs async ops correctly", async () => { + const recvMessage = Promise.withResolvers(); + const timer = setTimeout(() => recvMessage.reject(), 1000); + const worker = new workerThreads.Worker( + ` + import { parentPort } from "node:worker_threads"; + setTimeout(() => { + parentPort.postMessage("Hello from worker"); + }, 10); + `, + { eval: true }, + ); + + worker.on("message", (msg) => { + assertEquals(msg, "Hello from worker"); + worker.terminate(); + recvMessage.resolve(); + clearTimeout(timer); + }); + + await recvMessage.promise; +}); diff --git a/tools/core_import_map.json b/tools/core_import_map.json index bc0674277e..d38221eb4c 100644 --- a/tools/core_import_map.json +++ b/tools/core_import_map.json @@ -250,6 +250,7 @@ "ext:deno_node/_util/std_fmt_colors.ts": "../ext/node/polyfills/_util/std_fmt_colors.ts", "ext:deno_telemetry/telemetry.ts": "../ext/deno_telemetry/telemetry.ts", "ext:deno_telemetry/util.ts": "../ext/deno_telemetry/util.ts", + "ext:cli/40_lint_selector.js": "../cli/js/40_lint_selector.js", "@std/archive": "../tests/util/std/archive/mod.ts", "@std/archive/tar": "../tests/util/std/archive/tar.ts", "@std/archive/untar": "../tests/util/std/archive/untar.ts",