Mirror of https://github.com/denoland/deno.git, synced 2025-01-21 04:52:26 -05:00

Commit 2131c6214a: Merge branch 'main' into ext-node-errors-no-tostring

8,331 changed files with 638,722 additions and 328,509 deletions
@@ -11,8 +11,17 @@ rustflags = [
   "link-arg=/STACK:4194304",
 ]
 
+[target.x86_64-apple-darwin]
+rustflags = [
+  "-C",
+  "link-args=-weak_framework Metal -weak_framework MetalPerformanceShaders -weak_framework QuartzCore -weak_framework CoreGraphics",
+]
+
 [target.aarch64-apple-darwin]
-rustflags = ["-C", "link-arg=-fuse-ld=lld"]
+rustflags = [
+  "-C",
+  "link-args=-fuse-ld=lld -weak_framework Metal -weak_framework MetalPerformanceShaders -weak_framework QuartzCore -weak_framework CoreGraphics",
+]
 
 [target.'cfg(all())']
 rustflags = [
@@ -1,5 +1,10 @@
 FROM mcr.microsoft.com/vscode/devcontainers/rust:1-bullseye
 
+# Install cmake
+RUN apt-get update \
+  && apt-get install -y cmake \
+  && rm -rf /var/lib/apt/lists/*
+
 # Install Deno
 ENV DENO_INSTALL=/usr/local
 RUN curl -fsSL https://deno.land/x/install/install.sh | sh
@@ -4,23 +4,23 @@
     "dockerfile": "Dockerfile"
   },
   "runArgs": ["--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"],
 
+  "customizations": {
+    "vscode": {
       "settings": {
         "lldb.executable": "/usr/bin/lldb",
         // VS Code doesn't watch files under ./target
         "files.watcherExclude": {
           "**/target/**": true
         }
       },
 
       "extensions": [
         "rust-lang.rust-analyzer",
-        "bungcip.better-toml",
+        "tamasfe.even-better-toml",
         "vadimcn.vscode-lldb",
         "mutantdino.resourcemonitor"
-      ],
+      ]
+    }
+  },
 
   "postCreateCommand": "git submodule update --init",
 
   "remoteUser": "vscode"
 }
@@ -4,6 +4,7 @@
   "include": [
     "ban-untagged-todo",
     "camelcase",
+    "no-console",
     "guard-for-in"
   ],
   "exclude": [
.dprint.json (78 changes)
@@ -8,6 +8,9 @@
   "json": {
     "deno": true
   },
+  "yaml": {
+    "quotes": "preferSingle"
+  },
   "exec": {
     "commands": [{
       "command": "rustfmt --config imports_granularity=item",
@@ -18,47 +21,58 @@
     ".cargo_home",
     ".git",
     "cli/bench/testdata/express-router.js",
+    "cli/bench/testdata/lsp_benchdata/",
+    "cli/bench/testdata/npm/",
+    "cli/tsc/*typescript.js",
     "cli/tsc/dts/lib.d.ts",
+    "cli/tsc/dts/lib.scripthost.d.ts",
     "cli/tsc/dts/lib.decorators*.d.ts",
+    "cli/tsc/dts/lib.webworker*.d.ts",
     "cli/tsc/dts/lib.dom*.d.ts",
     "cli/tsc/dts/lib.es*.d.ts",
-    "cli/tsc/dts/lib.scripthost.d.ts",
-    "cli/tsc/dts/lib.webworker*.d.ts",
     "cli/tsc/dts/typescript.d.ts",
-    "cli/tests/node_compat/test",
-    "cli/tests/testdata/file_extensions/ts_with_js_extension.js",
-    "cli/tests/testdata/fmt/badly_formatted.json",
-    "cli/tests/testdata/fmt/badly_formatted.md",
-    "cli/tests/testdata/byte_order_mark.ts",
-    "cli/tests/testdata/encoding",
-    "cli/tests/testdata/fmt/",
-    "cli/tests/testdata/lint/glob/",
-    "cli/tests/testdata/test/glob/",
-    "cli/tests/testdata/import_assertions/json_with_shebang.json",
-    "cli/tests/testdata/run/error_syntax_empty_trailing_line.mjs",
-    "cli/tests/testdata/run/inline_js_source_map*",
-    "cli/tests/testdata/malformed_config/",
-    "cli/tests/testdata/npm/registry/",
-    "cli/tests/testdata/test/markdown_windows.md",
-    "cli/tsc/*typescript.js",
+    "ext/websocket/autobahn/reports",
     "gh-pages",
     "target",
-    "test_ffi/tests/test.js",
-    "test_util/std",
-    "test_util/wpt",
-    "third_party",
-    "tools/node_compat/TODO.md",
-    "tools/node_compat/node",
-    "tools/wpt/expectation.json",
-    "tools/wpt/manifest.json",
-    "ext/websocket/autobahn/reports"
+    "tests/ffi/tests/test.js",
+    "tests/node_compat/runner/suite",
+    "tests/node_compat/runner/TODO.md",
+    "tests/node_compat/test",
+    "tests/registry/",
+    "tests/specs/bench/default_ts",
+    "tests/specs/fmt",
+    "tests/specs/lint/bom",
+    "tests/specs/lint/default_ts",
+    "tests/specs/lint/syntax_error_reporting",
+    "tests/specs/publish/no_check_surfaces_syntax_error",
+    "tests/specs/run/default_ts",
+    "tests/specs/test/default_ts",
+    "tests/testdata/byte_order_mark.ts",
+    "tests/testdata/encoding",
+    "tests/testdata/file_extensions/ts_with_js_extension.js",
+    "tests/testdata/fmt/",
+    "tests/testdata/fmt/badly_formatted.ipynb",
+    "tests/testdata/fmt/badly_formatted.json",
+    "tests/testdata/fmt/badly_formatted.md",
+    "tests/testdata/import_attributes/json_with_shebang.json",
+    "tests/testdata/lint/glob/",
+    "tests/testdata/malformed_config/",
+    "tests/testdata/run/byte_order_mark.ts",
+    "tests/testdata/run/error_syntax_empty_trailing_line.mjs",
+    "tests/testdata/run/inline_js_source_map*",
+    "tests/testdata/test/markdown_windows.md",
+    "tests/util/std",
+    "tests/wpt/runner/expectation.json",
+    "tests/wpt/runner/manifest.json",
+    "tests/wpt/suite",
+    "third_party"
   ],
   "plugins": [
-    "https://plugins.dprint.dev/typescript-0.86.1.wasm",
-    "https://plugins.dprint.dev/json-0.17.4.wasm",
-    "https://plugins.dprint.dev/markdown-0.15.3.wasm",
-    "https://plugins.dprint.dev/toml-0.5.4.wasm",
-    "https://plugins.dprint.dev/exec-0.4.3.json@42343548b8022c99b1d750be6b894fe6b6c7ee25f72ae9f9082226dd2e515072"
+    "https://plugins.dprint.dev/typescript-0.93.0.wasm",
+    "https://plugins.dprint.dev/json-0.19.4.wasm",
+    "https://plugins.dprint.dev/markdown-0.17.8.wasm",
+    "https://plugins.dprint.dev/toml-0.6.3.wasm",
+    "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
+    "https://plugins.dprint.dev/g-plane/pretty_yaml-v0.5.0.wasm"
   ]
 }
@@ -9,8 +9,9 @@ charset = utf-8
 trim_trailing_whitespace = true
 
 [*.out] # make editor neutral to .out files
-insert_final_newline = false
-trim_trailing_whitespace = false
+insert_final_newline = unset
+trim_trailing_whitespace = unset
 
-[*.py]
-indent_size = 4
+[tests/node_compat/test/**]
+insert_final_newline = unset
+trim_trailing_whitespace = unset
.gitattributes (6 changes, vendored)
@@ -2,11 +2,11 @@
 * text=auto eol=lf
 *.png -text
 
-/cli/tests/testdata/encoding/* -text
+/tests/testdata/encoding/* -text
 
 # Tell git which symlinks point to files, and which ones point to directories.
 # This is relevant for Windows only, and requires git >= 2.19.2 to work.
-/cli/tests/testdata/symlink_to_subdir symlink=dir
+/tests/testdata/symlink_to_subdir symlink=dir
 
 # Tell github these are vendored files.
 # Doesn't include them in the language statistics.
@@ -14,4 +14,4 @@
 /cli/dts/* linguist-vendored
 
 # Keep Windows line endings in cross-platform doc check test file
-/cli/tests/testdata/test/markdown_windows.md eol=crlf
+/tests/testdata/test/markdown_windows.md eol=crlf
.github/ISSUE_TEMPLATE/bug_report.md (9 changes, vendored, new file)
@@ -0,0 +1,9 @@
+---
+name: 🐛 Bug Report
+about: Report an issue found in the Deno CLI.
+title: ''
+labels: ''
+assignees: ''
+---
+
+Version: Deno x.x.x
.github/ISSUE_TEMPLATE/config.yml (8 changes, vendored, new file)
@@ -0,0 +1,8 @@
+blank_issues_enabled: true
+contact_links:
+  - name: 🦕 Deploy Feedback
+    url: https://github.com/denoland/deploy_feedback/
+    about: Provide feature requests or bug reports for the Deno Deploy edge runtime.
+  - name: 💬 Discord
+    url: https://discord.gg/deno
+    about: Join our community on Discord.
.github/ISSUE_TEMPLATE/feature_request.md (7 changes, vendored, new file)
@@ -0,0 +1,7 @@
+---
+name: 💡 Feature Request
+about: Suggest a feature for the Deno CLI.
+title: ''
+labels: ''
+assignees: ''
+---
.github/PULL_REQUEST_TEMPLATE.md (2 changes, vendored)
@@ -1,5 +1,5 @@
 <!--
-Before submitting a PR, please read https://deno.com/manual/contributing
+Before submitting a PR, please read https://docs.deno.com/runtime/manual/references/contributing
 
 1. Give the PR a descriptive title.
 
.github/SECURITY.md (10 changes, vendored)
@@ -9,11 +9,13 @@ we use to work with the security research community to address runtime security.
 
 ## Reporting a vulnerability
 
-Please email findings to security@deno.com. We strive to resolve all problems as
-quickly as possible, and are more than happy to play an active role in
-publication of writeups after the problem is resolved.
+Please open a new
+[Security advisory](https://github.com/denoland/deno/security/advisories/new)
+with your findings. We strive to resolve all problems as quickly as possible,
+and are more than happy to play an active role in publication of writeups after
+the problem is resolved.
 
-Try to include as much information as possible in the initial email, so we can
+Try to include as much information as possible in the initial report, so we can
 quickly address the issue.
 
 **Please do not open security issues in the public issue tracker.**
.github/mtime_cache/action.js (8 changes, vendored)
@@ -1,9 +1,9 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 // This file contains the implementation of a Github Action. Github uses
-// Node.js v12.x to run actions, so this is Node code and not Deno code.
+// Node.js v20.x to run actions, so this is Node code and not Deno code.
 
 const { spawn } = require("child_process");
-const fs = require("fs");
+const { existsSync } = require("fs");
 const { utimes, mkdir, readFile, writeFile } = require("fs/promises");
 const { dirname, resolve } = require("path");
 const { StringDecoder } = require("string_decoder");
@@ -147,7 +147,7 @@ async function* ls(dir = "") {
       break;
     case "160000": // Git submodule.
       // sometimes we don't checkout all submodules
-      if (fs.existsSync(path)) {
+      if (existsSync(path)) {
        yield* ls(path);
      }
      break;
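A fresh git checkout stamps every file with the clone time, so mtime-based incremental tooling (such as cargo's change detection) would otherwise rebuild everything; the mtime_cache action restores previously saved timestamps to keep those builds warm. A minimal sketch of the core idea in Deno-style TypeScript, assuming a hypothetical mtime-cache.json that maps paths to saved epoch milliseconds (the real action derives its cache and file list from git metadata):

```ts
// Sketch only: restore saved mtimes so unchanged files are not rebuilt.
// "mtime-cache.json" and its shape are illustrative assumptions.
const cache: Record<string, number> = JSON.parse(
  await Deno.readTextFile("mtime-cache.json"),
);
for (const [path, mtimeMs] of Object.entries(cache)) {
  try {
    const when = new Date(mtimeMs);
    await Deno.utime(path, when, when); // set both atime and mtime
  } catch {
    // File absent in this checkout (e.g. an unfetched submodule): skip it.
  }
}
```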
.github/mtime_cache/action.yml (2 changes, vendored)
@@ -7,4 +7,4 @@ inputs:
     required: true
 runs:
   main: action.js
-  using: node16
+  using: node20
.github/workflows/bench_cron.yml (49 changes, vendored, file deleted)
@@ -1,49 +0,0 @@
-name: bench
-
-on:
-  # Runs at minute 9 past hour 0, 6, 12, and 18.
-  schedule:
-    - cron: 9 0,6,12,18 * * *
-  workflow_dispatch:
-
-jobs:
-  bench:
-    name: bench / ${{ matrix.os }} / ${{ matrix.deno-version }}
-    if: github.repository == 'denoland/deno'
-    runs-on: ${{ matrix.os }}
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-22.04-xl]
-
-    env:
-      CARGO_TERM_COLOR: always
-      RUST_BACKTRACE: full
-      CI: true
-      GOOGLE_SVC_KEY: ${{ secrets.GOOGLE_SVC_KEY }}
-
-    steps:
-      - name: Clone repository
-        uses: actions/checkout@v3
-        with:
-          submodules: true
-          persist-credentials: false
-
-      - uses: dsherret/rust-toolchain-file@v1
-
-      - name: Install protoc
-        uses: arduino/setup-protoc@v2
-        with:
-          version: "21.12"
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Build release
-        run: cargo build --release --locked --all-targets
-
-      - name: Worker info
-        run: |
-          cat /proc/cpuinfo
-          cat /proc/meminfo
-
-      - name: Run and Post benchmarks
-        run: cargo bench --locked
.github/workflows/cargo_publish.yml (13 changes, vendored)
@@ -2,6 +2,11 @@ name: cargo_publish
 
 on: workflow_dispatch
 
+# Ensures only one publish is running at a time
+concurrency:
+  group: ${{ github.workflow }}
+  cancel-in-progress: true
+
 jobs:
   build:
     name: cargo publish
@@ -20,7 +25,7 @@ jobs:
           git config --global fetch.parallel 32
 
       - name: Clone repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          token: ${{ secrets.DENOBOT_PAT }}
          submodules: recursive
@@ -28,11 +33,9 @@ jobs:
       - uses: dsherret/rust-toolchain-file@v1
 
       - name: Install deno
-        uses: denoland/setup-deno@v1
+        uses: denoland/setup-deno@v2
         with:
-          # use a recent version instead of the latest version in case
-          # the latest version ever has issues that breaks publishing
-          deno-version: v1.31.3
+          deno-version: v1.x
 
       - name: Publish
         env:
.github/workflows/ci.generate.ts (632 changes, vendored): file diff suppressed because it is too large
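The suppressed diff belongs to the script that emits the ci.yml shown next; the workflow file is generated, not edited by hand (its first line says so). A minimal sketch of that generate-then-serialize pattern, assuming @std/yaml's stringify for output (the real generator is far larger and builds its matrix with its own helpers):

```ts
import { stringify } from "jsr:@std/yaml";

// Sketch only: the workflow is authored as a plain TypeScript object,
// then serialized to YAML with a warning header prepended.
const workflow = {
  name: "ci",
  permissions: { contents: "write" },
  on: { push: { branches: ["main"] } },
  jobs: {}, // the real generator composes jobs, matrices, and steps here
};

const header = "# GENERATED BY ./ci.generate.ts -- DO NOT DIRECTLY EDIT\n\n";
await Deno.writeTextFile(
  ".github/workflows/ci.yml",
  header + stringify(workflow),
);
```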
.github/workflows/ci.yml (537 changes, vendored)
@@ -1,6 +1,8 @@
 # GENERATED BY ./ci.generate.ts -- DO NOT DIRECTLY EDIT
 
 name: ci
+permissions:
+  contents: write
 on:
   push:
     branches:
@@ -29,114 +31,141 @@ jobs:
           git config --global fetch.parallel 32
         if: github.event.pull_request.draft == true
       - name: Clone repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           fetch-depth: 5
           submodules: false
         if: github.event.pull_request.draft == true
       - id: check
+        if: 'github.event.pull_request.draft == true && (!contains(github.event.pull_request.labels.*.name, ''ci-draft''))'
         run: |-
           GIT_MESSAGE=$(git log --format=%s -n 1 ${{github.event.after}})
           echo Commit message: $GIT_MESSAGE
-          echo $GIT_MESSAGE | grep '\[ci\]' || (echo 'Exiting due to draft PR. Commit with [ci] to bypass.' ; echo 'skip_build=true' >> $GITHUB_OUTPUT)
-        if: github.event.pull_request.draft == true
+          echo $GIT_MESSAGE | grep '\[ci\]' || (echo 'Exiting due to draft PR. Commit with [ci] to bypass or add the ci-draft label.' ; echo 'skip_build=true' >> $GITHUB_OUTPUT)
   build:
-    name: '${{ matrix.job }} ${{ matrix.profile }} ${{ matrix.os_display_name }}'
+    name: '${{ matrix.job }} ${{ matrix.profile }} ${{ matrix.os }}-${{ matrix.arch }}'
     needs:
       - pre_build
     if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}'
-    runs-on: '${{ matrix.runner || matrix.os }}'
-    timeout-minutes: 120
+    runs-on: '${{ matrix.runner }}'
+    timeout-minutes: 180
     defaults:
       run:
         shell: bash
     strategy:
       matrix:
         include:
-          - os: macos-12
+          - os: macos
+            arch: x86_64
+            runner: macos-13
             job: test
             profile: debug
-            os_display_name: macos-x86_64
-          - os: macos-12
+          - os: macos
+            arch: x86_64
+            runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-13'' }}'
             job: test
             profile: release
-            skip_pr: true
-            runner: '${{ github.event_name == ''pull_request'' && ''ubuntu-22.04'' || ''macos-12'' }}'
-            os_display_name: macos-x86_64
-          - os: windows-2022
+            skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
+          - os: macos
+            arch: aarch64
+            runner: macos-14
             job: test
             profile: debug
-            os_display_name: windows-x86_64
-          - os: '${{ github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}'
+          - os: macos
+            arch: aarch64
+            runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-14'' }}'
             job: test
             profile: release
-            skip_pr: true
-            runner: '${{ github.event_name == ''pull_request'' && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}'
-            os_display_name: windows-x86_64
-          - os: '${{ github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}'
+            skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
+          - os: windows
+            arch: x86_64
+            runner: windows-2022
             job: test
             profile: debug
+          - os: windows
+            arch: x86_64
+            runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}'
+            job: test
+            profile: release
+            skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
+          - os: linux
+            arch: x86_64
+            runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}'
+            job: test
+            profile: release
             use_sysroot: true
             wpt: '${{ !startsWith(github.ref, ''refs/tags/'') }}'
-            os_display_name: ubuntu-x86_64
-          - os: '${{ github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}'
+          - os: linux
+            arch: x86_64
+            runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}'
             job: bench
             profile: release
             use_sysroot: true
-            skip_pr: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-bench'') }}'
-            runner: '${{ github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'') && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}'
-            os_display_name: ubuntu-x86_64
-          - os: ubuntu-22.04
+            skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'')) }}'
+          - os: linux
+            arch: x86_64
+            runner: ubuntu-22.04
             job: test
             profile: debug
             use_sysroot: true
-            os_display_name: ubuntu-x86_64
-          - os: ubuntu-22.04
+          - os: linux
+            arch: x86_64
+            runner: ubuntu-22.04
             job: lint
             profile: debug
-            os_display_name: ubuntu-x86_64
-          - os: macos-12
+          - os: linux
+            arch: aarch64
+            runner: ubicloud-standard-16-arm
+            job: test
+            profile: debug
+          - os: linux
+            arch: aarch64
+            runner: ubicloud-standard-16-arm
+            job: test
+            profile: release
+            use_sysroot: true
+          - os: macos
+            arch: x86_64
+            runner: macos-13
             job: lint
             profile: debug
-            os_display_name: macos-x86_64
-          - os: windows-2022
+          - os: windows
+            arch: x86_64
+            runner: windows-2022
             job: lint
             profile: debug
-            os_display_name: windows-x86_64
         fail-fast: '${{ github.event_name == ''pull_request'' || (github.ref != ''refs/heads/main'' && !startsWith(github.ref, ''refs/tags/'')) }}'
     env:
       CARGO_TERM_COLOR: always
       RUST_BACKTRACE: full
       RUST_LIB_BACKTRACE: 0
     steps:
-      - name: Reconfigure Windows Storage
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (startsWith(matrix.os, ''windows'') && !endsWith(matrix.os, ''-xl''))'
-        shell: pwsh
-        run: |-
-          New-Item -ItemType "directory" -Path "$env:TEMP/__target__"
-          New-Item -ItemType Junction -Target "$env:TEMP/__target__" -Path "D:/a/deno/deno"
       - name: Configure git
         run: |-
           git config --global core.symlinks true
           git config --global fetch.parallel 32
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)'
+        if: '!(matrix.skip)'
       - name: Clone repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           fetch-depth: 5
           submodules: false
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)'
-      - name: Clone submodule ./test_util/std
-        run: git submodule update --init --recursive --depth=1 -- ./test_util/std
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)'
-      - name: Clone submodule ./test_util/wpt
-        run: git submodule update --init --recursive --depth=1 -- ./test_util/wpt
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.wpt)'
-      - name: Clone submodule ./tools/node_compat/node
-        run: git submodule update --init --recursive --depth=1 -- ./tools/node_compat/node
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'' && startsWith(matrix.os, ''ubuntu''))'
+        if: '!(matrix.skip)'
+      - name: Clone submodule ./tests/util/std
+        run: git submodule update --init --recursive --depth=1 -- ./tests/util/std
+        if: '!(matrix.skip)'
+      - name: Clone submodule ./tests/wpt/suite
+        run: git submodule update --init --recursive --depth=1 -- ./tests/wpt/suite
+        if: '!(matrix.skip) && (matrix.wpt)'
+      - name: Clone submodule ./tests/node_compat/runner/suite
+        run: git submodule update --init --recursive --depth=1 -- ./tests/node_compat/runner/suite
+        if: '!(matrix.skip) && (matrix.job == ''lint'' && matrix.os == ''linux'')'
+      - name: Clone submodule ./cli/bench/testdata/lsp_benchdata
+        run: git submodule update --init --recursive --depth=1 -- ./cli/bench/testdata/lsp_benchdata
+        if: '!(matrix.skip) && (matrix.job == ''bench'')'
       - name: 'Create source tarballs (release, linux)'
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (startsWith(matrix.os, 'ubuntu') &&
+          !(matrix.skip) && (matrix.os == 'linux' &&
           matrix.profile == 'release' &&
           matrix.job == 'test' &&
           github.repository == 'denoland/deno' &&
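The pre_build hunk above implements a draft-PR gate: builds are skipped unless the newest commit message contains `[ci]` or, after this change, the PR carries a `ci-draft` label. A hedged TypeScript sketch of the same decision, where `PR_DRAFT` and `PR_LABELS` are hypothetical inputs a step would have to wire in itself:

```ts
// Sketch only: skip expensive CI for draft PRs unless explicitly requested.
const isDraft = Deno.env.get("PR_DRAFT") === "true"; // assumed input
const hasDraftLabel = (Deno.env.get("PR_LABELS") ?? "").includes("ci-draft");

// Read the subject line of the most recent commit.
const { stdout } = await new Deno.Command("git", {
  args: ["log", "--format=%s", "-n", "1"],
}).output();
const msg = new TextDecoder().decode(stdout);

if (isDraft && !hasDraftLabel && !msg.includes("[ci]")) {
  console.log("Exiting due to draft PR. Commit with [ci] to bypass.");
  // Equivalent of: echo 'skip_build=true' >> $GITHUB_OUTPUT
  await Deno.writeTextFile(
    Deno.env.get("GITHUB_OUTPUT")!,
    "skip_build=true\n",
    { append: true },
  );
}
```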
@@ -146,44 +175,38 @@ jobs:
           tar --exclude=".git*" --exclude=target --exclude=third_party/prebuilt \
               -czvf target/release/deno_src.tar.gz -C .. deno
       - uses: dsherret/rust-toolchain-file@v1
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)'
-      - if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
+        if: '!(matrix.skip)'
+      - if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
         name: Install Deno
-        uses: denoland/setup-deno@v1
+        uses: denoland/setup-deno@v2
         with:
           deno-version: v1.x
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: 3.11
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job != ''lint'')'
+        if: '!(matrix.skip) && (matrix.job != ''lint'' && (matrix.os != ''linux'' || matrix.arch != ''aarch64''))'
       - name: Remove unused versions of Python
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job != ''lint'' && (startsWith(matrix.os, ''windows'')))'
+        if: '!(matrix.skip) && (matrix.job != ''lint'' && (matrix.os != ''linux'' || matrix.arch != ''aarch64'') && (matrix.os == ''windows''))'
         shell: pwsh
         run: |-
          $env:PATH -split ";" |
            Where-Object { Test-Path "$_\python.exe" } |
            Select-Object -Skip 1 |
            ForEach-Object { Move-Item "$_" "$_.disabled" }
-      - if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''bench'')'
+      - if: '!(matrix.skip) && (matrix.job == ''bench'' || matrix.job == ''test'')'
         name: Install Node
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 18
-      - name: Install protoc
-        uses: arduino/setup-protoc@v2
-        with:
-          version: '21.12'
-          repo-token: '${{ secrets.GITHUB_TOKEN }}'
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)'
       - if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.profile == 'release' &&
+          !(matrix.skip) && (matrix.profile == 'release' &&
           matrix.job == 'test' &&
           github.repository == 'denoland/deno' &&
           (github.ref == 'refs/heads/main' ||
           startsWith(github.ref, 'refs/tags/')))
         name: Authenticate with Google Cloud
-        uses: google-github-actions/auth@v1
+        uses: google-github-actions/auth@v2
         with:
           project_id: denoland
           credentials_json: '${{ secrets.GCP_SA_KEY }}'
@@ -191,165 +214,192 @@ jobs:
           create_credentials_file: true
       - name: Setup gcloud (unix)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (runner.os != 'Windows' &&
+          !(matrix.skip) && (matrix.os != 'windows' &&
           matrix.profile == 'release' &&
           matrix.job == 'test' &&
           github.repository == 'denoland/deno' &&
           (github.ref == 'refs/heads/main' ||
           startsWith(github.ref, 'refs/tags/')))
-        uses: google-github-actions/setup-gcloud@v1
+        uses: google-github-actions/setup-gcloud@v2
         with:
           project_id: denoland
       - name: Setup gcloud (windows)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (runner.os == 'Windows' &&
+          !(matrix.skip) && (matrix.os == 'windows' &&
           matrix.profile == 'release' &&
           matrix.job == 'test' &&
           github.repository == 'denoland/deno' &&
           (github.ref == 'refs/heads/main' ||
           startsWith(github.ref, 'refs/tags/')))
-        uses: google-github-actions/setup-gcloud@v1
+        uses: google-github-actions/setup-gcloud@v2
         env:
           CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe'
         with:
           project_id: denoland
       - name: Configure canary build
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' &&
+          !(matrix.skip) && (matrix.job == 'test' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno' &&
           github.ref == 'refs/heads/main')
         run: echo "DENO_CANARY=true" >> $GITHUB_ENV
-      - if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.use_sysroot)'
+      - if: '!(matrix.skip) && (matrix.use_sysroot)'
         name: Set up incremental LTO and sysroot build
         run: |-
           # Setting up sysroot
           export DEBIAN_FRONTEND=noninteractive
           # Avoid running man-db triggers, which sometimes takes several minutes
           # to complete.
-          sudo apt-get remove --purge -y man-db
+          sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
           # Remove older clang before we install
-          sudo apt-get remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*'
+          sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null
 
           # Install clang-XXX, lld-XXX, and debootstrap.
-          echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-16 main" |
-            sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-16.list
+          echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-18 main" |
+            sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-18.list
           curl https://apt.llvm.org/llvm-snapshot.gpg.key |
             gpg --dearmor |
             sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg
           sudo apt-get update
           # this was unreliable sometimes, so try again if it fails
-          sudo apt-get install --no-install-recommends debootstrap clang-16 lld-16 clang-tools-16 clang-format-16 clang-tidy-16 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends debootstrap clang-16 lld-16 clang-tools-16 clang-format-16 clang-tidy-16
+          sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18
           # Fix alternatives
-          (yes '' | sudo update-alternatives --force --all) || true
+          (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
 
-          # Create ubuntu-16.04 sysroot environment, which is used to avoid
-          # depending on a very recent version of glibc.
-          # `libc6-dev` is required for building any C source files.
-          # `file` and `make` are needed to build libffi-sys.
-          # `curl` is needed to build rusty_v8.
-          sudo debootstrap \
-            --include=ca-certificates,curl,file,libc6-dev,make \
-            --no-merged-usr --variant=minbase xenial /sysroot \
-            http://azure.archive.ubuntu.com/ubuntu
+          echo "Decompressing sysroot..."
+          wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
+          cd /
+          xzcat /tmp/sysroot.tar.xz | sudo tar -x
+          sudo mount --rbind /dev /sysroot/dev
+          sudo mount --rbind /sys /sysroot/sys
+          sudo mount --rbind /home /sysroot/home
+          sudo mount -t proc /proc /sysroot/proc
+          cd
 
-          wget https://github.com/denoland/deno_third_party/raw/master/prebuilt/linux64/libdl/libdl.a
-          wget https://github.com/denoland/deno_third_party/raw/master/prebuilt/linux64/libdl/libdl.so.2
-
-          sudo ln -s libdl.so.2 /sysroot/lib/x86_64-linux-gnu/libdl.so
-          sudo ln -s libdl.a /sysroot/lib/x86_64-linux-gnu/libdl.a
+          echo "Done."
 
           # Configure the build environment. Both Rust and Clang will produce
           # llvm bitcode only, so we can use lld's incremental LTO support.
-          cat >> $GITHUB_ENV << __0
+
+          # Load the sysroot's env vars
+          echo "sysroot env:"
+          cat /sysroot/.env
+          . /sysroot/.env
 
           # Important notes:
           #   1. -ldl seems to be required to avoid a failure in FFI tests. This flag seems
           #      to be in the Rust default flags in the smoketest, so uncertain why we need
           #      to be explicit here.
           #   2. RUSTFLAGS and RUSTDOCFLAGS must be specified, otherwise the doctests fail
           #      to build because the object formats are not compatible.
+          echo "
           CARGO_PROFILE_BENCH_INCREMENTAL=false
           CARGO_PROFILE_BENCH_LTO=false
           CARGO_PROFILE_RELEASE_INCREMENTAL=false
           CARGO_PROFILE_RELEASE_LTO=false
           RUSTFLAGS<<__1
             -C linker-plugin-lto=true
-            -C linker=clang-16
-            -C link-arg=-fuse-ld=lld-16
-            -C link-arg=--sysroot=/sysroot
+            -C linker=clang-18
+            -C link-arg=-fuse-ld=lld-18
             -C link-arg=-ldl
             -C link-arg=-Wl,--allow-shlib-undefined
             -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
             -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m
             --cfg tokio_unstable
-            ${{ env.RUSTFLAGS }}
+            $RUSTFLAGS
           __1
           RUSTDOCFLAGS<<__1
             -C linker-plugin-lto=true
-            -C linker=clang-16
-            -C link-arg=-fuse-ld=lld-16
-            -C link-arg=--sysroot=/sysroot
+            -C linker=clang-18
+            -C link-arg=-fuse-ld=lld-18
             -C link-arg=-ldl
             -C link-arg=-Wl,--allow-shlib-undefined
             -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
             -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m
-            ${{ env.RUSTFLAGS }}
+            --cfg tokio_unstable
+            $RUSTFLAGS
           __1
-          CC=clang-16
-          CFLAGS=-flto=thin --sysroot=/sysroot
-          __0
+          CC=/usr/bin/clang-18
+          CFLAGS=-flto=thin $CFLAGS
+          " > $GITHUB_ENV
+      - name: Remove macOS cURL --ipv4 flag
+        run: |-
+          curl --version
+          which curl
+          cat /etc/hosts
+          rm ~/.curlrc || true
+        if: '!(matrix.skip) && (matrix.os == ''macos'')'
+      - name: Install macOS aarch64 lld
+        run: ./tools/install_prebuilt.js ld64.lld
+        if: '!(matrix.skip) && (matrix.os == ''macos'' && matrix.arch == ''aarch64'')'
       - name: Install rust-codesign
         run: |-
           ./tools/install_prebuilt.js rcodesign
           echo $GITHUB_WORKSPACE/third_party/prebuilt/mac >> $GITHUB_PATH
+        if: '!(matrix.skip) && (matrix.os == ''macos'')'
       - name: Log versions
         run: |-
-          python --version
-          rustc --version
-          cargo --version
-          which dpkg && dpkg -l
-          if [[ "${{ matrix.job }}" == "lint" ]] || [[ "${{ matrix.job }}" == "test" ]]; then
-            deno --version
-          fi
-          if [ "${{ matrix.job }}" == "bench" ]
-          then
-            node -v
-            ./tools/install_prebuilt.js wrk hyperfine
-          fi
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)'
+          echo '*** Python'
+          command -v python && python --version || echo 'No python found or bad executable'
+          echo '*** Rust'
+          command -v rustc && rustc --version || echo 'No rustc found or bad executable'
+          echo '*** Cargo'
+          command -v cargo && cargo --version || echo 'No cargo found or bad executable'
+          echo '*** Deno'
+          command -v deno && deno --version || echo 'No deno found or bad executable'
+          echo '*** Node'
+          command -v node && node --version || echo 'No node found or bad executable'
+          echo '*** Installed packages'
+          command -v dpkg && dpkg -l || echo 'No dpkg found or bad executable'
+        if: '!(matrix.skip)'
+      - name: Install benchmark tools
+        if: '!(matrix.skip) && (matrix.job == ''bench'')'
+        run: ./tools/install_prebuilt.js wrk hyperfine
       - name: Cache Cargo home
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: |-
             ~/.cargo/registry/index
             ~/.cargo/registry/cache
-          key: '51-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '51-cargo-home-${{ matrix.os }}'
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)'
+          key: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+        if: '!(matrix.skip)'
       - name: Restore cache build output (PR)
-        uses: actions/cache/restore@v3
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (github.ref != ''refs/heads/main'' && !startsWith(github.ref, ''refs/tags/''))'
+        uses: actions/cache/restore@v4
+        if: '!(matrix.skip) && (github.ref != ''refs/heads/main'' && !startsWith(github.ref, ''refs/tags/''))'
         with:
           path: |-
             ./target
             !./target/*/gn_out
             !./target/*/gn_root
             !./target/*/*.zip
             !./target/*/*.tar.gz
           key: never_saved
-          restore-keys: '51-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
       - name: Apply and update mtime cache
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))'
+        if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
         uses: ./.github/mtime_cache
         with:
           cache-path: ./target
       - name: test_format.js
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'' && startsWith(matrix.os, ''ubuntu''))'
-        run: deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check
+        if: '!(matrix.skip) && (matrix.job == ''lint'' && matrix.os == ''linux'')'
+        run: deno run --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check
       - name: Lint PR title
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'' && github.event_name == ''pull_request'' && startsWith(matrix.os, ''ubuntu''))'
+        if: '!(matrix.skip) && (matrix.job == ''lint'' && github.event_name == ''pull_request'' && matrix.os == ''linux'')'
         env:
           PR_TITLE: '${{ github.event.pull_request.title }}'
         run: deno run ./tools/verify_pr_title.js "$PR_TITLE"
       - name: lint.js
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'')'
-        run: deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/lint.js
+        if: '!(matrix.skip) && (matrix.job == ''lint'')'
+        run: deno run --allow-write --allow-read --allow-run --allow-net ./tools/lint.js
+      - name: jsdoc_checker.js
+        if: '!(matrix.skip) && (matrix.job == ''lint'')'
+        run: deno run --allow-read --allow-env --allow-sys ./tools/jsdoc_checker.js
       - name: node_compat/setup.ts --check
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'' && startsWith(matrix.os, ''ubuntu''))'
-        run: deno run --allow-write --allow-read --allow-run=git ./tools/node_compat/setup.ts --check
+        if: '!(matrix.skip) && (matrix.job == ''lint'' && matrix.os == ''linux'')'
+        run: deno run --allow-write --allow-read --allow-run=git ./tests/node_compat/runner/setup.ts --check
       - name: Build debug
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''test'' && matrix.profile == ''debug'')'
+        if: '!(matrix.skip) && (matrix.job == ''test'' && matrix.profile == ''debug'')'
         run: |-
+          df -h
           cargo build --locked --all-targets
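The sysroot step above exports multi-line RUSTFLAGS to later steps by writing a `NAME<<DELIMITER ... DELIMITER` block into the file behind `$GITHUB_ENV`, which is GitHub Actions' documented syntax for multi-line environment values. A minimal TypeScript sketch of the same mechanism, with illustrative flag values:

```ts
// Sketch only: append a multi-line env var in GitHub's NAME<<DELIM format
// so that every subsequent step in the job sees it.
const githubEnv = Deno.env.get("GITHUB_ENV")!;
const rustflags = [
  "-C linker-plugin-lto=true",
  "-C linker=clang-18",
  "-C link-arg=-fuse-ld=lld-18",
].join("\n");
await Deno.writeTextFile(
  githubEnv,
  `RUSTFLAGS<<__EOF__\n${rustflags}\n__EOF__\n`,
  { append: true },
);
```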
@@ -358,154 +408,151 @@ jobs:
           CARGO_PROFILE_DEV_DEBUG: 0
       - name: Build release
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && ((matrix.job == 'test' || matrix.job == 'bench') &&
+          !(matrix.skip) && ((matrix.job == 'test' || matrix.job == 'bench') &&
           matrix.profile == 'release' && (matrix.use_sysroot ||
-          (github.repository == 'denoland/deno' &&
-          (github.ref == 'refs/heads/main' ||
-          startsWith(github.ref, 'refs/tags/')))))
+          github.repository == 'denoland/deno'))
         run: |-
+          df -h
           cargo build --release --locked --all-targets
+          df -h
+      - name: Check deno binary
+        if: '!(matrix.skip) && (matrix.job == ''test'')'
+        run: 'target/${{ matrix.profile }}/deno eval "console.log(1+2)" | grep 3'
+        env:
+          NO_COLOR: 1
+      - name: Check deno binary (in sysroot)
+        if: '!(matrix.skip) && (matrix.job == ''test'' && matrix.use_sysroot)'
+        run: 'sudo chroot /sysroot "$(pwd)/target/${{ matrix.profile }}/deno" --version'
       - name: Upload PR artifact (linux)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' &&
+          !(matrix.skip) && (matrix.job == 'test' &&
           matrix.profile == 'release' && (matrix.use_sysroot ||
           (github.repository == 'denoland/deno' &&
           (github.ref == 'refs/heads/main' ||
           startsWith(github.ref, 'refs/tags/')))))
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: 'deno-${{ github.event.number }}'
+          name: 'deno-${{ matrix.os }}-${{ matrix.arch }}-${{ github.event.number }}'
           path: target/release/deno
       - name: Pre-release (linux)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (startsWith(matrix.os, 'ubuntu') &&
+          !(matrix.skip) && (matrix.os == 'linux' &&
           matrix.job == 'test' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno')
         run: |-
           cd target/release
-          zip -r deno-x86_64-unknown-linux-gnu.zip deno
+          zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno
+          shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
+          strip denort
+          zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort
+          shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
           ./deno types > lib.deno.d.ts
       - name: Pre-release (mac)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (startsWith(matrix.os, 'macOS') &&
+          !(matrix.skip) && (matrix.os == 'macos' &&
           matrix.job == 'test' &&
           matrix.profile == 'release' &&
-          github.repository == 'denoland/deno' &&
-          (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')))
+          github.repository == 'denoland/deno')
         env:
           APPLE_CODESIGN_KEY: '${{ secrets.APPLE_CODESIGN_KEY }}'
           APPLE_CODESIGN_PASSWORD: '${{ secrets.APPLE_CODESIGN_PASSWORD }}'
         run: |-
           echo "Key is $(echo $APPLE_CODESIGN_KEY | base64 -d | wc -c) bytes"
           rcodesign sign target/release/deno --code-signature-flags=runtime --p12-password="$APPLE_CODESIGN_PASSWORD" --p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) --entitlements-xml-file=cli/entitlements.plist
           cd target/release
-          zip -r deno-x86_64-apple-darwin.zip deno
+          zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno
+          shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum
+          strip denort
+          zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort
+          shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum
       - name: Pre-release (windows)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (startsWith(matrix.os, 'windows') &&
+          !(matrix.skip) && (matrix.os == 'windows' &&
           matrix.job == 'test' &&
           matrix.profile == 'release' &&
-          github.repository == 'denoland/deno' &&
-          (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')))
+          github.repository == 'denoland/deno')
         shell: pwsh
-        run: Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-x86_64-pc-windows-msvc.zip
-      - name: Upload canary to dl.deno.land (unix)
+        run: |-
+          Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip
+          Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
+          Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip
+          Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
+      - name: Upload canary to dl.deno.land
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (runner.os != 'Windows' &&
-          matrix.job == 'test' &&
+          !(matrix.skip) && (matrix.job == 'test' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno' &&
           github.ref == 'refs/heads/main')
-        run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/'
-      - name: Upload canary to dl.deno.land (windows)
-        if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (runner.os == 'Windows' &&
-          matrix.job == 'test' &&
-          matrix.profile == 'release' &&
-          github.repository == 'denoland/deno' &&
-          github.ref == 'refs/heads/main')
-        env:
-          CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe'
-        run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/'
+        run: |-
+          gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/
+          gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/
+          echo ${{ github.sha }} > canary-latest.txt
+          gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt
       - name: Autobahn testsuite
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'release' &&
-          !startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu'))
-        run: target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js
-      - name: Test debug
+          !(matrix.skip) && ((matrix.os == 'linux' && matrix.arch != 'aarch64') &&
+          matrix.job == 'test' &&
+          matrix.profile == 'release' &&
+          !startsWith(github.ref, 'refs/tags/'))
+        run: target/release/deno run -A --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js
+      - name: 'Test (full, debug)'
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'debug' &&
-          !startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu'))
+          !(matrix.skip) && (matrix.job == 'test' &&
+          matrix.profile == 'debug' &&
+          !startsWith(github.ref, 'refs/tags/') &&
+          matrix.os == 'linux')
         run: cargo test --locked
         env:
           CARGO_PROFILE_DEV_DEBUG: 0
-      - name: Test debug (fast)
+      - name: 'Test (fast, debug)'
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'debug' &&
-          !startsWith(matrix.os, 'ubuntu'))
+          !(matrix.skip) && (matrix.job == 'test' &&
+          matrix.profile == 'debug' &&
+          (startsWith(github.ref, 'refs/tags/') || matrix.os != 'linux'))
         run: |-
           cargo test --locked --lib
-          cargo test --locked --test '*'
+          cargo test --locked --tests
         env:
           CARGO_PROFILE_DEV_DEBUG: 0
-      - name: Test examples debug
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''test'' && matrix.profile == ''debug'')'
-        run: |-
-          cargo run -p deno_runtime --example extension_with_esm
-          cargo run -p deno_runtime --example extension_with_esm --features include_js_files_for_snapshotting
-        env:
-          CARGO_PROFILE_DEV_DEBUG: 0
-      - name: Test release
+      - name: Test (release)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'release' &&
+          !(matrix.skip) && (matrix.job == 'test' &&
+          matrix.profile == 'release' &&
           (matrix.use_sysroot || (
           github.repository == 'denoland/deno' &&
-          github.ref == 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/'))))
+          !startsWith(github.ref, 'refs/tags/'))))
         run: cargo test --release --locked
-      - name: Check deno binary
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.profile == ''release'' && startsWith(github.ref, ''refs/tags/''))'
-        run: target/release/deno eval "console.log(1+2)" | grep 3
-        env:
-          NO_COLOR: 1
-      - name: Check deno binary (in sysroot)
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.profile == ''release'' && matrix.use_sysroot)'
-        run: sudo chroot /sysroot "$(pwd)/target/release/deno" --version
       - name: Configure hosts file for WPT
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.wpt)'
+        if: '!(matrix.skip) && (matrix.wpt)'
         run: ./wpt make-hosts-file | sudo tee -a /etc/hosts
-        working-directory: test_util/wpt/
+        working-directory: tests/wpt/suite/
       - name: Run web platform tests (debug)
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.wpt && matrix.profile == ''debug'')'
+        if: '!(matrix.skip) && (matrix.wpt && matrix.profile == ''debug'')'
         env:
           DENO_BIN: ./target/debug/deno
         run: |-
-          deno run --allow-env --allow-net --allow-read --allow-run \
-            --allow-write --unstable \
-            --lock=tools/deno.lock.json \
-            ./tools/wpt.ts setup
-          deno run --allow-env --allow-net --allow-read --allow-run \
-            --allow-write --unstable \
-            --lock=tools/deno.lock.json \
-            ./tools/wpt.ts run --quiet --binary="$DENO_BIN"
+          deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
+            ./tests/wpt/wpt.ts setup
+          deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
+            ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"
       - name: Run web platform tests (release)
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.wpt && matrix.profile == ''release'')'
+        if: '!(matrix.skip) && (matrix.wpt && matrix.profile == ''release'')'
         env:
           DENO_BIN: ./target/release/deno
         run: |-
-          deno run --allow-env --allow-net --allow-read --allow-run \
-            --allow-write --unstable \
-            --lock=tools/deno.lock.json \
-            ./tools/wpt.ts setup
-          deno run --allow-env --allow-net --allow-read --allow-run \
-            --allow-write --unstable \
-            --lock=tools/deno.lock.json \
-            ./tools/wpt.ts run --quiet --release \
+          deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
+            ./tests/wpt/wpt.ts setup
+          deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
+            ./tests/wpt/wpt.ts run --quiet --release \
             --binary="$DENO_BIN" \
             --json=wpt.json \
             --wptreport=wptreport.json
       - name: Upload wpt results to dl.deno.land
         continue-on-error: true
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.wpt &&
-          runner.os == 'Linux' &&
+          !(matrix.skip) && (matrix.wpt &&
+          matrix.os == 'linux' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno' &&
           github.ref == 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/'))
@@ -518,8 +565,8 @@ jobs:
       - name: Upload wpt results to wpt.fyi
         continue-on-error: true
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.wpt &&
-          runner.os == 'Linux' &&
+          !(matrix.skip) && (matrix.wpt &&
+          matrix.os == 'linux' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno' &&
           github.ref == 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/'))
@@ -531,11 +578,11 @@ jobs:
           ./target/release/deno run --allow-all --lock=tools/deno.lock.json \
             ./tools/upload_wptfyi.js $(git rev-parse HEAD) --ghstatus
       - name: Run benchmarks
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''bench'' && !startsWith(github.ref, ''refs/tags/''))'
+        if: '!(matrix.skip) && (matrix.job == ''bench'' && !startsWith(github.ref, ''refs/tags/''))'
         run: cargo bench --locked
       - name: Post Benchmarks
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'bench' &&
+          !(matrix.skip) && (matrix.job == 'bench' &&
           github.repository == 'denoland/deno' &&
           github.ref == 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/'))
         env:
@@ -544,8 +591,7 @@ jobs:
           git clone --depth 1 --branch gh-pages \
             https://${DENOBOT_PAT}@github.com/denoland/benchmark_data.git \
             gh-pages
-          ./target/release/deno run --allow-all --unstable \
-            ./tools/build_benchmark_jsons.js --release
+          ./target/release/deno run --allow-all ./tools/build_benchmark_jsons.js --release
           cd gh-pages
           git config user.email "propelml@gmail.com"
           git config user.name "denobot"
@@ -553,36 +599,41 @@ jobs:
           git commit --message "Update benchmarks"
           git push origin gh-pages
       - name: Build product size info
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job != ''lint'' && matrix.profile != ''debug'' && github.repository == ''denoland/deno'' && (github.ref == ''refs/heads/main'' || startsWith(github.ref, ''refs/tags/'')))'
+        if: '!(matrix.skip) && (matrix.job != ''lint'' && matrix.profile != ''debug'' && github.repository == ''denoland/deno'' && (github.ref == ''refs/heads/main'' || startsWith(github.ref, ''refs/tags/'')))'
         run: |-
           du -hd1 "./target/${{ matrix.profile }}"
           du -ha "./target/${{ matrix.profile }}/deno"
           du -ha "./target/${{ matrix.profile }}/denort"
       - name: Worker info
-        if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''bench'')'
+        if: '!(matrix.skip) && (matrix.job == ''bench'')'
         run: |-
           cat /proc/cpuinfo
           cat /proc/meminfo
       - name: Upload release to dl.deno.land (unix)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (runner.os != 'Windows' &&
+          !(matrix.skip) && (matrix.os != 'windows' &&
           matrix.job == 'test' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno' &&
           startsWith(github.ref, 'refs/tags/'))
-        run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/'
+        run: |-
+          gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
+          gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
       - name: Upload release to dl.deno.land (windows)
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (runner.os == 'Windows' &&
+          !(matrix.skip) && (matrix.os == 'windows' &&
           matrix.job == 'test' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno' &&
           startsWith(github.ref, 'refs/tags/'))
         env:
           CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe'
-        run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/'
+        run: |-
+          gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
+          gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
       - name: Create release notes
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' &&
+          !(matrix.skip) && (matrix.job == 'test' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno' &&
           startsWith(github.ref, 'refs/tags/'))
@@ -592,7 +643,7 @@ jobs:
       - name: Upload release to GitHub
         uses: softprops/action-gh-release@v0.1.15
         if: |-
-          !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' &&
+          !(matrix.skip) && (matrix.job == 'test' &&
           matrix.profile == 'release' &&
           github.repository == 'denoland/deno' &&
           startsWith(github.ref, 'refs/tags/'))
@ -601,22 +652,40 @@ jobs:
|
|||
with:
|
||||
files: |-
|
||||
target/release/deno-x86_64-pc-windows-msvc.zip
|
||||
target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum
|
||||
target/release/denort-x86_64-pc-windows-msvc.zip
|
||||
target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum
|
||||
target/release/deno-x86_64-unknown-linux-gnu.zip
|
||||
target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum
|
||||
target/release/denort-x86_64-unknown-linux-gnu.zip
|
||||
target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum
|
||||
target/release/deno-x86_64-apple-darwin.zip
|
||||
target/release/deno-x86_64-apple-darwin.zip.sha256sum
|
||||
target/release/denort-x86_64-apple-darwin.zip
|
||||
target/release/denort-x86_64-apple-darwin.zip.sha256sum
|
||||
target/release/deno-aarch64-unknown-linux-gnu.zip
|
||||
target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum
|
||||
target/release/denort-aarch64-unknown-linux-gnu.zip
|
||||
target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum
|
||||
target/release/deno-aarch64-apple-darwin.zip
|
||||
target/release/deno-aarch64-apple-darwin.zip.sha256sum
|
||||
target/release/denort-aarch64-apple-darwin.zip
|
||||
target/release/denort-aarch64-apple-darwin.zip.sha256sum
|
||||
target/release/deno_src.tar.gz
|
||||
target/release/lib.deno.d.ts
|
||||
body_path: target/release/release-notes.md
|
||||
draft: true
|
||||
- name: Save cache build output (main)
|
||||
uses: actions/cache/save@v3
|
||||
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && ((matrix.job == ''test'' || matrix.job == ''lint'') && github.ref == ''refs/heads/main'')'
|
||||
uses: actions/cache/save@v4
|
||||
if: '!(matrix.skip) && ((matrix.job == ''test'' || matrix.job == ''lint'') && github.ref == ''refs/heads/main'')'
|
||||
with:
|
||||
path: |-
|
||||
./target
|
||||
!./target/*/gn_out
|
||||
!./target/*/*.zip
|
||||
!./target/*/*.sha256sum
|
||||
!./target/*/*.tar.gz
|
||||
key: '51-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
key: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
publish-canary:
|
||||
name: publish canary
|
||||
runs-on: ubuntu-22.04
|
||||
|
@ -625,14 +694,14 @@ jobs:
|
|||
if: github.repository == 'denoland/deno' && github.ref == 'refs/heads/main'
|
||||
steps:
|
||||
- name: Authenticate with Google Cloud
|
||||
uses: google-github-actions/auth@v1
|
||||
uses: google-github-actions/auth@v2
|
||||
with:
|
||||
project_id: denoland
|
||||
credentials_json: '${{ secrets.GCP_SA_KEY }}'
|
||||
export_environment_variables: true
|
||||
create_credentials_file: true
|
||||
- name: Setup gcloud
|
||||
uses: google-github-actions/setup-gcloud@v1
|
||||
uses: google-github-actions/setup-gcloud@v2
|
||||
with:
|
||||
project_id: denoland
|
||||
- name: Upload canary version file to dl.deno.land
|
||||
|
|
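A quick aside on the `${GITHUB_REF#refs/*/}` expansion used in the upload steps above: it strips the `refs/tags/` (or `refs/heads/`) prefix from the Git ref to get the destination folder under `gs://dl.deno.land/release/`. A minimal TypeScript sketch of the same derivation; the fallback ref value is only illustrative:

```ts
// Sketch of the ${GITHUB_REF#refs/*/} shell expansion used in the steps above.
// Run with: deno run --allow-env ref.ts
const githubRef = Deno.env.get("GITHUB_REF") ?? "refs/tags/v2.0.3";
const tag = githubRef.replace(/^refs\/[^/]+\//, "");
console.log(`gs://dl.deno.land/release/${tag}/`);
```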
62 .github/workflows/promote_to_release.yml vendored Normal file
@ -0,0 +1,62 @@
name: promote_to_release

on:
  workflow_dispatch:
    inputs:
      releaseKind:
        description: 'Kind of release'
        type: choice
        options:
          - rc
          - lts
        required: true
      commitHash:
        description: Commit to promote to release
        required: true

jobs:
  promote-to-release:
    name: Promote to Release
    runs-on: macOS-latest
    if: github.repository == 'denoland/deno'
    steps:
      - name: Clone repository
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.DENOBOT_PAT }}
          submodules: recursive

      - name: Authenticate with Google Cloud
        uses: google-github-actions/auth@v1
        with:
          project_id: denoland
          credentials_json: ${{ secrets.GCP_SA_KEY }}
          export_environment_variables: true
          create_credentials_file: true

      - name: Setup gcloud
        uses: google-github-actions/setup-gcloud@v1
        with:
          project_id: denoland

      - name: Install deno
        uses: denoland/setup-deno@v2
        with:
          deno-version: v1.x

      - name: Install rust-codesign
        run: |-
          ./tools/install_prebuilt.js rcodesign
          echo $GITHUB_WORKSPACE/third_party/prebuilt/mac >> $GITHUB_PATH

      - name: Promote to Release
        env:
          APPLE_CODESIGN_KEY: '${{ secrets.APPLE_CODESIGN_KEY }}'
          APPLE_CODESIGN_PASSWORD: '${{ secrets.APPLE_CODESIGN_PASSWORD }}'
        run: |
          deno run -A ./tools/release/promote_to_release.ts ${{github.event.inputs.releaseKind}} ${{github.event.inputs.commitHash}}

      - name: Upload archives to dl.deno.land
        run: |
          gsutil -h "Cache-Control: public, max-age=3600" cp ./*.zip gs://dl.deno.land/release/$(cat release-${{github.event.inputs.releaseKind}}-latest.txt)/
          gsutil -h "Cache-Control: no-cache" cp release-${{github.event.inputs.releaseKind}}-latest.txt gs://dl.deno.land/release-${{github.event.inputs.releaseKind}}-latest.txt
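For context on the final step above: the upload keys off a `release-<kind>-latest.txt` marker that `promote_to_release.ts` is expected to write. A hedged TypeScript sketch of how the destination is derived from that file; the file name and contents here are assumptions based only on the step shown:

```ts
// Illustrative only: read the marker file written by the promote script and
// build the dl.deno.land destination the gsutil step uploads to.
const kind = "rc"; // mirrors the releaseKind workflow input
const latest = (await Deno.readTextFile(`release-${kind}-latest.txt`)).trim();
console.log(`destination: gs://dl.deno.land/release/${latest}/`);
```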
8 .github/workflows/start_release.yml vendored
@ -31,14 +31,12 @@ jobs:
git config --global fetch.parallel 32

- name: Clone repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Install deno
uses: denoland/setup-deno@v1
uses: denoland/setup-deno@v2
with:
# use a recent version instead of the latest version in case
# the latest version ever has issues that breaks publishing
deno-version: v1.31.3
deno-version: v1.x

- name: Create Gist URL
env:
8 .github/workflows/version_bump.yml vendored
@ -31,7 +31,7 @@ jobs:
git config --global fetch.parallel 32

- name: Clone repository
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
token: ${{ secrets.DENOBOT_PAT }}
submodules: recursive
@ -39,11 +39,9 @@ jobs:
- uses: dsherret/rust-toolchain-file@v1

- name: Install deno
uses: denoland/setup-deno@v1
uses: denoland/setup-deno@v2
with:
# use a recent version instead of the latest version in case
# the latest version ever has issues that breaks publishing
deno-version: v1.31.3
deno-version: v1.x

- name: Run version bump
run: |
26 .github/workflows/wpt_epoch.yml vendored
@ -24,20 +24,20 @@ jobs:

steps:
- name: Clone repository
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
submodules: true
persist-credentials: false

- name: Setup Deno
uses: denoland/setup-deno@v1
uses: denoland/setup-deno@v2
with:
deno-version: ${{ matrix.deno-version }}

- name: Install Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.11"
python-version: '3.11'

- name: Log versions
run: |
@ -45,7 +45,7 @@ jobs:
deno --version

- name: Switch WPT submodule to epochs/daily
working-directory: test_util/wpt/
working-directory: tests/wpt/suite/
shell: bash
run: |
git remote set-branches origin '*'
@ -55,24 +55,22 @@ jobs:

- name: Configure hosts file for WPT (unix)
if: runner.os != 'Windows'
working-directory: test_util/wpt/
working-directory: tests/wpt/suite/
run: ./wpt make-hosts-file | sudo tee -a /etc/hosts

- name: Configure hosts file for WPT (windows)
if: runner.os == 'Windows'
working-directory: test_util/wpt/
working-directory: tests/wpt/suite/
run: python wpt make-hosts-file | Out-File $env:SystemRoot\System32\drivers\etc\hosts -Encoding ascii -Append

- name: Run web platform tests
shell: bash
run: |
deno run --unstable --allow-write --allow-read --allow-net \
--allow-env --allow-run --lock=tools/deno.lock.json \
./tools/wpt.ts setup
deno run --unstable --allow-write --allow-read --allow-net \
--allow-env --allow-run --lock=tools/deno.lock.json \
./tools/wpt.ts run \
--binary=$(which deno) --quiet --release --no-ignore --json=wpt.json --wptreport=wptreport.json || true
deno run -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
./tests/wpt/wpt.ts setup
deno run -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
./tests/wpt/wpt.ts run \
--binary=$(which deno) --quiet --release --no-ignore --json=wpt.json --wptreport=wptreport.json --exit-zero

- name: Upload wpt results to wpt.fyi
env:
16 .gitignore vendored
@ -10,11 +10,11 @@
gclient_config.py_entries
/target/
/std/hash/_wasm/target
/tools/wpt/manifest.json
/tests/wpt/runner/manifest.json
/third_party/
/test_napi/node_modules
/test_napi/build
/test_napi/third_party_tests/node_modules
/tests/napi/node_modules
/tests/napi/build
/tests/napi/third_party_tests/node_modules

# MacOS generated files
.DS_Store
@ -25,10 +25,14 @@ gclient_config.py_entries
/flamegraph*.svg

# WPT generated cert files
/tools/wpt/certs/index.txt*
/tools/wpt/certs/serial*
/tests/wpt/runner/certs/index.txt*
/tests/wpt/runner/certs/serial*

/ext/websocket/autobahn/reports

# JUnit files produced by deno test --junit
junit.xml

# Jupyter files
.ipynb_checkpoints/
Untitled*.ipynb
19 .gitmodules vendored
@ -1,11 +1,16 @@
[submodule "test_util/std"]
path = test_util/std
[submodule "tests/util/std"]
path = tests/util/std
url = https://github.com/denoland/deno_std
shallow = true
[submodule "test_util/wpt"]
path = test_util/wpt
[submodule "tests/wpt/suite"]
path = tests/wpt/suite
url = https://github.com/web-platform-tests/wpt.git

[submodule "tools/node_compat/node"]
path = tools/node_compat/node
shallow = true
[submodule "tests/node_compat/runner/suite"]
path = tests/node_compat/runner/suite
url = https://github.com/denoland/node_test.git
shallow = true
[submodule "cli/bench/testdata/lsp_benchdata"]
path = cli/bench/testdata/lsp_benchdata
url = https://github.com/denoland/deno_lsp_benchdata.git
shallow = true
5710 Cargo.lock generated
File diff suppressed because it is too large
370 Cargo.toml
@ -1,18 +1,15 @@
# Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

[workspace]
resolver = "2"
members = [
"bench_util",
"cli",
"cli/napi/sym",
"runtime",
"test_ffi",
"test_napi",
"test_util",
"ext/broadcast_channel",
"ext/cache",
"ext/canvas",
"ext/console",
"ext/cron",
"ext/crypto",
"ext/fetch",
"ext/ffi",
@ -20,16 +17,26 @@ members = [
"ext/http",
"ext/io",
"ext/kv",
"ext/napi",
"ext/napi/sym",
"ext/net",
"ext/node",
"ext/url",
"ext/web",
"ext/webgpu",
"ext/webidl",
"ext/websocket",
"ext/webstorage",
"ext/napi",
"resolvers/deno",
"resolvers/node",
"runtime",
"runtime/permissions",
"tests",
"tests/ffi",
"tests/napi",
"tests/util/server",
]
exclude = ["test_util/std/hash/_wasm"]
exclude = ["tests/util/std/hash/_wasm"]

[workspace.package]
authors = ["the Deno authors"]
@ -38,138 +45,190 @@ license = "MIT"
repository = "https://github.com/denoland/deno"

[workspace.dependencies]
deno_ast = { version = "0.28.0", features = ["transpiling"] }
deno_ast = { version = "=0.42.2", features = ["transpiling"] }
deno_core = { version = "0.314.2" }

deno_core = "0.204.0"
deno_bench_util = { version = "0.168.0", path = "./bench_util" }
deno_lockfile = "=0.23.1"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
deno_npm = "=0.25.4"
deno_path_util = "=0.2.1"
deno_permissions = { version = "0.34.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.183.0", path = "./runtime" }
deno_semver = "=0.5.16"
deno_terminal = "0.2.0"
napi_sym = { version = "0.104.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }

deno_runtime = { version = "0.125.0", path = "./runtime" }
napi_sym = { version = "0.47.0", path = "./cli/napi/sym" }
deno_bench_util = { version = "0.111.0", path = "./bench_util" }
test_util = { path = "./test_util" }
deno_lockfile = "0.15.0"
deno_media_type = { version = "0.1.1", features = ["module_specifier"] }
deno_npm = "0.12.0"
deno_semver = "0.4.0"
denokv_proto = "0.8.1"
denokv_remote = "0.8.1"
# denokv_sqlite brings in bundled sqlite if we don't disable the default features
denokv_sqlite = { default-features = false, version = "0.8.2" }

# exts
deno_broadcast_channel = { version = "0.111.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.49.0", path = "./ext/cache" }
deno_console = { version = "0.117.0", path = "./ext/console" }
deno_crypto = { version = "0.131.0", path = "./ext/crypto" }
deno_fetch = { version = "0.141.0", path = "./ext/fetch" }
deno_ffi = { version = "0.104.0", path = "./ext/ffi" }
deno_fs = { version = "0.27.0", path = "./ext/fs" }
deno_http = { version = "0.112.0", path = "./ext/http" }
deno_io = { version = "0.27.0", path = "./ext/io" }
deno_net = { version = "0.109.0", path = "./ext/net" }
deno_node = { version = "0.54.0", path = "./ext/node" }
deno_kv = { version = "0.25.0", path = "./ext/kv" }
deno_tls = { version = "0.104.0", path = "./ext/tls" }
deno_url = { version = "0.117.0", path = "./ext/url" }
deno_web = { version = "0.148.0", path = "./ext/web" }
deno_webidl = { version = "0.117.0", path = "./ext/webidl" }
deno_websocket = { version = "0.122.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.112.0", path = "./ext/webstorage" }
deno_napi = { version = "0.47.0", path = "./ext/napi" }
deno_broadcast_channel = { version = "0.168.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.106.0", path = "./ext/cache" }
deno_canvas = { version = "0.43.0", path = "./ext/canvas" }
deno_console = { version = "0.174.0", path = "./ext/console" }
deno_cron = { version = "0.54.0", path = "./ext/cron" }
deno_crypto = { version = "0.188.0", path = "./ext/crypto" }
deno_fetch = { version = "0.198.0", path = "./ext/fetch" }
deno_ffi = { version = "0.161.0", path = "./ext/ffi" }
deno_fs = { version = "0.84.0", path = "./ext/fs" }
deno_http = { version = "0.172.0", path = "./ext/http" }
deno_io = { version = "0.84.0", path = "./ext/io" }
deno_kv = { version = "0.82.0", path = "./ext/kv" }
deno_napi = { version = "0.105.0", path = "./ext/napi" }
deno_net = { version = "0.166.0", path = "./ext/net" }
deno_node = { version = "0.111.0", path = "./ext/node" }
deno_tls = { version = "0.161.0", path = "./ext/tls" }
deno_url = { version = "0.174.0", path = "./ext/url" }
deno_web = { version = "0.205.0", path = "./ext/web" }
deno_webgpu = { version = "0.141.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.174.0", path = "./ext/webidl" }
deno_websocket = { version = "0.179.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.169.0", path = "./ext/webstorage" }

# resolvers
deno_resolver = { version = "0.6.0", path = "./resolvers/deno" }
node_resolver = { version = "0.13.0", path = "./resolvers/node" }

aes = "=0.8.3"
anyhow = "1.0.57"
async-trait = "0.1.73"
# TODO(mmastrac): Requires code changes to bump
base64 = "=0.13.1"
base32 = "=0.5.1"
base64 = "0.21.7"
bencher = "0.1"
brotli = "3.3.4"
brotli = "6.0.0"
bytes = "1.4.0"
cache_control = "=0.2.0"
cbc = { version = "=0.1.2", features = ["alloc"] }
chrono = { version = "=0.4.26", default-features = false, features = ["std", "serde", "clock"] }
# Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
# Instead use util::time::utc_now()
chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
color-print = "0.3.5"
console_static_text = "=0.8.1"
dashmap = "5.5.3"
data-encoding = "2.3.3"
data-url = "=0.3.0"
dlopen = "0.1.8"
encoding_rs = "=0.8.33"
deno_cache_dir = "=0.13.0"
deno_package_json = { version = "0.1.2", default-features = false }
dlopen2 = "0.6.1"
ecb = "=0.1.2"
fastwebsockets = "=0.4.4"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }
encoding_rs = "=0.8.33"
fast-socks5 = "0.9.6"
faster-hex = "0.9"
fastwebsockets = { version = "0.8", features = ["upgrade", "unstable-split"] }
filetime = "0.2.16"
flate2 = { version = "1.0.26", features = ["zlib-ng"], default-features = false }
flate2 = { version = "1.0.30", default-features = false }
fs3 = "0.5.0"
futures = "0.3.21"
glob = "0.3.1"
hex = "0.4"
http = "0.2.9"
h2 = "0.4.4"
http = "1.0"
http-body = "1.0"
http-body-util = "0.1.2"
http_v02 = { package = "http", version = "0.2.9" }
httparse = "1.8.0"
hyper = { version = "0.14.26", features = ["runtime", "http1"] }
# TODO(mmastrac): indexmap 2.0 will require multiple synchronized changes
indexmap1 = { package = "indexmap", version = "1", features = ["serde"] }
hyper = { version = "1.4.1", features = ["full"] }
hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
indexmap = { version = "2", features = ["serde"] }
ipnet = "2.3"
jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3"
libc = "0.2.126"
log = "=0.4.20"
lsp-types = "=0.93.2" # used by tower-lsp and "proposed" feature is unstable in patch releases
libz-sys = { version = "1.1.20", default-features = false }
log = "0.4.20"
lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
memmem = "0.1.1"
notify = "=5.0.0"
monch = "=0.5.0"
notify = "=6.1.1"
num-bigint = { version = "0.4", features = ["rand"] }
once_cell = "1.17.1"
os_pipe = "=1.1.4"
os_pipe = { version = "=1.1.5", features = ["io_safety"] }
p224 = { version = "0.13.0", features = ["ecdh"] }
p256 = { version = "0.13.2", features = ["ecdh", "jwk"] }
p384 = { version = "0.13.0", features = ["ecdh", "jwk"] }
parking_lot = "0.12.0"
percent-encoding = "=2.3.0"
percent-encoding = "2.3.0"
phf = { version = "0.11", features = ["macros"] }
pin-project = "1.0.11" # don't pin because they yank crates from cargo
pretty_assertions = "=1.4.0"
prost = "0.11"
prost-build = "0.11"
rand = "=0.8.5"
regex = "^1.7.0"
lazy-regex = "3"
reqwest = { version = "0.11.20", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json"] }
ring = "=0.16.20"
rusqlite = { version = "=0.29.0", features = ["unlock_notify", "bundled"] }
rustls = "0.21.0"
rustls-pemfile = "1.0.0"
rustls-webpki = "0.101.4"
rustls-native-certs = "0.6.2"
webpki-roots = "0.25.2"
reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
ring = "^0.17.0"
rusqlite = { version = "0.32.0", features = ["unlock_notify", "bundled"] }
rustls = { version = "0.23.11", default-features = false, features = ["logging", "std", "tls12", "ring"] }
rustls-pemfile = "2"
rustls-tokio-stream = "=0.3.0"
rustls-webpki = "0.102"
rustyline = "=13.0.0"
saffron = "=0.1.0"
scopeguard = "1.2.0"
sec1 = "0.7"
serde = { version = "1.0.149", features = ["derive"] }
serde_bytes = "0.11"
serde_json = "1.0.85"
serde_repr = "=0.1.16"
sha2 = { version = "0.10.6", features = ["oid"] }
signature = "=1.6.4"
sha1 = { version = "0.10.6", features = ["oid"] }
sha2 = { version = "0.10.8", features = ["oid"] }
signature = "2.1"
slab = "0.4"
smallvec = "1.8"
socket2 = { version = "0.5.3", features = ["all"] }
spki = "0.7.2"
tar = "=0.4.40"
tempfile = "3.4.0"
thiserror = "1.0.40"
tokio = { version = "1.28.1", features = ["full"] }
termcolor = "1.1.3"
thiserror = "1.0.61"
tokio = { version = "1.36.0", features = ["full"] }
tokio-metrics = { version = "0.3.0", features = ["rt"] }
tokio-rustls = "0.24.0"
tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
tokio-socks = "0.5.1"
tokio-util = "0.7.4"
tower-lsp = { version = "=0.17.0", features = ["proposed"] }
url = { version = "2.3.1", features = ["serde", "expose_internals"] }
tower = { version = "0.4.13", default-features = false, features = ["util"] }
tower-http = { version = "0.6.1", features = ["decompression-br", "decompression-gzip"] }
tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
tower-service = "0.3.2"
twox-hash = "=1.6.3"
# Upgrading past 2.4.1 may cause WPT failures
url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
uuid = { version = "1.3.0", features = ["v4"] }
webpki-root-certs = "0.26.5"
webpki-roots = "0.26"
which = "4.2.5"
yoke = { version = "0.7.4", features = ["derive"] }
zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zstd = "=0.12.4"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] }
p224 = { version = "0.13.0", features = ["ecdh"] }
p256 = { version = "0.13.2", features = ["ecdh"] }
p384 = { version = "0.13.0", features = ["ecdh"] }

# crypto
rsa = { version = "0.7.0", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node
hkdf = "0.12.3"
rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node

# webgpu
raw-window-handle = "0.6.0"
wgpu-core = "0.21.1"
wgpu-types = "0.20"

# macros
proc-macro2 = "1"
quote = "1"
syn = { version = "2", features = ["full", "extra-traits"] }

# unix
nix = "=0.26.2"
nix = "=0.27.1"

# windows deps
fwdansi = "=1.1.0"
winres = "=0.1.12"
junction = "=0.2.0"
winapi = "=0.3.9"
windows-sys = { version = "0.48.0", features = ["Win32_Media"] }
windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel"] }
winres = "=0.1.12"

# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.release]
codegen-units = 1
incremental = true
@ -181,150 +240,89 @@ opt-level = 'z' # Optimize for size
inherits = "release"
debug = true

# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.bench]
codegen-units = 1
incremental = true
lto = true
opt-level = 'z' # Optimize for size
# Faster to compile than `release` but with similar performance.
[profile.release-lite]
inherits = "release"
codegen-units = 128
lto = "thin"

# Key generation is too slow on `debug`
[profile.dev.package.num-bigint-dig]
opt-level = 3

# Optimize these packages for performance.
# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.bench.package.rand]
opt-level = 3
[profile.bench.package.flate2]
opt-level = 3
[profile.bench.package.brotli]
opt-level = 3
[profile.bench.package.miniz_oxide]
opt-level = 3
[profile.bench.package.async-compression]
opt-level = 3
[profile.bench.package.brotli-decompressor]
opt-level = 3
[profile.bench.package.deno_bench_util]
opt-level = 3
[profile.bench.package.deno_core]
opt-level = 3
[profile.bench.package.deno_runtime]
opt-level = 3
[profile.bench.package.deno_http]
opt-level = 3
[profile.bench.package.deno_web]
opt-level = 3
[profile.bench.package.deno_broadcast_channel]
opt-level = 3
[profile.bench.package.deno_fetch]
opt-level = 3
[profile.bench.package.deno_ffi]
opt-level = 3
[profile.bench.package.deno_tls]
opt-level = 3
[profile.bench.package.deno_websocket]
opt-level = 3
[profile.bench.package.deno_net]
opt-level = 3
[profile.bench.package.deno_crypto]
opt-level = 3
[profile.bench.package.deno_node]
opt-level = 3
[profile.bench.package.num-bigint-dig]
opt-level = 3
[profile.bench.package.v8]
opt-level = 3
[profile.bench.package.serde_v8]
opt-level = 3
[profile.bench.package.serde]
opt-level = 3
[profile.bench.package.deno_url]
opt-level = 3
[profile.bench.package.url]
opt-level = 3
[profile.bench.package.bytes]
opt-level = 3
[profile.bench.package.futures-util]
opt-level = 3
[profile.bench.package.hyper]
opt-level = 3
[profile.bench.package.tokio]
opt-level = 3
[profile.bench.package.zstd]
opt-level = 3
[profile.bench.package.zstd-sys]
opt-level = 3
[profile.bench.package.base64-simd]
opt-level = 3
# rusty-v8 needs at least -O1 to not miscompile
[profile.dev.package.v8]
opt-level = 1

# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.release.package.rand]
[profile.release.package.async-compression]
opt-level = 3
[profile.release.package.flate2]
[profile.release.package.base64-simd]
opt-level = 3
[profile.release.package.brotli]
opt-level = 3
[profile.release.package.miniz_oxide]
opt-level = 3
[profile.release.package.async-compression]
opt-level = 3
[profile.release.package.brotli-decompressor]
opt-level = 3
[profile.release.package.bytes]
opt-level = 3
[profile.release.package.deno_bench_util]
opt-level = 3
[profile.release.package.deno_broadcast_channel]
opt-level = 3
[profile.release.package.deno_core]
opt-level = 3
[profile.release.package.deno_runtime]
opt-level = 3
[profile.release.package.deno_http]
opt-level = 3
[profile.release.package.deno_net]
opt-level = 3
[profile.release.package.deno_web]
opt-level = 3
[profile.release.package.deno_crypto]
opt-level = 3
[profile.release.package.deno_node]
opt-level = 3
[profile.release.package.deno_broadcast_channel]
opt-level = 3
[profile.release.package.deno_fetch]
opt-level = 3
[profile.release.package.deno_ffi]
opt-level = 3
[profile.release.package.deno_tls]
opt-level = 3
[profile.release.package.deno_websocket]
[profile.release.package.deno_http]
opt-level = 3
[profile.release.package.deno_napi]
opt-level = 3
[profile.release.package.test_napi]
[profile.release.package.deno_net]
opt-level = 3
[profile.release.package.num-bigint-dig]
[profile.release.package.deno_node]
opt-level = 3
[profile.release.package.v8]
[profile.release.package.deno_runtime]
opt-level = 3
[profile.release.package.serde_v8]
opt-level = 3
[profile.release.package.serde]
[profile.release.package.deno_tls]
opt-level = 3
[profile.release.package.deno_url]
opt-level = 3
[profile.release.package.url]
[profile.release.package.deno_web]
opt-level = 3
[profile.release.package.bytes]
[profile.release.package.deno_websocket]
opt-level = 3
[profile.release.package.fastwebsockets]
opt-level = 3
[profile.release.package.flate2]
opt-level = 3
[profile.release.package.futures-util]
opt-level = 3
[profile.release.package.hyper]
opt-level = 3
[profile.release.package.miniz_oxide]
opt-level = 3
[profile.release.package.num-bigint-dig]
opt-level = 3
[profile.release.package.rand]
opt-level = 3
[profile.release.package.serde]
opt-level = 3
[profile.release.package.serde_v8]
opt-level = 3
[profile.release.package.libsui]
opt-level = 3
[profile.release.package.test_napi]
opt-level = 3
[profile.release.package.tokio]
opt-level = 3
[profile.release.package.url]
opt-level = 3
[profile.release.package.v8]
opt-level = 3
[profile.release.package.zstd]
opt-level = 3
[profile.release.package.zstd-sys]
opt-level = 3
[profile.release.package.base64-simd]
opt-level = 3
@ -1,6 +1,6 @@
MIT License

Copyright 2018-2023 the Deno authors
Copyright 2018-2024 the Deno authors

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
106 README.md
@ -6,26 +6,21 @@

<img align="right" src="https://deno.land/logo.svg" height="150px" alt="the deno mascot dinosaur standing in the rain">

[Deno](https://deno.com/runtime) is a _simple_, _modern_ and _secure_ runtime
for **JavaScript** and **TypeScript** that uses V8 and is built in Rust.
[Deno](https://www.deno.com)
([/ˈdiːnoʊ/](http://ipa-reader.xyz/?text=%CB%88di%CB%90no%CA%8A), pronounced
`dee-no`) is a JavaScript, TypeScript, and WebAssembly runtime with secure
defaults and a great developer experience. It's built on [V8](https://v8.dev/),
[Rust](https://www.rust-lang.org/), and [Tokio](https://tokio.rs/).

### Features
Learn more about the Deno runtime
[in the documentation](https://docs.deno.com/runtime/manual).

- [Secure by default.](https://deno.land/manual/basics/permissions) No file,
  network, or environment access, unless explicitly enabled.
- Provides
  [web platform functionality and APIs](https://deno.land/manual/runtime/web_platform_apis),
  e.g. using ES modules, web workers, and `fetch()`.
- Supports
  [TypeScript out of the box](https://deno.land/manual/advanced/typescript).
- Ships only a single executable file.
- [Built-in tooling](https://deno.land/manual/tools#built-in-tooling) including
  `deno test`, `deno fmt`, `deno bench`, and more.
- Includes [a set of reviewed standard modules](https://deno.land/std/)
  guaranteed to work with Deno.
- [Supports npm.](https://deno.land/manual/node)
## Installation

### Install
Install the Deno runtime on your system using one of the commands below. Note
that there are a number of ways to install Deno - a comprehensive list of
installation options can be found
[here](https://docs.deno.com/runtime/manual/getting_started/installation).

Shell (Mac, Linux):

@ -51,64 +46,49 @@ brew install deno
choco install deno
```

[Scoop](https://scoop.sh/) (Windows):
### Build and install from source

```powershell
scoop install deno
```
Complete instructions for building Deno from source can be found in the manual
[here](https://docs.deno.com/runtime/manual/references/contributing/building_from_source).

Build and install from source using [Cargo](https://crates.io/crates/deno):
## Your first Deno program

```sh
# Install the Protobuf compiler
apt install -y protobuf-compiler # Linux
brew install protobuf # macOS

# Build and install Deno
cargo install deno --locked
```

See
[deno_install](https://github.com/denoland/deno_install/blob/master/README.md)
and [releases](https://github.com/denoland/deno/releases) for other options.

### Getting Started

Try [running a simple program](https://examples.deno.land/hello-world):

```sh
deno run https://deno.land/std/examples/welcome.ts
```

Or [setup a simple HTTP server](https://examples.deno.land/http-server):
Deno can be used for many different applications, but is most commonly used to
build web servers. Create a file called `server.ts` and include the following
TypeScript code:

```ts
Deno.serve((_req) => new Response("Hello, World!"));
Deno.serve((_req: Request) => {
  return new Response("Hello, world!");
});
```

[More Examples](https://examples.deno.land)
Run your server with the following command:

### Additional Resources
```sh
deno run --allow-net server.ts
```

- **[The Deno Manual](https://deno.land/manual)** is a great starting point for
  [additional examples](https://deno.land/manual/examples),
  [setting up your environment](https://deno.land/manual/getting_started/setup_your_environment),
  [using npm](https://deno.land/manual/node), and more.
- **[Runtime API reference](https://deno.land/api)** documents all APIs built
  into Deno CLI.
- **[Deno Standard Modules](https://deno.land/std)** do not have external
  dependencies and are reviewed by the Deno core team.
- **[deno.land/x](https://deno.land/x)** is the registry for third party
  modules.
- **[Blog](https://deno.com/blog)** is where the Deno team shares important
  product updates and “how to”s about solving technical problems.
This should start a local web server on
[http://localhost:8000](http://localhost:8000).

### Contributing
Learn more about writing and running Deno programs
[in the docs](https://docs.deno.com/runtime/manual).

We appreciate your help!
## Additional resources

To contribute, please read our
[contributing instructions](https://deno.land/manual/contributing).
- **[Deno Docs](https://docs.deno.com)**: official guides and reference docs for
  the Deno runtime, [Deno Deploy](https://deno.com/deploy), and beyond.
- **[Deno Standard Library](https://jsr.io/@std)**: officially supported common
  utilities for Deno programs.
- **[deno.land/x](https://deno.land/x)**: registry for third-party Deno modules.
- **[Developer Blog](https://deno.com/blog)**: Product updates, tutorials, and
  more from the Deno team.

## Contributing

We appreciate your help! To contribute, please read our
[contributing instructions](https://docs.deno.com/runtime/manual/references/contributing/).

[Build status - Cirrus]: https://github.com/denoland/deno/workflows/ci/badge.svg?branch=main&event=push
[Build status]: https://github.com/denoland/deno/actions
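As a quick sanity check of the `server.ts` example in the README diff above, any HTTP client can hit the default port. A minimal Deno client sketch, assuming the server is running locally on port 8000:

```ts
// Run with: deno run --allow-net client.ts
const res = await fetch("http://localhost:8000/");
console.log(res.status, await res.text()); // expected: 200 Hello, world!
```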
2243 Releases.md
File diff suppressed because it is too large
@ -1,8 +1,8 @@
# Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

[package]
name = "deno_bench_util"
version = "0.111.0"
version = "0.168.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -17,13 +17,8 @@ path = "lib.rs"
[dependencies]
bencher.workspace = true
deno_core.workspace = true
once_cell.workspace = true
tokio.workspace = true

[[bench]]
name = "op_baseline"
harness = false

[[bench]]
name = "utf8"
harness = false
|
|||
Example:
|
||||
|
||||
```rust
|
||||
use deno_bench_util::bench_js_sync;
|
||||
use deno_bench_util::bench_or_profile;
|
||||
use deno_bench_util::bencher::{benchmark_group, Bencher};
|
||||
use deno_bench_util::bench_js_sync};
|
||||
use deno_bench_util::bencher::benchmark_group;
|
||||
use deno_bench_util::bencher::Bencher;
|
||||
|
||||
use deno_core::Extension;
|
||||
|
||||
#[op]
|
||||
#[op2]
|
||||
#[number]
|
||||
fn op_nop() -> usize {
|
||||
9
|
||||
}
|
||||
|
|
|
@ -1,60 +0,0 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use deno_bench_util::bench_js_async;
use deno_bench_util::bench_js_sync;
use deno_bench_util::bench_or_profile;
use deno_bench_util::bencher::benchmark_group;
use deno_bench_util::bencher::Bencher;

use deno_core::op;
use deno_core::Extension;

deno_core::extension!(
  bench_setup,
  ops = [
    // op_pi_json,
    op_pi_async,
    op_nop
  ]
);

fn setup() -> Vec<Extension> {
  vec![bench_setup::init_ops()]
}

#[op]
fn op_nop() {}

// TODO(bartlomieju): reenable, currently this op generates a fast function,
// which is wrong, because i64 is not a compatible type for fast call.
// #[op]
// fn op_pi_json() -> i64 {
//   314159
// }

// this is a function since async closures aren't stable
#[op]
async fn op_pi_async() -> i64 {
  314159
}

// fn bench_op_pi_json(b: &mut Bencher) {
//   bench_js_sync(b, r#"Deno.core.ops.op_pi_json();"#, setup);
// }

fn bench_op_nop(b: &mut Bencher) {
  bench_js_sync(b, r#"Deno.core.ops.op_nop();"#, setup);
}

fn bench_op_async(b: &mut Bencher) {
  bench_js_async(b, r#"Deno.core.opAsync("op_pi_async");"#, setup);
}

benchmark_group!(
  benches,
  // bench_op_pi_json,
  bench_op_nop,
  bench_op_async,
);

bench_or_profile!(benches);
@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_bench_util::bench_js_sync_with;
use deno_bench_util::bench_or_profile;
@ -6,27 +6,23 @@ use deno_bench_util::bencher::benchmark_group;
use deno_bench_util::bencher::Bencher;
use deno_bench_util::BenchOptions;
use deno_core::Extension;
use deno_core::ExtensionFileSource;
use deno_core::ExtensionFileSourceCode;

fn setup() -> Vec<Extension> {
  vec![Extension {
    name: "bench_setup",
    js_files: std::borrow::Cow::Borrowed(&[ExtensionFileSource {
      specifier: "ext:bench_setup/setup.js",
      code: ExtensionFileSourceCode::IncludedInBinary(
        r#"
deno_core::extension!(
  bench_setup,
  js = ["ext:bench_setup/setup.js" = {
    source = r#"
const hello = "hello world\n";
const hello1k = hello.repeat(1e3);
const hello1m = hello.repeat(1e6);
const helloEncoded = Deno.core.encode(hello);
const hello1kEncoded = Deno.core.encode(hello1k);
const hello1mEncoded = Deno.core.encode(hello1m);
"#,
      ),
    }]),
    ..Default::default()
"#
  }]
);

  vec![bench_setup::init_ops_and_esm()]
}

fn bench_utf8_encode_12_b(b: &mut Bencher) {
@ -1,8 +1,9 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use bencher::Bencher;
use deno_core::v8;
use deno_core::Extension;
use deno_core::JsRuntime;
use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions;

use crate::profiling::is_profiling;
@ -115,6 +116,9 @@ pub fn bench_js_async_with(
}

async fn inner_async(src: &'static str, runtime: &mut JsRuntime) {
  runtime.execute_script_static("inner_loop", src).unwrap();
  runtime.run_event_loop(false).await.unwrap();
  runtime.execute_script("inner_loop", src).unwrap();
  runtime
    .run_event_loop(PollEventLoopOptions::default())
    .await
    .unwrap();
}
@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
mod js_runtime;
mod profiling;

@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use bencher::DynBenchFn;
use bencher::StaticBenchFn;
use bencher::TestDescAndFn;
@ -39,6 +39,7 @@ macro_rules! bench_or_profile {
};
}

#[allow(clippy::print_stdout)]
pub fn run_profiles(opts: &TestOpts, tests: Vec<TestDescAndFn>) {
  let tests = filter_tests(opts, tests);
  // let decs = tests.iter().map(|t| t.desc.clone()).collect();
132 cli/Cargo.toml
@ -1,12 +1,12 @@
# Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

[package]
name = "deno"
version = "1.36.3"
version = "2.0.3"
authors.workspace = true
default-run = "deno"
edition.workspace = true
exclude = ["tests/testdata/npm/registry/*"]
exclude = ["bench/testdata/lsp_benchdata/"]
license.workspace = true
repository.workspace = true
description = "Provides the deno executable"
@ -16,6 +16,16 @@ name = "deno"
path = "main.rs"
doc = false

[[bin]]
name = "denort"
path = "mainrt.rs"
doc = false

[[test]]
name = "integration"
path = "integration_tests_runner.rs"
harness = false

[[bench]]
name = "deno_bench"
harness = false
@ -27,87 +37,121 @@ harness = false
path = "./bench/lsp_bench_standalone.rs"

[features]
default = ["upgrade", "__vendored_zlib_ng"]
# A feature that enables heap profiling with dhat on Linux.
# 1. Compile with `cargo build --profile=release-with-debug --features=dhat-heap`
# 2. Run the executable. It will output a dhat-heap.json file.
# 3. Open the json file in https://nnethercote.github.io/dh_view/dh_view.html
dhat-heap = ["dhat"]
# A feature that enables the upgrade subcommand and the background check for
# available updates (of deno binary). This is typically disabled for (Linux)
# distribution packages.
upgrade = []
# A dev feature to disable creations and loading of snapshots in favor of
# loading JS sources at runtime.
__runtime_js_sources = ["deno_runtime/__runtime_js_sources"]
hmr = ["deno_runtime/hmr"]
# Vendor zlib as zlib-ng
__vendored_zlib_ng = ["flate2/zlib-ng-compat", "libz-sys/zlib-ng"]

[build-dependencies]
deno_runtime = { workspace = true, features = ["snapshot_from_snapshot", "include_js_files_for_snapshotting"] }
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
lazy-regex.workspace = true
serde.workspace = true
serde_json.workspace = true
zstd.workspace = true
glibc_version = "0.1.2"
flate2 = { workspace = true, features = ["default"] }

[target.'cfg(windows)'.build-dependencies]
winapi.workspace = true
winres.workspace = true

[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = "=0.5.2"
deno_config = "=0.2.1"
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
deno_config = { version = "=0.37.2", features = ["workspace", "sync"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = "=0.65.0"
deno_emit = "=0.26.0"
deno_graph = "=0.52.0"
deno_lint = { version = "=0.50.2", features = ["docs"] }
deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] }
deno_graph = { version = "=0.83.4" }
deno_lint = { version = "=0.67.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm.workspace = true
deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] }
deno_package_json.workspace = true
deno_path_util.workspace = true
deno_resolver.workspace = true
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
deno_task_shell = "=0.13.2"
eszip = "=0.50.0"
napi_sym.workspace = true
deno_task_shell = "=0.18.1"
deno_terminal.workspace = true
libsui = "0.4.0"
node_resolver.workspace = true

anstream = "0.6.14"
async-trait.workspace = true
base32 = "=0.4.0"
base64.workspace = true
bincode = "=1.3.3"
bytes.workspace = true
cache_control.workspace = true
chrono.workspace = true
clap = { version = "=4.3.3", features = ["string"] }
clap_complete = "=4.3.1"
clap_complete_fig = "=4.3.1"
chrono = { workspace = true, features = ["now"] }
clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
clap_complete = "=4.5.24"
clap_complete_fig = "=4.5.2"
color-print.workspace = true
console_static_text.workspace = true
data-url.workspace = true
dashmap.workspace = true
data-encoding.workspace = true
dhat = { version = "0.3.3", optional = true }
dissimilar = "=1.0.4"
dprint-plugin-json = "=0.17.4"
dprint-plugin-markdown = "=0.15.3"
dprint-plugin-typescript = "=0.86.2"
encoding_rs.workspace = true
dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.93.0"
env_logger = "=0.10.0"
fancy-regex = "=0.10.0"
fastwebsockets.workspace = true
faster-hex.workspace = true
# If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`.
flate2.workspace = true
fs3.workspace = true
glob = "0.3.1"
http.workspace = true
hyper.workspace = true
import_map = "=0.15.0"
http-body.workspace = true
http-body-util.workspace = true
hyper-util.workspace = true
import_map = { version = "=0.20.1", features = ["ext"] }
indexmap.workspace = true
indexmap1.workspace = true
jsonc-parser = { version = "=0.21.1", features = ["serde"] }
jsonc-parser = { workspace = true, features = ["cst", "serde"] }
jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
lazy-regex.workspace = true
libc.workspace = true
libz-sys.workspace = true
log = { workspace = true, features = ["serde"] }
lsp-types.workspace = true
monch = "=0.4.3"
malva = "=0.11.0"
markup_fmt = "=0.14.0"
memmem.workspace = true
monch.workspace = true
notify.workspace = true
once_cell.workspace = true
os_pipe.workspace = true
open = "5.0.1"
p256.workspace = true
pathdiff = "0.2.1"
percent-encoding.workspace = true
pin-project.workspace = true
quick-junit = "^0.3.3"
phf.workspace = true
pretty_yaml = "=0.5.0"
quick-junit = "^0.3.5"
rand = { workspace = true, features = ["small_rng"] }
regex.workspace = true
ring.workspace = true
rustyline = { version = "=10.0.0", default-features = false, features = ["custom-bindings"] }
rustyline.workspace = true
rustyline-derive = "=0.7.0"
serde.workspace = true
serde_repr.workspace = true
sha2.workspace = true
shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] }
strsim = "0.11.1"
tar.workspace = true
tempfile.workspace = true
text-size = "=1.1.0"
@ -116,15 +160,18 @@ thiserror.workspace = true
tokio.workspace = true
tokio-util.workspace = true
tower-lsp.workspace = true
twox-hash = "=1.6.3"
typed-arena = "=2.0.1"
tracing = { version = "0.1", features = ["log", "default"] }
twox-hash.workspace = true
typed-arena = "=2.0.2"
uuid = { workspace = true, features = ["serde"] }
walkdir = "=2.3.2"
which.workspace = true
zeromq.workspace = true
zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] }
zstd.workspace = true

[target.'cfg(windows)'.dependencies]
fwdansi.workspace = true
junction = "=0.2.0"
junction.workspace = true
winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] }

[target.'cfg(unix)'.dependencies]
@ -132,13 +179,8 @@ nix.workspace = true

[dev-dependencies]
deno_bench_util.workspace = true
flaky_test = "=0.1.0"
once_cell.workspace = true
os_pipe.workspace = true
pretty_assertions.workspace = true
test_util.workspace = true
trust-dns-client = "=0.22.0"
trust-dns-server = "=0.22.1"

[package.metadata.winres]
# This section defines the metadata that appears in the deno.exe PE header.
123
cli/args/deno_json.rs
Normal file
123
cli/args/deno_json.rs
Normal file
|
@ -0,0 +1,123 @@
|
|||
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::collections::HashSet;
+
+use deno_config::deno_json::TsConfigForEmit;
+use deno_core::serde_json;
+use deno_semver::jsr::JsrDepPackageReq;
+use deno_semver::jsr::JsrPackageReqReference;
+use deno_semver::npm::NpmPackageReqReference;
+
+#[cfg(test)] // happens to only be used by the tests at the moment
+pub struct DenoConfigFsAdapter<'a>(
+  pub &'a dyn deno_runtime::deno_fs::FileSystem,
+);
+
+#[cfg(test)]
+impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
+  fn read_to_string_lossy(
+    &self,
+    path: &std::path::Path,
+  ) -> Result<String, std::io::Error> {
+    self
+      .0
+      .read_text_file_lossy_sync(path, None)
+      .map_err(|err| err.into_io_error())
+  }
+
+  fn stat_sync(
+    &self,
+    path: &std::path::Path,
+  ) -> Result<deno_config::fs::FsMetadata, std::io::Error> {
+    self
+      .0
+      .stat_sync(path)
+      .map(|stat| deno_config::fs::FsMetadata {
+        is_file: stat.is_file,
+        is_directory: stat.is_directory,
+        is_symlink: stat.is_symlink,
+      })
+      .map_err(|err| err.into_io_error())
+  }
+
+  fn read_dir(
+    &self,
+    path: &std::path::Path,
+  ) -> Result<Vec<deno_config::fs::FsDirEntry>, std::io::Error> {
+    self
+      .0
+      .read_dir_sync(path)
+      .map_err(|err| err.into_io_error())
+      .map(|entries| {
+        entries
+          .into_iter()
+          .map(|e| deno_config::fs::FsDirEntry {
+            path: path.join(e.name),
+            metadata: deno_config::fs::FsMetadata {
+              is_file: e.is_file,
+              is_directory: e.is_directory,
+              is_symlink: e.is_symlink,
+            },
+          })
+          .collect()
+      })
+  }
+}
+
+pub fn deno_json_deps(
+  config: &deno_config::deno_json::ConfigFile,
+) -> HashSet<JsrDepPackageReq> {
+  let values = imports_values(config.json.imports.as_ref())
+    .into_iter()
+    .chain(scope_values(config.json.scopes.as_ref()));
+  values_to_set(values)
+}
+
+fn imports_values(value: Option<&serde_json::Value>) -> Vec<&String> {
+  let Some(obj) = value.and_then(|v| v.as_object()) else {
+    return Vec::new();
+  };
+  let mut items = Vec::with_capacity(obj.len());
+  for value in obj.values() {
+    if let serde_json::Value::String(value) = value {
+      items.push(value);
+    }
+  }
+  items
+}
+
+fn scope_values(value: Option<&serde_json::Value>) -> Vec<&String> {
+  let Some(obj) = value.and_then(|v| v.as_object()) else {
+    return Vec::new();
+  };
+  obj.values().flat_map(|v| imports_values(Some(v))).collect()
+}
+
+fn values_to_set<'a>(
+  values: impl Iterator<Item = &'a String>,
+) -> HashSet<JsrDepPackageReq> {
+  let mut entries = HashSet::new();
+  for value in values {
+    if let Ok(req_ref) = JsrPackageReqReference::from_str(value) {
+      entries.insert(JsrDepPackageReq::jsr(req_ref.into_inner().req));
+    } else if let Ok(req_ref) = NpmPackageReqReference::from_str(value) {
+      entries.insert(JsrDepPackageReq::npm(req_ref.into_inner().req));
+    }
+  }
+  entries
+}
+
+pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) {
+  if let Some(ignored_options) = &ts_config.maybe_ignored_options {
+    log::warn!("{}", ignored_options);
+  }
+  let serde_json::Value::Object(obj) = &ts_config.ts_config.0 else {
+    return;
+  };
+  if obj.get("experimentalDecorators") == Some(&serde_json::Value::Bool(true)) {
+    log::warn!(
+      "{} experimentalDecorators compiler option is deprecated and may be removed at any time",
+      deno_runtime::colors::yellow("Warning"),
+    );
+  }
+}
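The new `deno_json_deps` helper walks every string value in `imports` and `scopes` and keeps only those that parse as JSR or npm requirements. A rough, std-only sketch of that filtering, with `DepReq` as a hypothetical stand-in for `JsrDepPackageReq` (the real parsing is delegated to `deno_semver`):

```rust
use std::collections::HashSet;

// Hypothetical stand-in for JsrDepPackageReq: just the registry kind
// plus the raw requirement text.
#[derive(Debug, PartialEq, Eq, Hash)]
enum DepReq {
    Jsr(String),
    Npm(String),
}

// Collect every `jsr:`/`npm:` specifier from import-map values,
// ignoring anything else (relative paths, https URLs, ...).
fn deps_from_import_values<'a>(
    values: impl Iterator<Item = &'a str>,
) -> HashSet<DepReq> {
    let mut entries = HashSet::new();
    for value in values {
        if let Some(rest) = value.strip_prefix("jsr:") {
            entries.insert(DepReq::Jsr(rest.trim_start_matches('/').to_string()));
        } else if let Some(rest) = value.strip_prefix("npm:") {
            entries.insert(DepReq::Npm(rest.trim_start_matches('/').to_string()));
        }
    }
    entries
}

fn main() {
    let imports = ["jsr:@std/path@^1", "npm:chalk@5", "./utils/mod.ts"];
    let deps = deps_from_import_values(imports.into_iter());
    assert_eq!(deps.len(), 2); // the relative import is skipped
    println!("{deps:?}");
}
```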
7891  cli/args/flags.rs
File diff suppressed because it is too large
@@ -1,6 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 use deno_core::url::Url;
+use deno_runtime::deno_permissions::NetDescriptor;
 use std::net::IpAddr;
 use std::str::FromStr;
 
@@ -42,21 +43,17 @@ pub fn validator(host_and_port: &str) -> Result<String, String> {
 /// `127.0.0.1:port` and `localhost:port`.
 pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> {
   let mut out: Vec<String> = vec![];
-  for host_and_port in paths.iter() {
-    if Url::parse(&format!("internal://{host_and_port}")).is_ok()
-      || host_and_port.parse::<IpAddr>().is_ok()
-    {
-      out.push(host_and_port.to_owned())
-    } else if let Ok(port) = host_and_port.parse::<BarePort>() {
+  for host_and_port in paths.into_iter() {
+    if let Ok(port) = host_and_port.parse::<BarePort>() {
       // we got bare port, let's add default hosts
       for host in ["0.0.0.0", "127.0.0.1", "localhost"].iter() {
         out.push(format!("{}:{}", host, port.0));
       }
     } else {
-      return Err(clap::Error::raw(
-        clap::error::ErrorKind::InvalidValue,
-        format!("Bad host:port pair: {host_and_port}"),
-      ));
+      NetDescriptor::parse(&host_and_port).map_err(|e| {
+        clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}"))
+      })?;
+      out.push(host_and_port)
     }
   }
   Ok(out)

@@ -121,8 +118,8 @@ mod tests {
   let entries = svec![
     "deno.land",
     "deno.land:80",
-    "::",
-    "::1",
+    "[::]",
+    "[::1]",
     "127.0.0.1",
     "[::1]",
     "1.2.3.4:5678",

@@ -142,8 +139,8 @@ mod tests {
   let expected = svec![
     "deno.land",
     "deno.land:80",
-    "::",
-    "::1",
+    "[::]",
+    "[::1]",
     "127.0.0.1",
     "[::1]",
     "1.2.3.4:5678",

@@ -174,10 +171,8 @@ mod tests {
 
   #[test]
   fn parse_net_args_ipv6() {
-    let entries =
-      svec!["::", "::1", "[::1]", "[::]:5678", "[::1]:5678", "::cafe"];
-    let expected =
-      svec!["::", "::1", "[::1]", "[::]:5678", "[::1]:5678", "::cafe"];
+    let entries = svec!["[::1]", "[::]:5678", "[::1]:5678"];
+    let expected = svec!["[::1]", "[::]:5678", "[::1]:5678"];
     let actual = parse(entries).unwrap();
     assert_eq!(actual, expected);
   }

@@ -190,12 +185,36 @@ mod tests {
 
   #[test]
   fn parse_net_args_ipv6_error2() {
-    let entries = svec!["0123:4567:890a:bcde:fg::"];
+    let entries = svec!["::1"];
     assert!(parse(entries).is_err());
   }
+
+  #[test]
+  fn parse_net_args_ipv6_error3() {
+    let entries = svec!["::"];
+    assert!(parse(entries).is_err());
+  }
+
+  #[test]
+  fn parse_net_args_ipv6_error4() {
+    let entries = svec!["::cafe"];
+    assert!(parse(entries).is_err());
+  }
+
+  #[test]
+  fn parse_net_args_ipv6_error5() {
+    let entries = svec!["1::1"];
+    assert!(parse(entries).is_err());
+  }
+
+  #[test]
+  fn parse_net_args_ipv6_error6() {
+    let entries = svec!["0123:4567:890a:bcde:fg::"];
+    assert!(parse(entries).is_err());
+  }
+
+  #[test]
+  fn parse_net_args_ipv6_error7() {
+    let entries = svec!["[::q]:8080"];
+    assert!(parse(entries).is_err());
+  }
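The rewritten `parse` keeps only one special case itself -- a bare port expands to the three default hosts -- and hands every other entry to `NetDescriptor::parse` for validation. A self-contained sketch of just the bare-port expansion, with a minimal `BarePort` stand-in for the real newtype:

```rust
// Hypothetical stand-in for the BarePort newtype used in the diff above.
struct BarePort(u16);

impl std::str::FromStr for BarePort {
    type Err = std::num::ParseIntError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        s.parse::<u16>().map(BarePort)
    }
}

// A bare port expands to the three default hosts; anything else is
// passed through (the real code validates it via NetDescriptor::parse).
fn expand(host_and_port: &str) -> Vec<String> {
    match host_and_port.parse::<BarePort>() {
        Ok(port) => ["0.0.0.0", "127.0.0.1", "localhost"]
            .iter()
            .map(|host| format!("{}:{}", host, port.0))
            .collect(),
        Err(_) => vec![host_and_port.to_string()],
    }
}

fn main() {
    assert_eq!(
        expand("8080"),
        vec!["0.0.0.0:8080", "127.0.0.1:8080", "localhost:8080"]
    );
    assert_eq!(expand("deno.land:80"), vec!["deno.land:80"]);
}
```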
@@ -1,64 +1,24 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 use deno_core::error::AnyError;
 use deno_core::serde_json;
 use deno_core::url::Url;
-use deno_runtime::permissions::PermissionsContainer;
-use import_map::ImportMap;
-use import_map::ImportMapDiagnostic;
-use log::warn;
 
-use super::ConfigFile;
-use crate::file_fetcher::get_source_from_data_url;
 use crate::file_fetcher::FileFetcher;
 
-pub async fn resolve_import_map_from_specifier(
+pub async fn resolve_import_map_value_from_specifier(
   specifier: &Url,
-  maybe_config_file: Option<&ConfigFile>,
   file_fetcher: &FileFetcher,
-) -> Result<ImportMap, AnyError> {
-  let value: serde_json::Value = if specifier.scheme() == "data" {
-    serde_json::from_str(&get_source_from_data_url(specifier)?.0)?
+) -> Result<serde_json::Value, AnyError> {
+  if specifier.scheme() == "data" {
+    let data_url_text =
+      deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
+    Ok(serde_json::from_str(&data_url_text)?)
   } else {
-    let import_map_config = maybe_config_file
-      .as_ref()
-      .filter(|c| c.specifier == *specifier);
-    match import_map_config {
-      Some(config) => config.to_import_map_value(),
-      None => {
     let file = file_fetcher
-          .fetch(specifier, PermissionsContainer::allow_all())
-          .await?;
-        serde_json::from_str(&file.source)?
-      }
-    }
-  };
-  import_map_from_value(specifier, value)
-}
-
-fn import_map_from_value(
-  specifier: &Url,
-  json_value: serde_json::Value,
-) -> Result<ImportMap, AnyError> {
-  debug_assert!(
-    !specifier.as_str().contains("../"),
-    "Import map specifier incorrectly contained ../: {}",
-    specifier.as_str()
-  );
-  let result = import_map::parse_from_value(specifier, json_value)?;
-  print_import_map_diagnostics(&result.diagnostics);
-  Ok(result.import_map)
-}
-
-fn print_import_map_diagnostics(diagnostics: &[ImportMapDiagnostic]) {
-  if !diagnostics.is_empty() {
-    warn!(
-      "Import map diagnostics:\n{}",
-      diagnostics
-        .iter()
-        .map(|d| format!(" - {d}"))
-        .collect::<Vec<_>>()
-        .join("\n")
-    );
+      .fetch_bypass_permissions(specifier)
+      .await?
+      .into_text_decoded()?;
+    Ok(serde_json::from_str(&file.source)?)
   }
 }

@@ -1,65 +1,267 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+use std::collections::HashSet;
 use std::path::PathBuf;
-use std::sync::Arc;
 
+use deno_config::deno_json::ConfigFile;
+use deno_config::workspace::Workspace;
 use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;
-use deno_npm::registry::NpmRegistryApi;
-use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
+use deno_core::parking_lot::MutexGuard;
+use deno_lockfile::WorkspaceMemberConfig;
+use deno_package_json::PackageJsonDepValue;
+use deno_runtime::deno_node::PackageJson;
+use deno_semver::jsr::JsrDepPackageReq;
 
-use crate::args::ConfigFile;
+use crate::cache;
+use crate::util::fs::atomic_write_file_with_retries;
 use crate::Flags;
 
-use super::DenoSubcommand;
+use crate::args::DenoSubcommand;
+use crate::args::InstallFlags;
+use crate::args::InstallKind;
 
-pub use deno_lockfile::Lockfile;
-pub use deno_lockfile::LockfileError;
+use deno_lockfile::Lockfile;
 
+#[derive(Debug)]
+pub struct CliLockfileReadFromPathOptions {
+  pub file_path: PathBuf,
+  pub frozen: bool,
+  /// Causes the lockfile to only be read from, but not written to.
+  pub skip_write: bool,
+}
+
+#[derive(Debug)]
+pub struct CliLockfile {
+  lockfile: Mutex<Lockfile>,
+  pub filename: PathBuf,
+  frozen: bool,
+  skip_write: bool,
+}
+
+pub struct Guard<'a, T> {
+  guard: MutexGuard<'a, T>,
+}
+
+impl<'a, T> std::ops::Deref for Guard<'a, T> {
+  type Target = T;
+
+  fn deref(&self) -> &Self::Target {
+    &self.guard
+  }
+}
+
+impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
+  fn deref_mut(&mut self) -> &mut Self::Target {
+    &mut self.guard
+  }
+}
+
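The `Guard` wrapper added here is a small but useful trick: it re-exposes a `parking_lot::MutexGuard` through `Deref`/`DerefMut` so callers can work with the locked `Lockfile` directly, without the wrapper leaking which lock implementation is used internally. The same pattern with only the standard library, as a runnable sketch:

```rust
use std::sync::{Mutex, MutexGuard};

struct Guard<'a, T> {
    guard: MutexGuard<'a, T>,
}

impl<'a, T> std::ops::Deref for Guard<'a, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        &self.guard
    }
}

impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.guard
    }
}

struct Wrapper {
    inner: Mutex<Vec<u32>>,
}

impl Wrapper {
    // Callers get `&Vec<u32>` / `&mut Vec<u32>` ergonomics while the
    // lock type stays a private implementation detail.
    fn lock(&self) -> Guard<'_, Vec<u32>> {
        Guard {
            guard: self.inner.lock().unwrap(),
        }
    }
}

fn main() {
    let w = Wrapper { inner: Mutex::new(vec![1, 2]) };
    w.lock().push(3);
    assert_eq!(w.lock().len(), 3);
}
```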
+impl CliLockfile {
+  /// Get the inner deno_lockfile::Lockfile.
+  pub fn lock(&self) -> Guard<Lockfile> {
+    Guard {
+      guard: self.lockfile.lock(),
+    }
+  }
+
+  pub fn set_workspace_config(
+    &self,
+    options: deno_lockfile::SetWorkspaceConfigOptions,
+  ) {
+    self.lockfile.lock().set_workspace_config(options);
+  }
+
+  pub fn overwrite(&self) -> bool {
+    self.lockfile.lock().overwrite
+  }
+
+  pub fn write_if_changed(&self) -> Result<(), AnyError> {
+    if self.skip_write {
+      return Ok(());
+    }
+
+    self.error_if_changed()?;
+    let mut lockfile = self.lockfile.lock();
+    let Some(bytes) = lockfile.resolve_write_bytes() else {
+      return Ok(()); // nothing to do
+    };
+    // do an atomic write to reduce the chance of multiple deno
+    // processes corrupting the file
+    atomic_write_file_with_retries(
+      &lockfile.filename,
+      bytes,
+      cache::CACHE_PERM,
+    )
+    .context("Failed writing lockfile.")?;
+    lockfile.has_content_changed = false;
+    Ok(())
+  }
+
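`write_if_changed` relies on an atomic write so that concurrent deno processes never observe a half-written lockfile. A minimal sketch of the temp-file-plus-rename idea behind it, assuming the real `atomic_write_file_with_retries` additionally retries and handles permissions and missing parent directories:

```rust
use std::io;
use std::path::Path;

// "Atomic write" sketch: write a sibling temp file, then rename it over
// the target so readers never see partial contents.
fn atomic_write(path: &Path, bytes: &[u8]) -> io::Result<()> {
    let tmp = path.with_extension("tmp");
    std::fs::write(&tmp, bytes)?;
    // On the same filesystem, rename replaces the target in one step.
    std::fs::rename(&tmp, path)
}

fn main() -> io::Result<()> {
    let path = std::env::temp_dir().join("example.lock.json");
    atomic_write(&path, b"{}")?;
    assert_eq!(std::fs::read(&path)?, b"{}");
    Ok(())
}
```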
   pub fn discover(
     flags: &Flags,
-    maybe_config_file: Option<&ConfigFile>,
-  ) -> Result<Option<Lockfile>, AnyError> {
+    workspace: &Workspace,
+  ) -> Result<Option<CliLockfile>, AnyError> {
+    fn pkg_json_deps(
+      maybe_pkg_json: Option<&PackageJson>,
+    ) -> HashSet<JsrDepPackageReq> {
+      let Some(pkg_json) = maybe_pkg_json else {
+        return Default::default();
+      };
+      pkg_json
+        .resolve_local_package_json_deps()
+        .values()
+        .filter_map(|dep| dep.as_ref().ok())
+        .filter_map(|dep| match dep {
+          PackageJsonDepValue::Req(req) => {
+            Some(JsrDepPackageReq::npm(req.clone()))
+          }
+          PackageJsonDepValue::Workspace(_) => None,
+        })
+        .collect()
+    }
+
+    fn deno_json_deps(
+      maybe_deno_json: Option<&ConfigFile>,
+    ) -> HashSet<JsrDepPackageReq> {
+      maybe_deno_json
+        .map(|c| {
+          crate::args::deno_json::deno_json_deps(c)
+            .into_iter()
+            .collect()
+        })
+        .unwrap_or_default()
+    }
+
     if flags.no_lock
       || matches!(
         flags.subcommand,
-        DenoSubcommand::Install(_) | DenoSubcommand::Uninstall(_)
+        DenoSubcommand::Install(InstallFlags {
+          kind: InstallKind::Global(..),
+          ..
+        }) | DenoSubcommand::Uninstall(_)
       )
     {
       return Ok(None);
     }
 
-    let filename = match flags.lock {
+    let file_path = match flags.lock {
       Some(ref lock) => PathBuf::from(lock),
-      None => match maybe_config_file {
-        Some(config_file) => {
-          if config_file.specifier.scheme() == "file" {
-            match config_file.resolve_lockfile_path()? {
+      None => match workspace.resolve_lockfile_path()? {
         Some(path) => path,
         None => return Ok(None),
       },
-          } else {
-            return Ok(None);
-          }
-        }
-        None => return Ok(None),
-      },
     };
 
-    let lockfile = Lockfile::new(filename, flags.lock_write)?;
+    let root_folder = workspace.root_folder_configs();
+    // CLI flag takes precedence over the config
+    let frozen = flags.frozen_lockfile.unwrap_or_else(|| {
+      root_folder
+        .deno_json
+        .as_ref()
+        .and_then(|c| c.to_lock_config().ok().flatten().map(|c| c.frozen()))
+        .unwrap_or(false)
+    });
+
+    let lockfile = Self::read_from_path(CliLockfileReadFromPathOptions {
+      file_path,
+      frozen,
+      skip_write: flags.internal.lockfile_skip_write,
+    })?;
+
+    // initialize the lockfile with the workspace's configuration
+    let root_url = workspace.root_dir();
+    let config = deno_lockfile::WorkspaceConfig {
+      root: WorkspaceMemberConfig {
+        package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
+        dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
+      },
+      members: workspace
+        .config_folders()
+        .iter()
+        .filter(|(folder_url, _)| *folder_url != root_url)
+        .filter_map(|(folder_url, folder)| {
+          Some((
+            {
+              // should never be None here, but just ignore members that
+              // do fail for this
+              let mut relative_path = root_url.make_relative(folder_url)?;
+              if relative_path.ends_with('/') {
+                // make it slightly cleaner by removing the trailing slash
+                relative_path.pop();
+              }
+              relative_path
+            },
+            {
+              let config = WorkspaceMemberConfig {
+                package_json_deps: pkg_json_deps(folder.pkg_json.as_deref()),
+                dependencies: deno_json_deps(folder.deno_json.as_deref()),
+              };
+              if config.package_json_deps.is_empty()
+                && config.dependencies.is_empty()
+              {
+                // exclude empty workspace members
+                return None;
+              }
+              config
+            },
+          ))
+        })
+        .collect(),
+    };
+    lockfile.set_workspace_config(deno_lockfile::SetWorkspaceConfigOptions {
+      no_npm: flags.no_npm,
+      no_config: flags.config_flag == super::ConfigFlag::Disabled,
+      config,
+    });
 
     Ok(Some(lockfile))
   }
 
-  pub async fn snapshot_from_lockfile(
-    lockfile: Arc<Mutex<Lockfile>>,
-    api: &dyn NpmRegistryApi,
-  ) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
-    let incomplete_snapshot = {
-      let lock = lockfile.lock();
-      deno_npm::resolution::incomplete_snapshot_from_lockfile(&lock)?
-    };
-    let snapshot =
-      deno_npm::resolution::snapshot_from_lockfile(incomplete_snapshot, api)
-        .await?;
-    Ok(snapshot)
+  pub fn read_from_path(
+    opts: CliLockfileReadFromPathOptions,
+  ) -> Result<CliLockfile, AnyError> {
+    let lockfile = match std::fs::read_to_string(&opts.file_path) {
+      Ok(text) => Lockfile::new(deno_lockfile::NewLockfileOptions {
+        file_path: opts.file_path,
+        content: &text,
+        overwrite: false,
+      })?,
+      Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+        Lockfile::new_empty(opts.file_path, false)
+      }
+      Err(err) => {
+        return Err(err).with_context(|| {
+          format!("Failed reading lockfile '{}'", opts.file_path.display())
+        });
+      }
+    };
+    Ok(CliLockfile {
+      filename: lockfile.filename.clone(),
+      lockfile: Mutex::new(lockfile),
+      frozen: opts.frozen,
+      skip_write: opts.skip_write,
+    })
+  }
+
+  pub fn error_if_changed(&self) -> Result<(), AnyError> {
+    if !self.frozen {
+      return Ok(());
+    }
+    let lockfile = self.lockfile.lock();
+    if lockfile.has_content_changed {
+      let contents =
+        std::fs::read_to_string(&lockfile.filename).unwrap_or_default();
+      let new_contents = lockfile.as_json_string();
+      let diff = crate::util::diff::diff(&contents, &new_contents);
+      // has an extra newline at the end
+      let diff = diff.trim_end();
+      Err(deno_core::anyhow::anyhow!(
+        "The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
+      ))
+    } else {
+      Ok(())
+    }
+  }
+}

1947  cli/args/mod.rs
File diff suppressed because it is too large
@@ -1,298 +1,156 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
-use std::collections::BTreeMap;
-use std::collections::HashMap;
-use std::path::Path;
 use std::path::PathBuf;
+use std::sync::Arc;
 
-use deno_core::anyhow::bail;
-use deno_core::error::AnyError;
-use deno_npm::registry::parse_dep_entry_name_and_raw_version;
-use deno_npm::registry::PackageDepNpmSchemeValueParseError;
-use deno_runtime::deno_node::PackageJson;
+use deno_config::workspace::Workspace;
+use deno_core::serde_json;
+use deno_package_json::PackageJsonDepValue;
+use deno_package_json::PackageJsonDepValueParseError;
+use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageReq;
-use deno_semver::VersionReq;
-use deno_semver::VersionReqSpecifierParseError;
-use thiserror::Error;
 
-#[derive(Debug, Error, Clone)]
-pub enum PackageJsonDepValueParseError {
-  #[error(transparent)]
-  SchemeValue(#[from] PackageDepNpmSchemeValueParseError),
-  #[error(transparent)]
-  Specifier(#[from] VersionReqSpecifierParseError),
-  #[error("Not implemented scheme '{scheme}'")]
-  Unsupported { scheme: String },
+#[derive(Debug)]
+pub struct InstallNpmRemotePkg {
+  pub alias: Option<String>,
+  pub base_dir: PathBuf,
+  pub req: PackageReq,
 }
 
-pub type PackageJsonDeps =
-  BTreeMap<String, Result<PackageReq, PackageJsonDepValueParseError>>;
+#[derive(Debug)]
+pub struct InstallNpmWorkspacePkg {
+  pub alias: Option<String>,
+  pub target_dir: PathBuf,
+}
 
 #[derive(Debug, Default)]
-pub struct PackageJsonDepsProvider(Option<PackageJsonDeps>);
-
-impl PackageJsonDepsProvider {
-  pub fn new(deps: Option<PackageJsonDeps>) -> Self {
-    Self(deps)
+pub struct NpmInstallDepsProvider {
+  remote_pkgs: Vec<InstallNpmRemotePkg>,
+  workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
+  pkg_json_dep_errors: Vec<PackageJsonDepValueParseError>,
 }
 
-  pub fn deps(&self) -> Option<&PackageJsonDeps> {
-    self.0.as_ref()
+impl NpmInstallDepsProvider {
+  pub fn empty() -> Self {
+    Self::default()
   }
 
-  pub fn reqs(&self) -> Vec<&PackageReq> {
-    match &self.0 {
-      Some(deps) => {
-        let mut package_reqs = deps
-          .values()
-          .filter_map(|r| r.as_ref().ok())
-          .collect::<Vec<_>>();
-        package_reqs.sort(); // deterministic resolution
-        package_reqs
-      }
-      None => Vec::new(),
-    }
-  }
-}
+  pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
+    // todo(dsherret): estimate capacity?
+    let mut workspace_pkgs = Vec::new();
+    let mut remote_pkgs = Vec::new();
+    let mut pkg_json_dep_errors = Vec::new();
+    let workspace_npm_pkgs = workspace.npm_packages();
 
-/// Gets an application level package.json's npm package requirements.
-///
-/// Note that this function is not general purpose. It is specifically for
-/// parsing the application level package.json that the user has control
-/// over. This is a design limitation to allow mapping these dependency
-/// entries to npm specifiers which can then be used in the resolver.
-pub fn get_local_package_json_version_reqs(
-  package_json: &PackageJson,
-) -> PackageJsonDeps {
-  fn parse_entry(
-    key: &str,
-    value: &str,
-  ) -> Result<PackageReq, PackageJsonDepValueParseError> {
-    if value.starts_with("workspace:")
-      || value.starts_with("file:")
-      || value.starts_with("git:")
-      || value.starts_with("http:")
-      || value.starts_with("https:")
-    {
-      return Err(PackageJsonDepValueParseError::Unsupported {
-        scheme: value.split(':').next().unwrap().to_string(),
+    for (_, folder) in workspace.config_folders() {
+      // deal with the deno.json first because it takes precedence during resolution
+      if let Some(deno_json) = &folder.deno_json {
+        // don't bother with externally referenced import maps as users
+        // should inline their import map to get this behaviour
+        if let Some(serde_json::Value::Object(obj)) = &deno_json.json.imports {
+          let mut pkg_pkgs = Vec::with_capacity(obj.len());
+          for (_alias, value) in obj {
+            let serde_json::Value::String(specifier) = value else {
+              continue;
+            };
+            let Ok(npm_req_ref) = NpmPackageReqReference::from_str(specifier)
+            else {
+              continue;
+            };
+            let pkg_req = npm_req_ref.into_inner().req;
+            let workspace_pkg = workspace_npm_pkgs
+              .iter()
+              .find(|pkg| pkg.matches_req(&pkg_req));
+
+            if let Some(pkg) = workspace_pkg {
+              workspace_pkgs.push(InstallNpmWorkspacePkg {
+                alias: None,
+                target_dir: pkg.pkg_json.dir_path().to_path_buf(),
+              });
+            } else {
+              pkg_pkgs.push(InstallNpmRemotePkg {
+                alias: None,
+                base_dir: deno_json.dir_path(),
+                req: pkg_req,
+              });
+            }
-    let (name, version_req) = parse_dep_entry_name_and_raw_version(key, value)
-      .map_err(PackageJsonDepValueParseError::SchemeValue)?;
+          }
 
-    let result = VersionReq::parse_from_specifier(version_req);
-    match result {
-      Ok(version_req) => Ok(PackageReq {
-        name: name.to_string(),
-        version_req,
-      }),
-      Err(err) => Err(PackageJsonDepValueParseError::Specifier(err)),
+          // sort within each package (more like npm resolution)
+          pkg_pkgs.sort_by(|a, b| a.req.cmp(&b.req));
+          remote_pkgs.extend(pkg_pkgs);
         }
       }
 
-  fn insert_deps(
-    deps: Option<&HashMap<String, String>>,
-    result: &mut PackageJsonDeps,
-  ) {
-    if let Some(deps) = deps {
-      for (key, value) in deps {
-        result.insert(key.to_string(), parse_entry(key, value));
-      }
-    }
-  }
-
-  let deps = package_json.dependencies.as_ref();
-  let dev_deps = package_json.dev_dependencies.as_ref();
-  let mut result = BTreeMap::new();
-
-  // insert the dev dependencies first so the dependencies will
-  // take priority and overwrite any collisions
-  insert_deps(dev_deps, &mut result);
-  insert_deps(deps, &mut result);
-
-  result
-}
-
-/// Attempts to discover the package.json file, maybe stopping when it
-/// reaches the specified `maybe_stop_at` directory.
-pub fn discover_from(
-  start: &Path,
-  maybe_stop_at: Option<PathBuf>,
-) -> Result<Option<PackageJson>, AnyError> {
-  const PACKAGE_JSON_NAME: &str = "package.json";
-
-  // note: ancestors() includes the `start` path
-  for ancestor in start.ancestors() {
-    let path = ancestor.join(PACKAGE_JSON_NAME);
-
-    let source = match std::fs::read_to_string(&path) {
-      Ok(source) => source,
-      Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-        if let Some(stop_at) = maybe_stop_at.as_ref() {
-          if ancestor == stop_at {
-            break;
-          }
-        }
+      if let Some(pkg_json) = &folder.pkg_json {
+        let deps = pkg_json.resolve_local_package_json_deps();
+        let mut pkg_pkgs = Vec::with_capacity(deps.len());
+        for (alias, dep) in deps {
+          let dep = match dep {
+            Ok(dep) => dep,
+            Err(err) => {
+              pkg_json_dep_errors.push(err);
+              continue;
+            }
-      Err(err) => bail!(
-        "Error loading package.json at {}. {:#}",
-        path.display(),
-        err
-      ),
          };
+          match dep {
+            PackageJsonDepValue::Req(pkg_req) => {
+              let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
+                pkg.matches_req(&pkg_req)
+                  // do not resolve to the current package
+                  && pkg.pkg_json.path != pkg_json.path
+              });
 
-    let package_json = PackageJson::load_from_string(path.clone(), source)?;
-    log::debug!("package.json file found at '{}'", path.display());
-    return Ok(Some(package_json));
+              if let Some(pkg) = workspace_pkg {
+                workspace_pkgs.push(InstallNpmWorkspacePkg {
+                  alias: Some(alias),
+                  target_dir: pkg.pkg_json.dir_path().to_path_buf(),
+                });
+              } else {
+                pkg_pkgs.push(InstallNpmRemotePkg {
+                  alias: Some(alias),
+                  base_dir: pkg_json.dir_path().to_path_buf(),
+                  req: pkg_req,
+                });
+              }
+            }
 
-  log::debug!("No package.json file found");
-  Ok(None)
-}
-
-#[cfg(test)]
-mod test {
-  use pretty_assertions::assert_eq;
-  use std::path::PathBuf;
-
-  use super::*;
-
-  #[test]
-  fn test_parse_dep_entry_name_and_raw_version() {
-    let cases = [
-      ("test", "^1.2", Ok(("test", "^1.2"))),
-      ("test", "1.x - 2.6", Ok(("test", "1.x - 2.6"))),
-      ("test", "npm:package@^1.2", Ok(("package", "^1.2"))),
-      (
-        "test",
-        "npm:package",
-        Err("Could not find @ symbol in npm url 'npm:package'"),
-      ),
-    ];
-    for (key, value, expected_result) in cases {
-      let result = parse_dep_entry_name_and_raw_version(key, value);
-      match result {
-        Ok(result) => assert_eq!(result, expected_result.unwrap()),
-        Err(err) => assert_eq!(err.to_string(), expected_result.err().unwrap()),
+            PackageJsonDepValue::Workspace(version_req) => {
+              if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
+                pkg.matches_name_and_version_req(&alias, &version_req)
+              }) {
+                workspace_pkgs.push(InstallNpmWorkspacePkg {
+                  alias: Some(alias),
+                  target_dir: pkg.pkg_json.dir_path().to_path_buf(),
+                });
+              }
+            }
           }
         }
 
-  fn get_local_package_json_version_reqs_for_tests(
-    package_json: &PackageJson,
-  ) -> BTreeMap<String, Result<PackageReq, String>> {
-    get_local_package_json_version_reqs(package_json)
-      .into_iter()
-      .map(|(k, v)| {
-        (
-          k,
-          match v {
-            Ok(v) => Ok(v),
-            Err(err) => Err(err.to_string()),
-          },
-        )
-      })
-      .collect::<BTreeMap<_, _>>()
+        // sort within each package as npm does
+        pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
+        remote_pkgs.extend(pkg_pkgs);
       }
     }
 
-  #[test]
-  fn test_get_local_package_json_version_reqs() {
-    let mut package_json = PackageJson::empty(PathBuf::from("/package.json"));
-    package_json.dependencies = Some(HashMap::from([
-      ("test".to_string(), "^1.2".to_string()),
-      ("other".to_string(), "npm:package@~1.3".to_string()),
-    ]));
-    package_json.dev_dependencies = Some(HashMap::from([
-      ("package_b".to_string(), "~2.2".to_string()),
-      // should be ignored
-      ("other".to_string(), "^3.2".to_string()),
-    ]));
-    let deps = get_local_package_json_version_reqs_for_tests(&package_json);
-    assert_eq!(
-      deps,
-      BTreeMap::from([
-        (
-          "test".to_string(),
-          Ok(PackageReq::from_str("test@^1.2").unwrap())
-        ),
-        (
-          "other".to_string(),
-          Ok(PackageReq::from_str("package@~1.3").unwrap())
-        ),
-        (
-          "package_b".to_string(),
-          Ok(PackageReq::from_str("package_b@~2.2").unwrap())
-        )
-      ])
-    );
+    remote_pkgs.shrink_to_fit();
+    workspace_pkgs.shrink_to_fit();
+    Self {
+      remote_pkgs,
+      workspace_pkgs,
+      pkg_json_dep_errors,
     }
   }
 
-  #[test]
-  fn test_get_local_package_json_version_reqs_errors_non_npm_specifier() {
-    let mut package_json = PackageJson::empty(PathBuf::from("/package.json"));
-    package_json.dependencies = Some(HashMap::from([(
-      "test".to_string(),
-      "1.x - 1.3".to_string(),
-    )]));
-    let map = get_local_package_json_version_reqs_for_tests(&package_json);
-    assert_eq!(
-      map,
-      BTreeMap::from([(
-        "test".to_string(),
-        Err(
-          concat!(
-            "Invalid specifier version requirement. Unexpected character.\n",
-            "  - 1.3\n",
-            "  ~"
-          )
-          .to_string()
-        )
-      )])
-    );
+  pub fn remote_pkgs(&self) -> &[InstallNpmRemotePkg] {
+    &self.remote_pkgs
   }
 
-  #[test]
-  fn test_get_local_package_json_version_reqs_skips_certain_specifiers() {
-    let mut package_json = PackageJson::empty(PathBuf::from("/package.json"));
-    package_json.dependencies = Some(HashMap::from([
-      ("test".to_string(), "1".to_string()),
-      ("work-test".to_string(), "workspace:1.1.1".to_string()),
-      ("file-test".to_string(), "file:something".to_string()),
-      ("git-test".to_string(), "git:something".to_string()),
-      ("http-test".to_string(), "http://something".to_string()),
-      ("https-test".to_string(), "https://something".to_string()),
-    ]));
-    let result = get_local_package_json_version_reqs_for_tests(&package_json);
-    assert_eq!(
-      result,
-      BTreeMap::from([
-        (
-          "file-test".to_string(),
-          Err("Not implemented scheme 'file'".to_string()),
-        ),
-        (
-          "git-test".to_string(),
-          Err("Not implemented scheme 'git'".to_string()),
-        ),
-        (
-          "http-test".to_string(),
-          Err("Not implemented scheme 'http'".to_string()),
-        ),
-        (
-          "https-test".to_string(),
-          Err("Not implemented scheme 'https'".to_string()),
-        ),
-        (
-          "test".to_string(),
-          Ok(PackageReq::from_str("test@1").unwrap())
-        ),
-        (
-          "work-test".to_string(),
-          Err("Not implemented scheme 'workspace'".to_string())
-        )
-      ])
-    );
+  pub fn workspace_pkgs(&self) -> &[InstallNpmWorkspacePkg] {
+    &self.workspace_pkgs
   }
 
+  pub fn pkg_json_dep_errors(&self) -> &[PackageJsonDepValueParseError] {
+    &self.pkg_json_dep_errors
+  }
 }

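The new `from_workspace` constructor effectively partitions each requirement into a workspace install (link a local member) or a remote install (fetch from the registry). A simplified, std-only sketch of that partitioning, with naive name parsing standing in for the real `PackageReq` matching:

```rust
use std::collections::HashSet;

// Install plan entry: link a local member, or fetch from npm.
#[derive(Debug)]
enum Install {
    Workspace(String),
    Remote(String),
}

fn partition(reqs: &[&str], workspace_members: &HashSet<&str>) -> Vec<Install> {
    reqs
        .iter()
        .map(|&req| {
            // Treat everything before the last '@' as the package name.
            let name = req.rsplit_once('@').map(|(n, _)| n).unwrap_or(req);
            if workspace_members.contains(name) {
                Install::Workspace(name.to_string())
            } else {
                Install::Remote(req.to_string())
            }
        })
        .collect()
}

fn main() {
    let members: HashSet<_> = ["@scope/member"].into_iter().collect();
    let plan = partition(&["@scope/member@1.0.0", "chalk@5"], &members);
    println!("{plan:?}"); // [Workspace("@scope/member"), Remote("chalk@5")]
}
```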
@@ -1,9 +1,17 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+use base64::prelude::BASE64_STANDARD;
+use base64::Engine;
 use deno_core::ModuleSpecifier;
 use log::debug;
 use log::error;
+use std::borrow::Cow;
 use std::fmt;
+use std::net::IpAddr;
+use std::net::Ipv4Addr;
+use std::net::Ipv6Addr;
+use std::net::SocketAddr;
+use std::str::FromStr;
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum AuthTokenData {

@@ -13,7 +21,7 @@ pub enum AuthTokenData {
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct AuthToken {
-  host: String,
+  host: AuthDomain,
   token: AuthTokenData,
 }
 

@@ -23,7 +31,7 @@ impl fmt::Display for AuthToken {
       AuthTokenData::Bearer(token) => write!(f, "Bearer {token}"),
       AuthTokenData::Basic { username, password } => {
         let credentials = format!("{username}:{password}");
-        write!(f, "Basic {}", base64::encode(credentials))
+        write!(f, "Basic {}", BASE64_STANDARD.encode(credentials))
       }
     }
   }

@@ -35,6 +43,78 @@ impl fmt::Display for AuthToken {
 #[derive(Debug, Clone)]
 pub struct AuthTokens(Vec<AuthToken>);
 
+/// An authorization domain, either an exact or suffix match.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum AuthDomain {
+  Ip(IpAddr),
+  IpPort(SocketAddr),
+  /// Suffix match, no dot. May include a port.
+  Suffix(Cow<'static, str>),
+}
+
+impl<T: ToString> From<T> for AuthDomain {
+  fn from(value: T) -> Self {
+    let s = value.to_string().to_lowercase();
+    if let Ok(ip) = SocketAddr::from_str(&s) {
+      return AuthDomain::IpPort(ip);
+    };
+    if s.starts_with('[') && s.ends_with(']') {
+      if let Ok(ip) = Ipv6Addr::from_str(&s[1..s.len() - 1]) {
+        return AuthDomain::Ip(ip.into());
+      }
+    } else if let Ok(ip) = Ipv4Addr::from_str(&s) {
+      return AuthDomain::Ip(ip.into());
+    }
+    if let Some(s) = s.strip_prefix('.') {
+      AuthDomain::Suffix(Cow::Owned(s.to_owned()))
+    } else {
+      AuthDomain::Suffix(Cow::Owned(s))
+    }
+  }
+}
+
+impl AuthDomain {
+  pub fn matches(&self, specifier: &ModuleSpecifier) -> bool {
+    let Some(host) = specifier.host_str() else {
+      return false;
+    };
+    match *self {
+      Self::Ip(ip) => {
+        let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
+          return false;
+        };
+        ip == parsed && specifier.port().is_none()
+      }
+      Self::IpPort(ip) => {
+        let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
+          return false;
+        };
+        ip.ip() == parsed && specifier.port() == Some(ip.port())
+      }
+      Self::Suffix(ref suffix) => {
+        let hostname = if let Some(port) = specifier.port() {
+          Cow::Owned(format!("{}:{}", host, port))
+        } else {
+          Cow::Borrowed(host)
+        };
+
+        if suffix.len() == hostname.len() {
+          return suffix == &hostname;
+        }
+
+        // If it's a suffix match, ensure a dot
+        if hostname.ends_with(suffix.as_ref())
+          && hostname.ends_with(&format!(".{suffix}"))
+        {
+          return true;
+        }
+
+        false
+      }
+    }
+  }
+}
+
 impl AuthTokens {
   /// Create a new set of tokens based on the provided string. It is intended
   /// that the string be the value of an environment variable and the string is
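The `Suffix` arm above accepts either an exact match or a longer hostname that matches on a dot boundary; that boundary check is what keeps `notexample.com` from matching the domain `example.com`. That rule in isolation, as a runnable sketch:

```rust
// A std-only sketch of the suffix rule: "example.com" must match
// "example.com" and "www.example.com" but never "notexample.com".
fn suffix_matches(suffix: &str, hostname: &str) -> bool {
    if suffix.len() == hostname.len() {
        return suffix == hostname;
    }
    // Longer hostnames only match on a dot boundary.
    hostname.ends_with(suffix) && hostname.ends_with(&format!(".{suffix}"))
}

fn main() {
    assert!(suffix_matches("example.com", "example.com"));
    assert!(suffix_matches("example.com", "www.example.com"));
    assert!(!suffix_matches("example.com", "notexample.com"));
    assert!(!suffix_matches("example.com", "example.com.evil.com"));
}
```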
@@ -43,19 +123,19 @@ impl AuthTokens {
   pub fn new(maybe_tokens_str: Option<String>) -> Self {
     let mut tokens = Vec::new();
     if let Some(tokens_str) = maybe_tokens_str {
-      for token_str in tokens_str.split(';') {
+      for token_str in tokens_str.trim().split(';') {
         if token_str.contains('@') {
-          let pair: Vec<&str> = token_str.rsplitn(2, '@').collect();
-          let token = pair[1];
-          let host = pair[0].to_lowercase();
+          let mut iter = token_str.rsplitn(2, '@');
+          let host = AuthDomain::from(iter.next().unwrap());
+          let token = iter.next().unwrap();
           if token.contains(':') {
-            let pair: Vec<&str> = token.rsplitn(2, ':').collect();
-            let username = pair[1].to_string();
-            let password = pair[0].to_string();
+            let mut iter = token.rsplitn(2, ':');
+            let password = iter.next().unwrap().to_owned();
+            let username = iter.next().unwrap().to_owned();
             tokens.push(AuthToken {
               host,
               token: AuthTokenData::Basic { username, password },
-            })
+            });
           } else {
             tokens.push(AuthToken {
               host,
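The parsing above consumes a `;`-separated list of `token@host` and `username:password@host` entries (the `DENO_AUTH_TOKENS` format), using `rsplitn` so tokens may themselves contain `@` or `:`. A compact sketch of parsing one entry, with the base64 encoding of basic credentials elided:

```rust
// Returns (host, authorization header value) for one entry, or None
// when the entry has no '@' separator.
fn parse_entry(entry: &str) -> Option<(String, String)> {
    let mut iter = entry.rsplitn(2, '@');
    let host = iter.next()?.to_lowercase();
    let token = iter.next()?;
    let header = if token.contains(':') {
        let mut iter = token.rsplitn(2, ':');
        let password = iter.next()?;
        let username = iter.next()?;
        format!("Basic {username}:{password}") // real code base64-encodes this
    } else {
        format!("Bearer {token}")
    };
    Some((host, header))
}

fn main() {
    let tokens = "abc123@deno.land;user:pass@example.com";
    for entry in tokens.trim().split(';') {
        if let Some((host, header)) = parse_entry(entry) {
            println!("{host} -> {header}");
        }
    }
}
```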
@@ -79,12 +159,7 @@ impl AuthTokens {
   /// matching is case insensitive.
   pub fn get(&self, specifier: &ModuleSpecifier) -> Option<AuthToken> {
     self.0.iter().find_map(|t| {
-      let hostname = if let Some(port) = specifier.port() {
-        format!("{}:{}", specifier.host_str()?, port)
-      } else {
-        specifier.host_str()?.to_string()
-      };
-      if hostname.to_lowercase().ends_with(&t.host) {
+      if t.host.matches(specifier) {
         Some(t.clone())
       } else {
         None

@@ -136,6 +211,40 @@ mod tests {
     );
   }
 
+  #[test]
+  fn test_auth_tokens_space() {
+    let auth_tokens = AuthTokens::new(Some(
+      " abc123@deno.land;def456@example.com\t".to_string(),
+    ));
+    let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
+    assert_eq!(
+      auth_tokens.get(&fixture).unwrap().to_string(),
+      "Bearer abc123".to_string()
+    );
+    let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
+    assert_eq!(
+      auth_tokens.get(&fixture).unwrap().to_string(),
+      "Bearer def456".to_string()
+    );
+  }
+
+  #[test]
+  fn test_auth_tokens_newline() {
+    let auth_tokens = AuthTokens::new(Some(
+      "\nabc123@deno.land;def456@example.com\n".to_string(),
+    ));
+    let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
+    assert_eq!(
+      auth_tokens.get(&fixture).unwrap().to_string(),
+      "Bearer abc123".to_string()
+    );
+    let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
+    assert_eq!(
+      auth_tokens.get(&fixture).unwrap().to_string(),
+      "Bearer def456".to_string()
+    );
+  }
+
   #[test]
   fn test_auth_tokens_port() {
     let auth_tokens =

@@ -180,4 +289,81 @@ mod tests {
     let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
     assert_eq!(auth_tokens.get(&fixture), None);
   }
+
+  #[test]
+  fn test_parse_ip() {
+    let ip = AuthDomain::from("[2001:db8:a::123]");
+    assert_eq!("Ip(2001:db8:a::123)", format!("{ip:?}"));
+    let ip = AuthDomain::from("[2001:db8:a::123]:8080");
+    assert_eq!("IpPort([2001:db8:a::123]:8080)", format!("{ip:?}"));
+    let ip = AuthDomain::from("1.1.1.1");
+    assert_eq!("Ip(1.1.1.1)", format!("{ip:?}"));
+  }
+
+  #[test]
+  fn test_case_insensitive() {
+    let domain = AuthDomain::from("EXAMPLE.com");
+    assert!(
+      domain.matches(&ModuleSpecifier::parse("http://example.com").unwrap())
+    );
+    assert!(
+      domain.matches(&ModuleSpecifier::parse("http://example.COM").unwrap())
+    );
+  }
+
+  #[test]
+  fn test_matches() {
+    let candidates = [
+      "example.com",
+      "www.example.com",
+      "1.1.1.1",
+      "[2001:db8:a::123]",
+      // These will never match
+      "example.com.evil.com",
+      "1.1.1.1.evil.com",
+      "notexample.com",
+      "www.notexample.com",
+    ];
+    let domains = [
+      ("example.com", vec!["example.com", "www.example.com"]),
+      (".example.com", vec!["example.com", "www.example.com"]),
+      ("www.example.com", vec!["www.example.com"]),
+      ("1.1.1.1", vec!["1.1.1.1"]),
+      ("[2001:db8:a::123]", vec!["[2001:db8:a::123]"]),
+    ];
+    let url = |c: &str| ModuleSpecifier::parse(&format!("http://{c}")).unwrap();
+    let url_port =
+      |c: &str| ModuleSpecifier::parse(&format!("http://{c}:8080")).unwrap();
+
+    // Generate each candidate with and without a port
+    let candidates = candidates
+      .into_iter()
+      .flat_map(|c| [url(c), url_port(c)])
+      .collect::<Vec<_>>();
+
+    for (domain, expected_domain) in domains {
+      // Test without a port -- all candidates return without a port
+      let auth_domain = AuthDomain::from(domain);
+      let actual = candidates
+        .iter()
+        .filter(|c| auth_domain.matches(c))
+        .cloned()
+        .collect::<Vec<_>>();
+      let expected = expected_domain.iter().map(|u| url(u)).collect::<Vec<_>>();
+      assert_eq!(actual, expected);
+
+      // Test with a port, all candidates return with a port
+      let auth_domain = AuthDomain::from(&format!("{domain}:8080"));
+      let actual = candidates
+        .iter()
+        .filter(|c| auth_domain.matches(c))
+        .cloned()
+        .collect::<Vec<_>>();
+      let expected = expected_domain
+        .iter()
+        .map(|u| url_port(u))
+        .collect::<Vec<_>>();
+      assert_eq!(actual, expected);
+    }
+  }
+}

@@ -1,22 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
-let [total, count] = typeof Deno !== "undefined"
-  ? Deno.args
-  : [process.argv[2], process.argv[3]];
-
-total = total ? parseInt(total, 0) : 50;
-count = count ? parseInt(count, 10) : 1000000;
-
-async function bench(fun) {
-  const start = Date.now();
-  for (let i = 0; i < count; i++) await fun();
-  const elapsed = Date.now() - start;
-  const rate = Math.floor(count / (elapsed / 1000));
-  console.log(`time ${elapsed} ms rate ${rate}`);
-  if (--total) queueMicrotask(() => bench(fun));
-}
-
-const core = Deno[Deno.internal].core;
-const ops = core.ops;
-const opVoidAsync = ops.op_void_async;
-bench(() => opVoidAsync());
@@ -1,22 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
-let [total, count] = typeof Deno !== "undefined"
-  ? Deno.args
-  : [process.argv[2], process.argv[3]];
-
-total = total ? parseInt(total, 0) : 50;
-count = count ? parseInt(count, 10) : 1000000;
-
-async function bench(fun) {
-  const start = Date.now();
-  for (let i = 0; i < count; i++) await fun();
-  const elapsed = Date.now() - start;
-  const rate = Math.floor(count / (elapsed / 1000));
-  console.log(`time ${elapsed} ms rate ${rate}`);
-  if (--total) queueMicrotask(() => bench(fun));
-}
-
-const core = Deno[Deno.internal].core;
-const ops = core.ops;
-const opVoidAsyncDeferred = ops.op_void_async_deferred;
-bench(() => opVoidAsyncDeferred());
@@ -1,4 +1,4 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 const cacheName = "cache-v1";
 const cache = await caches.open(cacheName);

@@ -1,4 +1,4 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 Deno.bench("echo deno", async () => {
   await new Deno.Command("echo", { args: ["deno"] }).output();

@@ -1,3 +1,6 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console
 
 const count = 100000;
 for (let i = 0; i < count; i++) console.log("Hello World");
@@ -1,41 +1,14 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 // v8 builtin that's close to the upper bound non-NOPs
 Deno.bench("date_now", { n: 5e5 }, () => {
   Date.now();
 });
 
-// Fast API calls
-{
-  // deno-lint-ignore camelcase
-  const { op_add } = Deno[Deno.internal].core.ops;
-  // deno-lint-ignore no-inner-declarations
-  function add(a, b) {
-    return op_add(a, b);
-  }
-  // deno-lint-ignore no-inner-declarations
-  function addJS(a, b) {
-    return a + b;
-  }
-  Deno.bench("op_add", () => add(1, 2));
-  Deno.bench("add_js", () => addJS(1, 2));
-}
-
-// deno-lint-ignore camelcase
-const { op_void_sync } = Deno[Deno.internal].core.ops;
-function sync() {
-  return op_void_sync();
-}
-sync(); // Warmup
-
-// Void ops measure op-overhead
-Deno.bench("op_void_sync", () => sync());
-
-Deno.bench(
-  "op_void_async",
-  { n: 1e6 },
-  () => Deno[Deno.internal].core.opAsync("op_void_async"),
-);
-
 // A very lightweight op, that should be highly optimizable
 Deno.bench("perf_now", { n: 5e5 }, () => {

@@ -43,8 +16,7 @@ Deno.bench("perf_now", { n: 5e5 }, () => {
 });
 
 Deno.bench("open_file_sync", () => {
-  const file = Deno.openSync("./cli/bench/testdata/128k.bin");
-  file.close();
+  using _file = Deno.openSync("./cli/bench/testdata/128k.bin");
 });
 
 // A common "language feature", that should be fast

@@ -74,7 +46,7 @@ Deno.bench("b64_rt_short", { n: 1e6 }, () => {
   const buf = new Uint8Array(100);
   const file = Deno.openSync("/dev/zero");
   Deno.bench("read_zero", { n: 5e5 }, () => {
-    Deno.readSync(file.rid, buf);
+    file.readSync(buf);
   });
 }
 
@@ -83,7 +55,7 @@ Deno.bench("b64_rt_short", { n: 1e6 }, () => {
   const dataChunk = new Uint8Array(100);
   const file = Deno.openSync("/dev/null", { write: true });
   Deno.bench("write_null", { n: 5e5 }, () => {
-    Deno.writeSync(file.rid, dataChunk);
+    file.writeSync(dataChunk);
   });
 }
 
@@ -1,4 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console
 
 let [total, count] = typeof Deno !== "undefined"
   ? Deno.args
   : [process.argv[2], process.argv[3]];

@@ -1,4 +1,4 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 let total = 5;
 let current = "";

@@ -1,4 +1,4 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 /** @jsx h */
 import results from "./deno.json" assert { type: "json" };

@@ -1,4 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console
 
 let [total, count] = typeof Deno !== "undefined"
   ? Deno.args
   : [process.argv[2], process.argv[3]];
@@ -1,155 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-use std::collections::HashMap;
-use std::path::Path;
-use std::process::Command;
-use std::sync::atomic::AtomicU16;
-use std::sync::atomic::Ordering;
-use std::time::Duration;
-
-use super::Result;
-
-pub use test_util::parse_wrk_output;
-pub use test_util::WrkOutput as HttpBenchmarkResult;
-// Some of the benchmarks in this file have been renamed. In case the history
-// somehow gets messed up:
-//   "node_http" was once called "node"
-//   "deno_tcp" was once called "deno"
-//   "deno_http" was once called "deno_net_http"
-
-const DURATION: &str = "10s";
-
-pub fn benchmark(
-  target_path: &Path,
-) -> Result<HashMap<String, HttpBenchmarkResult>> {
-  let deno_exe = test_util::deno_exe_path();
-  let deno_exe = deno_exe.to_string();
-
-  let hyper_hello_exe = target_path.join("test_server");
-  let hyper_hello_exe = hyper_hello_exe.to_str().unwrap();
-
-  let mut res = HashMap::new();
-  let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
-  let http_dir = manifest_dir.join("bench").join("http");
-  for entry in std::fs::read_dir(&http_dir)? {
-    let entry = entry?;
-    let pathbuf = entry.path();
-    let path = pathbuf.to_str().unwrap();
-    if path.ends_with(".lua") {
-      continue;
-    }
-    let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap();
-
-    let lua_script = http_dir.join(format!("{file_stem}.lua"));
-    let mut maybe_lua = None;
-    if lua_script.exists() {
-      maybe_lua = Some(lua_script.to_str().unwrap());
-    }
-
-    let port = get_port();
-    // deno run -A --unstable <path> <addr>
-    res.insert(
-      file_stem.to_string(),
-      run(
-        &[
-          deno_exe.as_str(),
-          "run",
-          "--allow-all",
-          "--unstable",
-          "--enable-testing-features-do-not-use",
-          path,
-          &server_addr(port),
-        ],
-        port,
-        None,
-        None,
-        maybe_lua,
-      )?,
-    );
-  }
-
-  res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?);
-
-  Ok(res)
-}
-
-fn run(
-  server_cmd: &[&str],
-  port: u16,
-  env: Option<Vec<(String, String)>>,
-  origin_cmd: Option<&[&str]>,
-  lua_script: Option<&str>,
-) -> Result<HttpBenchmarkResult> {
-  // Wait for port 4544 to become available.
-  // TODO Need to use SO_REUSEPORT with tokio::net::TcpListener.
-  std::thread::sleep(Duration::from_secs(5));
-
-  let mut origin = None;
-  if let Some(cmd) = origin_cmd {
-    let mut com = Command::new(cmd[0]);
-    com.args(&cmd[1..]);
-    if let Some(env) = env.clone() {
-      com.envs(env);
-    }
-    origin = Some(com.spawn()?);
-  };
-
-  println!("{}", server_cmd.join(" "));
-  let mut server = {
-    let mut com = Command::new(server_cmd[0]);
-    com.args(&server_cmd[1..]);
-    if let Some(env) = env {
-      com.envs(env);
-    }
-    com.spawn()?
-  };
-
-  std::thread::sleep(Duration::from_secs(5)); // wait for server to wake up. TODO racy.
-
-  let wrk = test_util::prebuilt_tool_path("wrk");
-  assert!(wrk.is_file());
-
-  let addr = format!("http://127.0.0.1:{port}/");
-  let wrk = wrk.to_string();
-  let mut wrk_cmd = vec![wrk.as_str(), "-d", DURATION, "--latency", &addr];
-
-  if let Some(lua_script) = lua_script {
-    wrk_cmd.push("-s");
-    wrk_cmd.push(lua_script);
-  }
-
-  println!("{}", wrk_cmd.join(" "));
-  let output = test_util::run_collect(&wrk_cmd, None, None, None, true).0;
-
-  std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy.
-
-  println!("{output}");
-  assert!(
-    server.try_wait()?.map(|s| s.success()).unwrap_or(true),
-    "server ended with error"
-  );
-
-  server.kill()?;
-  if let Some(mut origin) = origin {
-    origin.kill()?;
-  }
-
-  Ok(parse_wrk_output(&output))
-}
-
-static NEXT_PORT: AtomicU16 = AtomicU16::new(4544);
-pub(crate) fn get_port() -> u16 {
-  let p = NEXT_PORT.load(Ordering::SeqCst);
-  NEXT_PORT.store(p.wrapping_add(1), Ordering::SeqCst);
-  p
-}
-
-fn server_addr(port: u16) -> String {
-  format!("0.0.0.0:{port}")
-}
-
-fn hyper_http(exe: &str) -> Result<HttpBenchmarkResult> {
-  let port = get_port();
-  println!("http_benchmark testing RUST hyper");
-  run(&[exe, &port.to_string()], port, None, None, None)
-}
@@ -1,10 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-import { Hono } from "https://deno.land/x/hono@v2.0.9/mod.ts";
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-
-const app = new Hono();
-app.get("/", (c) => c.text("Hello, World!"));
-
-Deno.serve({ port: Number(port), hostname }, app.fetch);

@@ -1,14 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const { serve } = Deno;
-
-const path = new URL("../testdata/128k.bin", import.meta.url).pathname;
-
-function handler() {
-  const file = Deno.openSync(path);
-  return new Response(file.readable);
-}
-
-serve({ hostname, port: Number(port) }, handler);

@@ -1,19 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const listener = Deno.listen({ hostname, port: Number(port) });
-console.log("Server listening on", addr);
-
-const encoder = new TextEncoder();
-const body = encoder.encode("Hello World");
-
-for await (const conn of listener) {
-  (async () => {
-    const requests = Deno.serveHttp(conn);
-    for await (const event of requests) {
-      event.respondWith(new Response(body))
-        .catch((e) => console.log(e));
-    }
-  })();
-}
@@ -1,24 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const listener = Deno.listen({ hostname, port: Number(port) });
-console.log("Server listening on", addr);
-
-for await (const conn of listener) {
-  (async () => {
-    const requests = Deno.serveHttp(conn);
-    for await (const { respondWith } of requests) {
-      respondWith(
-        new Response("Hello World", {
-          status: 200,
-          headers: {
-            server: "deno",
-            "content-type": "text/plain",
-          },
-        }),
-      )
-        .catch((e) => console.log(e));
-    }
-  })();
-}

@@ -1,43 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const tcp = Deno.listen({ hostname, port: Number(port) });
-console.log("Server listening on", addr);
-
-class Http {
-  id;
-  constructor(id) {
-    this.id = id;
-  }
-  [Symbol.asyncIterator]() {
-    return {
-      next: async () => {
-        const reqEvt = await Deno[Deno.internal].core.opAsync(
-          "op_http_accept",
-          this.id,
-        );
-        return { value: reqEvt ?? undefined, done: reqEvt === null };
-      },
-    };
-  }
-}
-
-for await (const conn of tcp) {
-  const id = Deno[Deno.internal].core.ops.op_http_start(conn.rid);
-  const http = new Http(id);
-  (async () => {
-    for await (const req of http) {
-      if (req == null) continue;
-      const { 0: stream } = req;
-      await Deno[Deno.internal].core.opAsync(
-        "op_http_write_headers",
-        stream,
-        200,
-        [],
-        "Hello World",
-      );
-      Deno[Deno.internal].core.close(stream);
-    }
-  })();
-}
@@ -1,17 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const listener = Deno.listen({ hostname, port: Number(port) });
-console.log("Server listening on", addr);
-
-for await (const conn of listener) {
-  (async () => {
-    const requests = Deno.serveHttp(conn);
-    for await (const { respondWith, request } of requests) {
-      const bar = request.headers.get("foo");
-      respondWith(new Response(bar))
-        .catch((e) => console.log(e));
-    }
-  })();
-}

@@ -1,5 +0,0 @@
-wrk.headers["foo"] = "bar"
-wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
-wrk.headers["Viewport-Width"] = "1920"
-wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
-wrk.headers["Accept-Language"] = "en,la;q=0.9"
@ -1,11 +0,0 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
const addr = Deno.args[0] ?? "127.0.0.1:4500";
|
||||
const [hostname, port] = addr.split(":");
|
||||
const { serve } = Deno;
|
||||
|
||||
function handler() {
|
||||
return new Response("Hello World");
|
||||
}
|
||||
|
||||
serve({ hostname, port: Number(port), reusePort: true }, handler);
|
|
@@ -1,19 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const listener = Deno.listen({ hostname, port: Number(port) });
-console.log("Server listening on", addr);
-
-for await (const conn of listener) {
-  (async () => {
-    const requests = Deno.serveHttp(conn);
-    for await (const { respondWith, request } of requests) {
-      if (request.method == "POST") {
-        const buffer = await request.arrayBuffer();
-        respondWith(new Response(buffer.byteLength))
-          .catch((e) => console.log(e));
-      }
-    }
-  })();
-}
@@ -1,5 +0,0 @@
-wrk.method = "POST"
-wrk.headers["Content-Type"] = "application/octet-stream"
-
-file = io.open("./cli/bench/testdata/128k.bin", "rb")
-wrk.body = file:read("*a")
@@ -1,19 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const listener = Deno.listen({ hostname, port: Number(port) });
-console.log("Server listening on", addr);
-
-for await (const conn of listener) {
-  (async () => {
-    const requests = Deno.serveHttp(conn);
-    for await (const { respondWith, request } of requests) {
-      if (request.method == "POST") {
-        const json = await request.json();
-        respondWith(new Response(json.hello))
-          .catch((e) => console.log(e));
-      }
-    }
-  })();
-}
@@ -1,3 +0,0 @@
-wrk.method = "POST"
-wrk.headers["Content-Type"] = "application/json"
-wrk.body = '{"hello":"deno"}'
@@ -1,23 +0,0 @@
-import { renderToReadableStream } from "https://esm.run/react-dom/server";
-import * as React from "https://esm.run/react";
-const { serve } = Deno;
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-
-const App = () => (
-  <html>
-    <body>
-      <h1>Hello World</h1>
-    </body>
-  </html>
-);
-
-const headers = {
-  headers: {
-    "Content-Type": "text/html",
-  },
-};
-
-serve({ hostname, port: Number(port) }, async () => {
-  return new Response(await renderToReadableStream(<App />), headers);
-});
@@ -1,33 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-// Used for benchmarking Deno's networking.
-// TODO(bartlomieju): Replace this with a real HTTP server once
-// https://github.com/denoland/deno/issues/726 is completed.
-// Note: this is a keep-alive server.
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const listener = Deno.listen({ hostname, port: Number(port) });
-const response = new TextEncoder().encode(
-  "HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n",
-);
-async function handle(conn: Deno.Conn): Promise<void> {
-  const buffer = new Uint8Array(1024);
-  try {
-    while (true) {
-      await conn.read(buffer);
-      await conn.write(response);
-    }
-  } catch (e) {
-    if (
-      !(e instanceof Deno.errors.BrokenPipe) &&
-      !(e instanceof Deno.errors.ConnectionReset)
-    ) {
-      throw e;
-    }
-  }
-  conn.close();
-}
-
-console.log("Listening on", addr);
-for await (const conn of listener) {
-  handle(conn);
-}
186 cli/bench/lsp.rs
@@ -1,19 +1,23 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

 use deno_core::serde::Deserialize;
 use deno_core::serde_json;
 use deno_core::serde_json::json;
 use deno_core::serde_json::Value;
-use deno_core::url::Url;
+use lsp_types::Uri;
 use std::collections::HashMap;
 use std::path::Path;
+use std::str::FromStr;
 use std::time::Duration;
 use test_util::lsp::LspClientBuilder;
 use test_util::PathRef;
 use tower_lsp::lsp_types as lsp;

 static FIXTURE_CODE_LENS_TS: &str = include_str!("testdata/code_lens.ts");
 static FIXTURE_DB_TS: &str = include_str!("testdata/db.ts");
 static FIXTURE_DB_MESSAGES: &[u8] = include_bytes!("testdata/db_messages.json");
+static FIXTURE_DECO_APPS: &[u8] =
+  include_bytes!("testdata/deco_apps_requests.json");

 #[derive(Debug, Deserialize)]
 enum FixtureType {
@@ -36,6 +40,107 @@ struct FixtureMessage
   params: Value,
 }

+/// replaces the root directory in the URIs of the requests
+/// with the given root path
+fn patch_uris<'a>(
+  reqs: impl IntoIterator<Item = &'a mut tower_lsp::jsonrpc::Request>,
+  root: &PathRef,
+) {
+  for req in reqs {
+    let mut params = req.params().unwrap().clone();
+    let new_req = if let Some(doc) = params.get_mut("textDocument") {
+      if let Some(uri_val) = doc.get_mut("uri") {
+        let uri = uri_val.as_str().unwrap();
+        *uri_val =
+          Value::from(uri.replace(
+            "file:///",
+            &format!("file://{}/", root.to_string_lossy()),
+          ));
+      }
+      let builder = tower_lsp::jsonrpc::Request::build(req.method().to_owned());
+      let builder = if let Some(id) = req.id() {
+        builder.id(id.clone())
+      } else {
+        builder
+      };
+
+      Some(builder.params(params).finish())
+    } else {
+      None
+    };
+
+    if let Some(new_req) = new_req {
+      *req = new_req;
+    }
+  }
+}
+
+fn bench_deco_apps_edits(deno_exe: &Path) -> Duration {
+  let mut requests: Vec<tower_lsp::jsonrpc::Request> =
+    serde_json::from_slice(FIXTURE_DECO_APPS).unwrap();
+  let apps =
+    test_util::root_path().join("cli/bench/testdata/lsp_benchdata/apps");
+
+  // it's a bit wasteful to do this for every run, but it's the easiest with the way things
+  // are currently structured
+  patch_uris(&mut requests, &apps);
+
+  let mut client = LspClientBuilder::new()
+    .use_diagnostic_sync(false)
+    .set_root_dir(apps.clone())
+    .deno_exe(deno_exe)
+    .build();
+  client.initialize(|c| {
+    c.set_workspace_folders(vec![lsp_types::WorkspaceFolder {
+      uri: apps.uri_dir(),
+      name: "apps".to_string(),
+    }]);
+    c.set_deno_enable(true);
+    c.set_unstable(true);
+    c.set_preload_limit(1000);
+    c.set_config(apps.join("deno.json").as_path().to_string_lossy());
+  });
+
+  let start = std::time::Instant::now();
+
+  let mut reqs = 0;
+  for req in requests {
+    if req.id().is_none() {
+      client.write_notification(req.method(), req.params());
+    } else {
+      reqs += 1;
+      client.write_jsonrpc(req.method(), req.params());
+    }
+  }
+  for _ in 0..reqs {
+    let _ = client.read_latest_response();
+  }
+
+  let end = start.elapsed();
+
+  // part of the motivation of including this benchmark is to see how we perform
+  // with a fairly large number of documents in memory.
+  // make sure that's the case
+  let res = client.write_request(
+    "deno/virtualTextDocument",
+    json!({
+      "textDocument": {
+        "uri": "deno:/status.md"
+      }
+    }),
+  );
+  let re = lazy_regex::regex!(r"Documents in memory: (\d+)");
+  let res = res.as_str().unwrap().to_string();
+  assert!(res.starts_with("# Deno Language Server Status"));
+  let captures = re.captures(&res).unwrap();
+  let count = captures.get(1).unwrap().as_str().parse::<usize>().unwrap();
+  assert!(count > 1000, "count: {}", count);
+
+  client.shutdown();
+
+  end
+}
+
 /// A benchmark that opens a 8000+ line TypeScript document, adds a function to
 /// the end of the document and does a level of hovering and gets quick fix
 /// code actions.
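As a minimal, standalone sketch (not part of this commit), the core of what `patch_uris` does to every `textDocument.uri` in the recorded fixture is a plain prefix rewrite; the root path below is hypothetical:

// Recorded fixtures use bare file:/// URIs; each run re-homes them under the
// benchmark's actual root directory (the real root comes from test_util).
fn main() {
  let root = "/tmp/lsp_benchdata/apps"; // hypothetical checkout location
  let recorded = "file:///routes/index.ts";
  let patched = recorded.replace("file:///", &format!("file://{}/", root));
  assert_eq!(patched, "file:///tmp/lsp_benchdata/apps/routes/index.ts");
}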
@@ -45,6 +150,11 @@ fn bench_big_file_edits(deno_exe: &Path) -> Duration {
     .deno_exe(deno_exe)
     .build();
   client.initialize_default();
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
+  client.change_configuration(json!({ "deno": { "enable": true } }));
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");

   client.write_notification(
     "textDocument/didOpen",
@@ -58,16 +168,6 @@ fn bench_big_file_edits(deno_exe: &Path) -> Duration {
     }),
   );

-  let (id, method, _): (u64, String, Option<Value>) = client.read_request();
-  assert_eq!(method, "workspace/configuration");
-
-  client.write_response(
-    id,
-    json!({
-      "enable": true
-    }),
-  );
-
   let (method, _): (String, Option<Value>) = client.read_notification();
   assert_eq!(method, "textDocument/publishDiagnostics");
   let (method, _): (String, Option<Value>) = client.read_notification();
@@ -110,6 +210,18 @@ fn bench_code_lens(deno_exe: &Path) -> Duration {
     .deno_exe(deno_exe)
     .build();
   client.initialize_default();
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
+  client.change_configuration(json!({ "deno": {
+    "enable": true,
+    "codeLens": {
+      "implementations": true,
+      "references": true,
+      "test": true,
+    },
+  } }));
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");

   client.write_notification(
     "textDocument/didOpen",
@@ -123,16 +235,6 @@ fn bench_code_lens(deno_exe: &Path) -> Duration {
     }),
   );

-  let (id, method, _): (u64, String, Option<Value>) = client.read_request();
-  assert_eq!(method, "workspace/configuration");
-
-  client.write_response(
-    id,
-    json!({
-      "enable": true
-    }),
-  );
-
   let (method, _): (String, Option<Value>) = client.read_notification();
   assert_eq!(method, "textDocument/publishDiagnostics");
   let (method, _): (String, Option<Value>) = client.read_notification();
@@ -163,6 +265,11 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
     .deno_exe(deno_exe)
     .build();
   client.initialize_default();
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
+  client.change_configuration(json!({ "deno": { "enable": true } }));
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");

   for i in 0..10 {
     client.write_notification(
@@ -178,12 +285,6 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
     );
   }

-  for _ in 0..10 {
-    let (id, method, _) = client.read_request::<Value>();
-    assert_eq!(method, "workspace/configuration");
-    client.write_response(id, json!({ "enable": true }));
-  }
-
   for _ in 0..3 {
     let (method, _): (String, Option<Value>) = client.read_notification();
     assert_eq!(method, "textDocument/publishDiagnostics");
@@ -195,7 +296,7 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
       "textDocument/didChange",
       lsp::DidChangeTextDocumentParams {
         text_document: lsp::VersionedTextDocumentIdentifier {
-          uri: Url::parse(&file_name).unwrap(),
+          uri: Uri::from_str(&file_name).unwrap(),
           version: 2,
         },
         content_changes: vec![lsp::TextDocumentContentChangeEvent {
@@ -222,7 +323,7 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
     "textDocument/formatting",
     lsp::DocumentFormattingParams {
       text_document: lsp::TextDocumentIdentifier {
-        uri: Url::parse(&file_name).unwrap(),
+        uri: Uri::from_str(&file_name).unwrap(),
      },
      options: lsp::FormattingOptions {
        tab_size: 2,
@@ -252,6 +353,11 @@ fn bench_startup_shutdown(deno_exe: &Path) -> Duration {
     .deno_exe(deno_exe)
     .build();
   client.initialize_default();
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
+  client.change_configuration(json!({ "deno": { "enable": true } }));
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");

   client.write_notification(
     "textDocument/didOpen",
@@ -265,16 +371,6 @@ fn bench_startup_shutdown(deno_exe: &Path) -> Duration {
     }),
   );

-  let (id, method, _) = client.read_request::<Value>();
-  assert_eq!(method, "workspace/configuration");
-
-  client.write_response(
-    id,
-    json!({
-      "enable": true
-    }),
-  );
-
   let (method, _): (String, Option<Value>) = client.read_notification();
   assert_eq!(method, "textDocument/publishDiagnostics");
   let (method, _): (String, Option<Value>) = client.read_notification();
@@ -334,6 +430,16 @@ pub fn benchmarks(deno_exe: &Path) -> HashMap<String, i64> {
   println!(" ({} runs, mean: {}ms)", times.len(), mean);
   exec_times.insert("code_lens".to_string(), mean);

+  println!(" - deco-cx/apps Multiple Edits + Navigation");
+  let mut times = Vec::new();
+  for _ in 0..5 {
+    times.push(bench_deco_apps_edits(deno_exe));
+  }
+  let mean =
+    (times.iter().sum::<Duration>() / times.len() as u32).as_millis() as i64;
+  println!(" ({} runs, mean: {}ms)", times.len(), mean);
+  exec_times.insert("deco_apps_edits_nav".to_string(), mean);
+
   println!("<- End benchmarking lsp");

   exec_times
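For reference, the reported mean is a plain average over `Duration` samples; a self-contained sketch with made-up timings:

use std::time::Duration;

fn main() {
  // Three hypothetical runs; Duration implements Sum and Div<u32>, so the
  // expression below mirrors the one used in the benchmark harness.
  let times = vec![
    Duration::from_millis(120),
    Duration::from_millis(140),
    Duration::from_millis(100),
  ];
  let mean =
    (times.iter().sum::<Duration>() / times.len() as u32).as_millis() as i64;
  assert_eq!(mean, 120); // (120 + 140 + 100) / 3
}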
@@ -1,4 +1,4 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

 use deno_bench_util::bencher::benchmark_group;
 use deno_bench_util::bencher::benchmark_main;
@@ -13,6 +13,11 @@ use test_util::lsp::LspClientBuilder;
 fn incremental_change_wait(bench: &mut Bencher) {
   let mut client = LspClientBuilder::new().use_diagnostic_sync(false).build();
   client.initialize_default();
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
+  client.change_configuration(json!({ "deno": { "enable": true } }));
+  let (method, _): (String, Option<Value>) = client.read_notification();
+  assert_eq!(method, "deno/didRefreshDenoConfigurationTree");

   client.write_notification(
     "textDocument/didOpen",
@@ -26,15 +31,6 @@ fn incremental_change_wait(bench: &mut Bencher) {
     }),
   );

-  let (id, method, _): (u64, String, Option<Value>) = client.read_request();
-  assert_eq!(method, "workspace/configuration");
-  client.write_response(
-    id,
-    json!({
-      "enable": true
-    }),
-  );
-
   let (method, _maybe_diag): (String, Option<Value>) =
     client.read_notification();
   assert_eq!(method, "textDocument/publishDiagnostics");
@@ -1,4 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+#![allow(clippy::print_stdout)]
+#![allow(clippy::print_stderr)]

 use deno_core::error::AnyError;
 use deno_core::serde_json;
@@ -14,9 +17,6 @@ use std::process::Stdio;
 use std::time::SystemTime;
 use test_util::PathRef;

-include!("../util/time.rs");
-
 mod http;
 mod lsp;

 fn read_json(filename: &Path) -> Result<Value> {
@@ -37,7 +37,7 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
   // invalidating that cache.
   (
     "cold_hello",
-    &["run", "--reload", "cli/tests/testdata/run/002_hello.ts"],
+    &["run", "--reload", "tests/testdata/run/002_hello.ts"],
     None,
   ),
   (
@@ -45,23 +45,19 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
     &[
       "run",
       "--reload",
-      "cli/tests/testdata/run/003_relative_import.ts",
+      "tests/testdata/run/003_relative_import.ts",
     ],
     None,
   ),
-  (
-    "hello",
-    &["run", "cli/tests/testdata/run/002_hello.ts"],
-    None,
-  ),
+  ("hello", &["run", "tests/testdata/run/002_hello.ts"], None),
   (
     "relative_import",
-    &["run", "cli/tests/testdata/run/003_relative_import.ts"],
+    &["run", "tests/testdata/run/003_relative_import.ts"],
     None,
   ),
   (
     "error_001",
-    &["run", "cli/tests/testdata/run/error_001.ts"],
+    &["run", "tests/testdata/run/error_001.ts"],
     Some(1),
   ),
   (
@@ -70,7 +66,7 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
      "run",
      "--reload",
      "--no-check",
-     "cli/tests/testdata/run/002_hello.ts",
+     "tests/testdata/run/002_hello.ts",
    ],
    None,
  ),
@@ -79,7 +75,7 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
    &[
      "run",
      "--allow-read",
-     "cli/tests/testdata/workers/bench_startup.ts",
+     "tests/testdata/workers/bench_startup.ts",
    ],
    None,
  ),
@@ -88,7 +84,7 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
    &[
      "run",
      "--allow-read",
-     "cli/tests/testdata/workers/bench_round_robin.ts",
+     "tests/testdata/workers/bench_round_robin.ts",
    ],
    None,
  ),
@@ -97,31 +93,28 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
    &[
      "run",
      "--allow-read",
-     "cli/tests/testdata/workers/bench_large_message.ts",
+     "tests/testdata/workers/bench_large_message.ts",
    ],
    None,
  ),
  (
    "text_decoder",
-   &["run", "cli/tests/testdata/benches/text_decoder_perf.js"],
+   &["run", "tests/testdata/benches/text_decoder_perf.js"],
    None,
  ),
  (
    "text_encoder",
-   &["run", "cli/tests/testdata/benches/text_encoder_perf.js"],
+   &["run", "tests/testdata/benches/text_encoder_perf.js"],
    None,
  ),
  (
    "text_encoder_into",
-   &[
-     "run",
-     "cli/tests/testdata/benches/text_encoder_into_perf.js",
-   ],
+   &["run", "tests/testdata/benches/text_encoder_into_perf.js"],
    None,
  ),
  (
    "response_string",
-   &["run", "cli/tests/testdata/benches/response_string_perf.js"],
+   &["run", "tests/testdata/benches/response_string_perf.js"],
    None,
  ),
  (
@@ -130,7 +123,9 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
      "check",
      "--reload",
-     "--unstable",
-     "test_util/std/examples/chat/server_test.ts",
+     "--config",
+     "tests/config/deno.json",
+     "tests/util/std/http/file_server_test.ts",
    ],
    None,
  ),
@@ -141,26 +136,9 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
      "--reload",
      "--no-check",
-     "--unstable",
-     "test_util/std/examples/chat/server_test.ts",
-   ],
-   None,
- ),
- (
-   "bundle",
-   &[
-     "bundle",
-     "--unstable",
-     "test_util/std/examples/chat/server_test.ts",
-   ],
-   None,
- ),
- (
-   "bundle_no_check",
-   &[
-     "bundle",
-     "--no-check",
-     "--unstable",
-     "test_util/std/examples/chat/server_test.ts",
+     "--config",
+     "tests/config/deno.json",
+     "tests/util/std/http/file_server_test.ts",
    ],
    None,
  ),
@@ -312,38 +290,6 @@ fn get_binary_sizes(target_dir: &Path) -> Result<HashMap<String, i64>> {
   Ok(sizes)
 }

-const BUNDLES: &[(&str, &str)] = &[
-  ("file_server", "./test_util/std/http/file_server.ts"),
-  ("gist", "./test_util/std/examples/gist.ts"),
-];
-fn bundle_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
-  let mut sizes = HashMap::<String, i64>::new();
-
-  for (name, url) in BUNDLES {
-    let path = format!("{name}.bundle.js");
-    test_util::run(
-      &[
-        deno_exe.to_str().unwrap(),
-        "bundle",
-        "--unstable",
-        url,
-        &path,
-      ],
-      None,
-      None,
-      None,
-      true,
-    );
-
-    let file = PathBuf::from(path);
-    assert!(file.is_file());
-    sizes.insert(name.to_string(), file.metadata()?.len() as i64);
-    let _ = fs::remove_file(file);
-  }
-
-  Ok(sizes)
-}
-
 fn run_max_mem_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
   let mut results = HashMap::<String, i64>::new();
@@ -398,9 +344,11 @@ struct BenchResult {
   binary_size: HashMap<String, i64>,
   bundle_size: HashMap<String, i64>,
   cargo_deps: usize,
+  // TODO(bartlomieju): remove
   max_latency: HashMap<String, f64>,
   max_memory: HashMap<String, i64>,
   lsp_exec_time: HashMap<String, i64>,
+  // TODO(bartlomieju): remove
   req_per_sec: HashMap<String, i64>,
   syscall_count: HashMap<String, i64>,
   thread_count: HashMap<String, i64>,
@@ -411,12 +359,10 @@ async fn main() -> Result<()> {
   let mut args = env::args();

   let mut benchmarks = vec![
-    "bundle",
     "exec_time",
     "binary_size",
     "cargo_deps",
     "lsp",
-    "http",
     "strace",
     "mem_usage",
   ];
@@ -438,11 +384,16 @@ async fn main() -> Result<()> {
   println!("Starting Deno benchmark");

   let target_dir = test_util::target_dir();
-  let deno_exe = test_util::deno_exe_path().to_path_buf();
+  let deno_exe = if let Ok(p) = std::env::var("DENO_BENCH_EXE") {
+    PathBuf::from(p)
+  } else {
+    test_util::deno_exe_path().to_path_buf()
+  };
   env::set_current_dir(test_util::root_path())?;

   let mut new_data = BenchResult {
-    created_at: utc_now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
+    created_at: chrono::Utc::now()
+      .to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
     sha1: test_util::run_collect(
       &["git", "rev-parse", "HEAD"],
       None,
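The `DENO_BENCH_EXE` override introduced here is the usual env-var-with-fallback pattern; a small sketch outside the harness, where the fallback path is only a stand-in for `test_util::deno_exe_path()`:

use std::path::PathBuf;

fn main() {
  // Prefer an explicitly provided binary, else use the default build output.
  let deno_exe = if let Ok(p) = std::env::var("DENO_BENCH_EXE") {
    PathBuf::from(p)
  } else {
    PathBuf::from("target/release/deno") // stand-in default
  };
  println!("benchmarking {}", deno_exe.display());
}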
@@ -456,11 +407,6 @@ async fn main() -> Result<()> {
     ..Default::default()
   };

-  if benchmarks.contains(&"bundle") {
-    let bundle_size = bundle_benchmark(&deno_exe)?;
-    new_data.bundle_size = bundle_size;
-  }
-
   if benchmarks.contains(&"exec_time") {
     let exec_times = run_exec_time(&deno_exe, &target_dir)?;
     new_data.benchmark = exec_times;
@@ -481,21 +427,6 @@ async fn main() -> Result<()> {
     new_data.lsp_exec_time = lsp_exec_times;
   }

-  if benchmarks.contains(&"http") && cfg!(not(target_os = "windows")) {
-    let stats = http::benchmark(target_dir.as_path())?;
-    let req_per_sec = stats
-      .iter()
-      .map(|(name, result)| (name.clone(), result.requests as i64))
-      .collect();
-    new_data.req_per_sec = req_per_sec;
-    let max_latency = stats
-      .iter()
-      .map(|(name, result)| (name.clone(), result.latency))
-      .collect();
-
-    new_data.max_latency = max_latency;
-  }
-
   if cfg!(target_os = "linux") && benchmarks.contains(&"strace") {
     use std::io::Read;
@@ -1,6 +1,6 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

-import { loadTestLibrary } from "../../../test_napi/common.js";
+import { loadTestLibrary } from "../../../tests/napi/common.js";

 const lib = loadTestLibrary();
@@ -1,8 +1,10 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
 import { bench, run } from "mitata";
 import { createRequire } from "module";

 const require = createRequire(import.meta.url);
-const lib = require("../../../test_napi.node");
+const lib = require("../../../tests/napi.node");

 bench("warmup", () => {});
 bench("napi_get_undefined", () => lib.test_get_undefined(0));
@@ -1,4 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console

 const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
 let [total, count] = typeof Deno !== "undefined"
   ? Deno.args
@@ -1,4 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console

 let [total, count] = typeof Deno !== "undefined"
   ? Deno.args
   : [process.argv[2], process.argv[3]];
@@ -1,5 +1,5 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 // From https://github.com/just-js/benchmarks/tree/main/01-stdio
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

 #include <stdlib.h>
 #include <stdio.h>
@@ -1,7 +1,9 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+//
 // From https://github.com/just-js/benchmarks/tree/main/01-stdio

+// deno-lint-ignore-file no-console

 const blocksize = parseInt(Deno.args[0] || 65536);
 const buf = new Uint8Array(blocksize);
 let size = 0;
@@ -1,4 +1,4 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

 const listener = Deno.listen({ port: 4500 });
 const response = new TextEncoder().encode(
2905 cli/bench/testdata/deco_apps_requests.json (vendored, new file)
File diff suppressed because it is too large

1 cli/bench/testdata/lsp_benchdata (vendored, new submodule)
@@ -0,0 +1 @@
+Subproject commit af4c6a1eee825f19d3b3cce74cfdd03ebe1a3b92
@@ -1,4 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console

 const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
 let [total, count] = typeof Deno !== "undefined"
   ? Deno.args
@@ -1,4 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console

 const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
 let [total, count] = typeof Deno !== "undefined"
   ? Deno.args
@@ -1,4 +1,6 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console

 // Note: when benchmarking across different Deno version, make sure to clear
 // the DENO_DIR cache.
@@ -1,4 +1,7 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-console

 const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
 let [total, count] = typeof Deno !== "undefined"
   ? Deno.args
299 cli/build.rs
@@ -1,79 +1,92 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

 use std::env;
 use std::path::PathBuf;

-use deno_core::snapshot_util::*;
-use deno_core::ExtensionFileSource;
-use deno_core::ExtensionFileSourceCode;
+use deno_core::snapshot::*;
 use deno_runtime::*;
+mod shared;

 mod ts {
   use super::*;
   use deno_core::error::custom_error;
   use deno_core::error::AnyError;
-  use deno_core::op;
+  use deno_core::op2;
   use deno_core::OpState;
-  use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
-  use serde::Deserialize;
-  use serde_json::json;
-  use serde_json::Value;
+  use serde::Serialize;
   use std::collections::HashMap;
+  use std::io::Write;
   use std::path::Path;
   use std::path::PathBuf;

-  #[derive(Debug, Deserialize)]
-  struct LoadArgs {
-    /// The fully qualified specifier that should be loaded.
-    specifier: String,
+  #[derive(Debug, Serialize)]
+  #[serde(rename_all = "camelCase")]
+  struct BuildInfoResponse {
+    build_specifier: String,
+    libs: Vec<String>,
   }

-  #[op]
-  fn op_build_info(state: &mut OpState) -> Value {
-    let build_specifier = "asset:///bootstrap.ts";
-
-    let node_built_in_module_names = SUPPORTED_BUILTIN_NODE_MODULES.to_vec();
-    let build_libs = state.borrow::<Vec<&str>>();
-    json!({
-      "buildSpecifier": build_specifier,
-      "libs": build_libs,
-      "nodeBuiltInModuleNames": node_built_in_module_names,
-    })
+  #[op2]
+  #[serde]
+  fn op_build_info(state: &mut OpState) -> BuildInfoResponse {
+    let build_specifier = "asset:///bootstrap.ts".to_string();
+    let build_libs = state
+      .borrow::<Vec<&str>>()
+      .iter()
+      .map(|s| s.to_string())
+      .collect();
+    BuildInfoResponse {
+      build_specifier,
+      libs: build_libs,
+    }
   }

-  #[op]
+  #[op2(fast)]
   fn op_is_node_file() -> bool {
     false
   }

-  #[op]
+  #[op2]
+  #[string]
   fn op_script_version(
     _state: &mut OpState,
-    _args: Value,
+    #[string] _arg: &str,
   ) -> Result<Option<String>, AnyError> {
     Ok(Some("1".to_string()))
   }

-  #[op]
+  #[derive(Debug, Serialize)]
+  #[serde(rename_all = "camelCase")]
+  struct LoadResponse {
+    data: String,
+    version: String,
+    script_kind: i32,
+  }
+
+  #[op2]
+  #[serde]
   // using the same op that is used in `tsc.rs` for loading modules and reading
   // files, but a slightly different implementation at build time.
-  fn op_load(state: &mut OpState, args: LoadArgs) -> Result<Value, AnyError> {
+  fn op_load(
+    state: &mut OpState,
+    #[string] load_specifier: &str,
+  ) -> Result<LoadResponse, AnyError> {
     let op_crate_libs = state.borrow::<HashMap<&str, PathBuf>>();
     let path_dts = state.borrow::<PathBuf>();
     let re_asset = lazy_regex::regex!(r"asset:/{3}lib\.(\S+)\.d\.ts");
     let build_specifier = "asset:///bootstrap.ts";

     // we need a basic file to send to tsc to warm it up.
-    if args.specifier == build_specifier {
-      Ok(json!({
-        "data": r#"Deno.writeTextFile("hello.txt", "hello deno!");"#,
-        "version": "1",
+    if load_specifier == build_specifier {
+      Ok(LoadResponse {
+        data: r#"Deno.writeTextFile("hello.txt", "hello deno!");"#.to_string(),
+        version: "1".to_string(),
         // this corresponds to `ts.ScriptKind.TypeScript`
-        "scriptKind": 3
-      }))
+        script_kind: 3,
+      })
       // specifiers come across as `asset:///lib.{lib_name}.d.ts` and we need to
      // parse out just the name so we can lookup the asset.
-    } else if let Some(caps) = re_asset.captures(&args.specifier) {
+    } else if let Some(caps) = re_asset.captures(load_specifier) {
       if let Some(lib) = caps.get(1).map(|m| m.as_str()) {
         // if it comes from an op crate, we were supplied with the path to the
         // file.
@@ -84,22 +97,22 @@ mod ts {
         path_dts.join(format!("lib.{lib}.d.ts"))
       };
       let data = std::fs::read_to_string(path)?;
-      Ok(json!({
-        "data": data,
-        "version": "1",
+      Ok(LoadResponse {
+        data,
+        version: "1".to_string(),
         // this corresponds to `ts.ScriptKind.TypeScript`
-        "scriptKind": 3
-      }))
+        script_kind: 3,
+      })
     } else {
       Err(custom_error(
         "InvalidSpecifier",
-        format!("An invalid specifier was requested: {}", args.specifier),
+        format!("An invalid specifier was requested: {}", load_specifier),
       ))
     }
   } else {
     Err(custom_error(
       "InvalidSpecifier",
-      format!("An invalid specifier was requested: {}", args.specifier),
+      format!("An invalid specifier was requested: {}", load_specifier),
     ))
   }
 }
@@ -131,8 +144,10 @@ mod ts {
   op_crate_libs.insert("deno.url", deno_url::get_declaration());
   op_crate_libs.insert("deno.web", deno_web::get_declaration());
   op_crate_libs.insert("deno.fetch", deno_fetch::get_declaration());
+  op_crate_libs.insert("deno.webgpu", deno_webgpu_get_declaration());
   op_crate_libs.insert("deno.websocket", deno_websocket::get_declaration());
   op_crate_libs.insert("deno.webstorage", deno_webstorage::get_declaration());
+  op_crate_libs.insert("deno.canvas", deno_canvas::get_declaration());
   op_crate_libs.insert("deno.crypto", deno_crypto::get_declaration());
   op_crate_libs.insert(
     "deno.broadcast_channel",
@@ -168,8 +183,10 @@ mod ts {
     "es2015.symbol",
     "es2015.symbol.wellknown",
     "es2016.array.include",
+    "es2016.intl",
     "es2016",
     "es2017",
+    "es2017.date",
     "es2017.intl",
     "es2017.object",
     "es2017.sharedmemory",
@@ -211,9 +228,19 @@ mod ts {
     "es2022.string",
     "es2023",
     "es2023.array",
+    "es2023.collection",
+    "es2023.intl",
     "esnext",
     "esnext.array",
+    "esnext.collection",
     "esnext.decorators",
     "esnext.disposable",
     "esnext.intl",
+    "esnext.iterator",
+    "esnext.object",
+    "esnext.promise",
+    "esnext.regexp",
+    "esnext.string",
   ];

   let path_dts = cwd.join("tsc/dts");
@@ -241,33 +268,40 @@ mod ts {
     )
     .unwrap();

-    let output = create_snapshot(CreateSnapshotOptions {
-      cargo_manifest_dir: env!("CARGO_MANIFEST_DIR"),
-      snapshot_path,
-      startup_snapshot: None,
-      extensions: vec![deno_tsc::init_ops_and_esm(
-        op_crate_libs,
-        build_libs,
-        path_dts,
-      )],
+    let output = create_snapshot(
+      CreateSnapshotOptions {
+        cargo_manifest_dir: env!("CARGO_MANIFEST_DIR"),
+        startup_snapshot: None,
+        extensions: vec![deno_tsc::init_ops_and_esm(
+          op_crate_libs,
+          build_libs,
+          path_dts,
+        )],
+        extension_transpiler: None,
+        with_runtime_cb: None,
+        skip_op_registration: false,
+      },
+      None,
+    )
+    .unwrap();

     // NOTE(bartlomieju): Compressing the TSC snapshot in debug build took
     // ~45s on M1 MacBook Pro; without compression it took ~1s.
-    // Thus we're not not using compressed snapshot, trading off
+    // Thus we're not using compressed snapshot, trading off
     // a lot of build time for some startup time in debug build.
-    #[cfg(debug_assertions)]
-    compression_cb: None,
-
-    #[cfg(not(debug_assertions))]
-    compression_cb: Some(Box::new(|vec, snapshot_slice| {
-      eprintln!("Compressing TSC snapshot...");
+    let mut file = std::fs::File::create(snapshot_path).unwrap();
+    if cfg!(debug_assertions) {
+      file.write_all(&output.output).unwrap();
+    } else {
+      let mut vec = Vec::with_capacity(output.output.len());
+      vec.extend((output.output.len() as u32).to_le_bytes());
       vec.extend_from_slice(
-        &zstd::bulk::compress(snapshot_slice, 22)
+        &zstd::bulk::compress(&output.output, 22)
           .expect("snapshot compression failed"),
       );
-    })),
-    with_runtime_cb: None,
-    });
+      file.write_all(&vec).unwrap();
+    }

     for path in output.files_loaded_during_snapshot {
       println!("cargo:rerun-if-changed={}", path.display());
     }
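The release-mode snapshot written above is framed as the uncompressed length (a little-endian u32) followed by the zstd payload. A sketch of a matching reader, assuming the same `zstd` crate the writer uses (the real loader lives elsewhere in the codebase and may differ):

fn decompress_snapshot(bytes: &[u8]) -> Vec<u8> {
  // First four bytes: uncompressed length, as written by the build script above.
  let (len_bytes, compressed) = bytes.split_at(4);
  let len = u32::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
  // bulk::decompress takes the expected decompressed size as the capacity hint.
  zstd::bulk::decompress(compressed, len).expect("snapshot decompression failed")
}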
@@ -286,87 +320,21 @@ mod ts {
   }
 }

-// Duplicated in `ops/mod.rs`. Keep in sync!
-deno_core::extension!(
-  cli,
-  deps = [runtime],
-  esm_entry_point = "ext:cli/99_main.js",
-  esm = [
-    dir "js",
-    "40_testing.js",
-    "99_main.js"
-  ],
-  customizer = |ext: &mut deno_core::Extension| {
-    ext.esm_files.to_mut().push(ExtensionFileSource {
-      specifier: "ext:cli/runtime/js/99_main.js",
-      code: ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(
-        deno_runtime::js::PATH_FOR_99_MAIN_JS,
-      ),
-    });
-  }
-);
+#[cfg(not(feature = "hmr"))]
+fn create_cli_snapshot(snapshot_path: PathBuf) {
+  use deno_runtime::ops::bootstrap::SnapshotOptions;

-#[cfg(not(feature = "__runtime_js_sources"))]
-#[must_use = "The files listed by create_cli_snapshot should be printed as 'cargo:rerun-if-changed' lines"]
-fn create_cli_snapshot(snapshot_path: PathBuf) -> CreateSnapshotOutput {
-  use deno_core::Extension;
-  use deno_runtime::deno_cache::SqliteBackedCache;
-  use deno_runtime::deno_http::DefaultHttpPropertyExtractor;
-  use deno_runtime::deno_kv::sqlite::SqliteDbHandler;
-  use deno_runtime::permissions::PermissionsContainer;
-  use std::sync::Arc;
+  let snapshot_options = SnapshotOptions {
+    ts_version: ts::version(),
+    v8_version: deno_core::v8::VERSION_STRING,
+    target: std::env::var("TARGET").unwrap(),
+  };

-  // NOTE(bartlomieju): ordering is important here, keep it in sync with
-  // `runtime/worker.rs`, `runtime/web_worker.rs` and `runtime/build.rs`!
-  let fs = Arc::new(deno_fs::RealFs);
-  let extensions: Vec<Extension> = vec![
-    deno_webidl::deno_webidl::init_ops(),
-    deno_console::deno_console::init_ops(),
-    deno_url::deno_url::init_ops(),
-    deno_web::deno_web::init_ops::<PermissionsContainer>(
-      Default::default(),
-      Default::default(),
-    ),
-    deno_fetch::deno_fetch::init_ops::<PermissionsContainer>(Default::default()),
-    deno_cache::deno_cache::init_ops::<SqliteBackedCache>(None),
-    deno_websocket::deno_websocket::init_ops::<PermissionsContainer>(
-      "".to_owned(),
-      None,
-      None,
-    ),
-    deno_webstorage::deno_webstorage::init_ops(None),
-    deno_crypto::deno_crypto::init_ops(None),
-    deno_broadcast_channel::deno_broadcast_channel::init_ops(
-      deno_broadcast_channel::InMemoryBroadcastChannel::default(),
-      false, // No --unstable.
-    ),
-    deno_ffi::deno_ffi::init_ops::<PermissionsContainer>(false),
-    deno_net::deno_net::init_ops::<PermissionsContainer>(
-      None, false, // No --unstable.
-      None,
-    ),
-    deno_tls::deno_tls::init_ops(),
-    deno_kv::deno_kv::init_ops(
-      SqliteDbHandler::<PermissionsContainer>::new(None),
-      false, // No --unstable.
-    ),
-    deno_napi::deno_napi::init_ops::<PermissionsContainer>(),
-    deno_http::deno_http::init_ops::<DefaultHttpPropertyExtractor>(),
-    deno_io::deno_io::init_ops(Default::default()),
-    deno_fs::deno_fs::init_ops::<PermissionsContainer>(false, fs.clone()),
-    deno_node::deno_node::init_ops::<PermissionsContainer>(None, fs),
-    deno_runtime::runtime::init_ops(),
-    cli::init_ops_and_esm(), // NOTE: This needs to be init_ops_and_esm!
-  ];
-
-  create_snapshot(CreateSnapshotOptions {
-    cargo_manifest_dir: env!("CARGO_MANIFEST_DIR"),
+  deno_runtime::snapshot::create_runtime_snapshot(
     snapshot_path,
-    startup_snapshot: deno_runtime::js::deno_isolate_init(),
-    extensions,
-    compression_cb: None,
-    with_runtime_cb: None,
-  })
+    snapshot_options,
+    vec![],
+  );
 }

 fn git_commit_hash() -> String {
@@ -397,52 +365,18 @@ fn main() {
     return;
   }

+  deno_napi::print_linker_flags("deno");
+  deno_napi::print_linker_flags("denort");
+
   // Host snapshots won't work when cross compiling.
   let target = env::var("TARGET").unwrap();
   let host = env::var("HOST").unwrap();
-  if target != host {
+  let skip_cross_check =
+    env::var("DENO_SKIP_CROSS_BUILD_CHECK").map_or(false, |v| v == "1");
+  if !skip_cross_check && target != host {
     panic!("Cross compiling with snapshot is not supported.");
   }

-  let symbols_path = std::path::Path::new("napi").join(
-    format!("generated_symbol_exports_list_{}.def", env::consts::OS).as_str(),
-  )
-  .canonicalize()
-  .expect(
-    "Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
-  );
-
-  #[cfg(target_os = "windows")]
-  println!(
-    "cargo:rustc-link-arg-bin=deno=/DEF:{}",
-    symbols_path.display()
-  );
-
-  #[cfg(target_os = "macos")]
-  println!(
-    "cargo:rustc-link-arg-bin=deno=-Wl,-exported_symbols_list,{}",
-    symbols_path.display()
-  );
-
-  #[cfg(target_os = "linux")]
-  {
-    // If a custom compiler is set, the glibc version is not reliable.
-    // Here, we assume that if a custom compiler is used, that it will be modern enough to support a dynamic symbol list.
-    if env::var("CC").is_err()
-      && glibc_version::get_version()
-        .map(|ver| ver.major <= 2 && ver.minor < 35)
-        .unwrap_or(false)
-    {
-      println!("cargo:warning=Compiling with all symbols exported, this will result in a larger binary. Please use glibc 2.35 or later for an optimised build.");
-      println!("cargo:rustc-link-arg-bin=deno=-rdynamic");
-    } else {
-      println!(
-        "cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
-        symbols_path.display()
-      );
-    }
-  }
-
   // To debug snapshot issues uncomment:
   // op_fetch_asset::trace_serializer();
@@ -459,7 +393,7 @@ fn main() {
   );

   let ts_version = ts::version();
-  debug_assert_eq!(ts_version, "5.1.6"); // bump this assertion when it changes
+  debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes
   println!("cargo:rustc-env=TS_VERSION={}", ts_version);
   println!("cargo:rerun-if-env-changed=TS_VERSION");
@@ -472,13 +406,10 @@ fn main() {
   let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin");
   ts::create_compiler_snapshot(compiler_snapshot_path, &c);

-  #[cfg(not(feature = "__runtime_js_sources"))]
+  #[cfg(not(feature = "hmr"))]
   {
     let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
-    let output = create_cli_snapshot(cli_snapshot_path);
-    for path in output.files_loaded_during_snapshot {
-      println!("cargo:rerun-if-changed={}", path.display())
-    }
+    create_cli_snapshot(cli_snapshot_path);
   }

   #[cfg(target_os = "windows")]
@@ -492,3 +423,11 @@ fn main() {
     res.compile().unwrap();
   }
 }
+
+fn deno_webgpu_get_declaration() -> PathBuf {
+  let manifest_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR"));
+  manifest_dir
+    .join("tsc")
+    .join("dts")
+    .join("lib.deno_webgpu.d.ts")
+}
318 cli/cache/cache_db.rs (vendored)
@@ -1,4 +1,4 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

 use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;
@@ -10,9 +10,52 @@ use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
 use deno_runtime::deno_webstorage::rusqlite::Params;
 use once_cell::sync::OnceCell;
 use std::io::IsTerminal;
+use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;

 use super::FastInsecureHasher;

+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct CacheDBHash(u64);
+
+impl CacheDBHash {
+  pub fn new(hash: u64) -> Self {
+    Self(hash)
+  }
+
+  pub fn from_source(source: impl std::hash::Hash) -> Self {
+    Self::new(
+      // always write in the deno version just in case
+      // the clearing on deno version change doesn't work
+      FastInsecureHasher::new_deno_versioned()
+        .write_hashable(source)
+        .finish(),
+    )
+  }
+}
+
+impl rusqlite::types::ToSql for CacheDBHash {
+  fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+    Ok(rusqlite::types::ToSqlOutput::Owned(
+      // sqlite doesn't support u64, but it does support i64 so store
+      // this value "incorrectly" as i64 then convert back to u64 on read
+      rusqlite::types::Value::Integer(self.0 as i64),
+    ))
+  }
+}
+
+impl rusqlite::types::FromSql for CacheDBHash {
+  fn column_result(
+    value: rusqlite::types::ValueRef,
+  ) -> rusqlite::types::FromSqlResult<Self> {
+    match value {
+      rusqlite::types::ValueRef::Integer(i) => Ok(Self::new(i as u64)),
+      _ => Err(rusqlite::types::FromSqlError::InvalidType),
+    }
+  }
+}
+
 /// What should the cache should do on failure?
 #[derive(Default)]
 pub enum CacheFailure {
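The `ToSql`/`FromSql` pair above relies on `as` casts between `u64` and `i64` being lossless bit-for-bit round trips, even for hashes above `i64::MAX`; a quick standalone check:

fn main() {
  for hash in [0u64, 1, i64::MAX as u64 + 1, u64::MAX - 1, u64::MAX] {
    let stored = hash as i64; // may appear negative in the database
    let read_back = stored as u64; // identical bit pattern restored
    assert_eq!(hash, read_back);
  }
}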
@@ -40,21 +83,16 @@ pub struct CacheDBConfiguration {
 impl CacheDBConfiguration {
   fn create_combined_sql(&self) -> String {
     format!(
-      "
-      PRAGMA journal_mode=TRUNCATE;
-      PRAGMA synchronous=NORMAL;
-      PRAGMA temp_store=memory;
-      PRAGMA page_size=4096;
-      PRAGMA mmap_size=6000000;
-      PRAGMA optimize;
-
-      CREATE TABLE IF NOT EXISTS info (
-        key TEXT PRIMARY KEY,
-        value TEXT NOT NULL
-      );
-
-      {}
-      ",
+      concat!(
+        "PRAGMA journal_mode=WAL;",
+        "PRAGMA synchronous=NORMAL;",
+        "PRAGMA temp_store=memory;",
+        "PRAGMA page_size=4096;",
+        "PRAGMA mmap_size=6000000;",
+        "PRAGMA optimize;",
+        "CREATE TABLE IF NOT EXISTS info (key TEXT PRIMARY KEY, value TEXT NOT NULL);",
+        "{}",
+      ),
       self.table_initializer
     )
   }
@@ -178,7 +216,7 @@ impl CacheDB {
   /// Open the connection in memory or on disk.
   fn actually_open_connection(
     &self,
-    path: &Option<PathBuf>,
+    path: Option<&Path>,
   ) -> Result<Connection, rusqlite::Error> {
     match path {
       // This should never fail unless something is very wrong
@@ -224,7 +262,7 @@ impl CacheDB {
   /// Open and initialize a connection.
   fn open_connection_and_init(
     &self,
-    path: &Option<PathBuf>,
+    path: Option<&Path>,
   ) -> Result<Connection, AnyError> {
     let conn = self.actually_open_connection(path)?;
     Self::initialize_connection(self.config, &conn, self.version)?;
@@ -234,83 +272,9 @@ impl CacheDB {
   /// This function represents the policy for dealing with corrupted cache files. We try fairly aggressively
   /// to repair the situation, and if we can't, we prefer to log noisily and continue with in-memory caches.
   fn open_connection(&self) -> Result<ConnectionState, AnyError> {
-    // Success on first try? We hope that this is the case.
-    let err = match self.open_connection_and_init(&self.path) {
-      Ok(conn) => return Ok(ConnectionState::Connected(conn)),
-      Err(err) => err,
-    };
-
-    if self.path.is_none() {
-      // If an in-memory DB fails, that's game over
-      log::error!("Failed to initialize in-memory cache database.");
-      return Err(err);
-    }
-
-    let path = self.path.as_ref().unwrap();
-
-    // There are rare times in the tests when we can't initialize a cache DB the first time, but it succeeds the second time, so
-    // we don't log these at a debug level.
-    log::trace!(
-      "Could not initialize cache database '{}', retrying... ({err:?})",
-      path.to_string_lossy(),
-    );
-
-    // Try a second time
-    let err = match self.open_connection_and_init(&self.path) {
-      Ok(conn) => return Ok(ConnectionState::Connected(conn)),
-      Err(err) => err,
-    };
-
-    // Failed, try deleting it
-    let is_tty = std::io::stderr().is_terminal();
-    log::log!(
-      if is_tty { log::Level::Warn } else { log::Level::Trace },
-      "Could not initialize cache database '{}', deleting and retrying... ({err:?})",
-      path.to_string_lossy()
-    );
-    if std::fs::remove_file(path).is_ok() {
-      // Try a third time if we successfully deleted it
-      let res = self.open_connection_and_init(&self.path);
-      if let Ok(conn) = res {
-        return Ok(ConnectionState::Connected(conn));
-      };
-    }
-
-    match self.config.on_failure {
-      CacheFailure::InMemory => {
-        log::log!(
-          if is_tty {
-            log::Level::Error
-          } else {
-            log::Level::Trace
-          },
-          "Failed to open cache file '{}', opening in-memory cache.",
-          path.to_string_lossy()
-        );
-        Ok(ConnectionState::Connected(
-          self.open_connection_and_init(&None)?,
-        ))
-      }
-      CacheFailure::Blackhole => {
-        log::log!(
-          if is_tty {
-            log::Level::Error
-          } else {
-            log::Level::Trace
-          },
-          "Failed to open cache file '{}', performance may be degraded.",
-          path.to_string_lossy()
-        );
-        Ok(ConnectionState::Blackhole)
-      }
-      CacheFailure::Error => {
-        log::error!(
-          "Failed to open cache file '{}', expect further errors.",
-          path.to_string_lossy()
-        );
-        Err(err)
-      }
-    }
+    open_connection(self.config, self.path.as_deref(), |maybe_path| {
+      self.open_connection_and_init(maybe_path)
+    })
   }

   fn initialize<'a>(
@@ -397,8 +361,105 @@ impl CacheDB {
   }
 }

+/// This function represents the policy for dealing with corrupted cache files. We try fairly aggressively
+/// to repair the situation, and if we can't, we prefer to log noisily and continue with in-memory caches.
+fn open_connection(
+  config: &CacheDBConfiguration,
+  path: Option<&Path>,
+  open_connection_and_init: impl Fn(Option<&Path>) -> Result<Connection, AnyError>,
+) -> Result<ConnectionState, AnyError> {
+  // Success on first try? We hope that this is the case.
+  let err = match open_connection_and_init(path) {
+    Ok(conn) => return Ok(ConnectionState::Connected(conn)),
+    Err(err) => err,
+  };
+
+  let Some(path) = path.as_ref() else {
+    // If an in-memory DB fails, that's game over
+    log::error!("Failed to initialize in-memory cache database.");
+    return Err(err);
+  };
+
+  // ensure the parent directory exists
+  if let Some(parent) = path.parent() {
+    match std::fs::create_dir_all(parent) {
+      Ok(_) => {
+        log::debug!("Created parent directory for cache db.");
+      }
+      Err(err) => {
+        log::debug!("Failed creating the cache db parent dir: {:#}", err);
+      }
+    }
+  }
+
+  // There are rare times in the tests when we can't initialize a cache DB the first time, but it succeeds the second time, so
+  // we don't log these at a debug level.
+  log::trace!(
+    "Could not initialize cache database '{}', retrying... ({err:?})",
+    path.to_string_lossy(),
+  );
+
+  // Try a second time
+  let err = match open_connection_and_init(Some(path)) {
+    Ok(conn) => return Ok(ConnectionState::Connected(conn)),
+    Err(err) => err,
+  };
+
+  // Failed, try deleting it
+  let is_tty = std::io::stderr().is_terminal();
+  log::log!(
+    if is_tty { log::Level::Warn } else { log::Level::Trace },
+    "Could not initialize cache database '{}', deleting and retrying... ({err:?})",
+    path.to_string_lossy()
+  );
+  if std::fs::remove_file(path).is_ok() {
+    // Try a third time if we successfully deleted it
+    let res = open_connection_and_init(Some(path));
+    if let Ok(conn) = res {
+      return Ok(ConnectionState::Connected(conn));
+    };
+  }
+
+  match config.on_failure {
+    CacheFailure::InMemory => {
+      log::log!(
+        if is_tty {
+          log::Level::Error
+        } else {
+          log::Level::Trace
+        },
+        "Failed to open cache file '{}', opening in-memory cache.",
+        path.to_string_lossy()
+      );
+      Ok(ConnectionState::Connected(open_connection_and_init(None)?))
+    }
+    CacheFailure::Blackhole => {
+      log::log!(
+        if is_tty {
+          log::Level::Error
+        } else {
+          log::Level::Trace
+        },
+        "Failed to open cache file '{}', performance may be degraded.",
+        path.to_string_lossy()
+      );
+      Ok(ConnectionState::Blackhole)
+    }
+    CacheFailure::Error => {
+      log::error!(
+        "Failed to open cache file '{}', expect further errors.",
+        path.to_string_lossy()
+      );
+      Err(err)
+    }
+  }
+}
+
 #[cfg(test)]
 mod tests {
+  use deno_core::anyhow::anyhow;
+  use test_util::TempDir;
+
   use super::*;

   static TEST_DB: CacheDBConfiguration = CacheDBConfiguration {
@@ -409,15 +470,15 @@ mod tests {
   };

   static TEST_DB_BLACKHOLE: CacheDBConfiguration = CacheDBConfiguration {
-    table_initializer: "create table if not exists test(value TEXT);",
-    on_version_change: "delete from test;",
+    table_initializer: "syntax error", // intentionally cause an error
+    on_version_change: "",
     preheat_queries: &[],
     on_failure: CacheFailure::Blackhole,
   };

   static TEST_DB_ERROR: CacheDBConfiguration = CacheDBConfiguration {
-    table_initializer: "create table if not exists test(value TEXT);",
-    on_version_change: "delete from test;",
+    table_initializer: "syntax error", // intentionally cause an error
+    on_version_change: "",
     preheat_queries: &[],
     on_failure: CacheFailure::Error,
   };
@@ -429,8 +490,6 @@ mod tests {
     on_failure: CacheFailure::InMemory,
   };

-  static FAILURE_PATH: &str = "/tmp/this/doesnt/exist/so/will/always/fail";
-
   #[tokio::test]
   async fn simple_database() {
     let db = CacheDB::in_memory(&TEST_DB, "1.0");
@@ -443,7 +502,7 @@ mod tests {
       Ok(row.get::<_, String>(0).unwrap())
     })
     .unwrap();
-    assert_eq!(Some("1".into()), res);
+    assert_eq!(res, Some("1".into()));
   }

   #[tokio::test]
@@ -455,22 +514,23 @@ mod tests {

   #[tokio::test]
   async fn failure_mode_in_memory() {
-    let db = CacheDB::from_path(&TEST_DB, FAILURE_PATH.into(), "1.0");
-    db.ensure_connected()
-      .expect("Should have created a database");
-
-    db.execute("insert into test values (?1)", [1]).unwrap();
-    let res = db
-      .query_row("select * from test", [], |row| {
-        Ok(row.get::<_, String>(0).unwrap())
+    let temp_dir = TempDir::new();
+    let path = temp_dir.path().join("data").to_path_buf();
+    let state = open_connection(&TEST_DB, Some(path.as_path()), |maybe_path| {
+      match maybe_path {
+        Some(_) => Err(anyhow!("fail")),
+        None => Ok(Connection::open_in_memory().unwrap()),
+      }
     })
     .unwrap();
-    assert_eq!(Some("1".into()), res);
+    assert!(matches!(state, ConnectionState::Connected(_)));
   }

   #[tokio::test]
   async fn failure_mode_blackhole() {
-    let db = CacheDB::from_path(&TEST_DB_BLACKHOLE, FAILURE_PATH.into(), "1.0");
+    let temp_dir = TempDir::new();
+    let path = temp_dir.path().join("data");
+    let db = CacheDB::from_path(&TEST_DB_BLACKHOLE, path.to_path_buf(), "1.0");
     db.ensure_connected()
       .expect("Should have created a database");
@ -480,12 +540,14 @@ mod tests {
|
|||
Ok(row.get::<_, String>(0).unwrap())
|
||||
})
|
||||
.unwrap();
|
||||
assert_eq!(None, res);
|
||||
assert_eq!(res, None);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn failure_mode_error() {
|
||||
let db = CacheDB::from_path(&TEST_DB_ERROR, FAILURE_PATH.into(), "1.0");
|
||||
let temp_dir = TempDir::new();
|
||||
let path = temp_dir.path().join("data");
|
||||
let db = CacheDB::from_path(&TEST_DB_ERROR, path.to_path_buf(), "1.0");
|
||||
db.ensure_connected().expect_err("Should have failed");
|
||||
|
||||
db.execute("insert into test values (?1)", [1])
|
||||
|
@ -495,4 +557,32 @@ mod tests {
|
|||
})
|
||||
.expect_err("Should have failed");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_db_hash_max_u64_value() {
|
||||
assert_same_serialize_deserialize(CacheDBHash::new(u64::MAX));
|
||||
assert_same_serialize_deserialize(CacheDBHash::new(u64::MAX - 1));
|
||||
assert_same_serialize_deserialize(CacheDBHash::new(u64::MIN));
|
||||
assert_same_serialize_deserialize(CacheDBHash::new(u64::MIN + 1));
|
||||
}
|
||||
|
||||
fn assert_same_serialize_deserialize(original_hash: CacheDBHash) {
|
||||
use rusqlite::types::FromSql;
|
||||
use rusqlite::types::ValueRef;
|
||||
use rusqlite::ToSql;
|
||||
|
||||
let value = original_hash.to_sql().unwrap();
|
||||
match value {
|
||||
rusqlite::types::ToSqlOutput::Owned(rusqlite::types::Value::Integer(
|
||||
value,
|
||||
)) => {
|
||||
let value_ref = ValueRef::Integer(value);
|
||||
assert_eq!(
|
||||
original_hash,
|
||||
CacheDBHash::column_result(value_ref).unwrap()
|
||||
);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
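Note: the `cache_db_hash_max_u64_value` test above pins down the trickiest part of storing u64 hashes in SQLite, whose INTEGER type is a signed 64-bit value. Below is a minimal standalone sketch of the idea; the bit-cast is an assumption about how `CacheDBHash` maps values, since this diff does not show its implementation.

// Hypothetical sketch, not the actual CacheDBHash code: reinterpret the
// u64 bits as i64 on write and reverse the cast on read, so even
// u64::MAX (stored as -1) survives a round trip through SQLite.
fn hash_to_sql(hash: u64) -> i64 {
  hash as i64
}

fn hash_from_sql(value: i64) -> u64 {
  value as u64
}

fn main() {
  for hash in [u64::MIN, u64::MIN + 1, u64::MAX - 1, u64::MAX] {
    assert_eq!(hash, hash_from_sql(hash_to_sql(hash)));
  }
}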
cli/cache/caches.rs (vendored): 44 changes

@@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::path::PathBuf;
use std::sync::Arc;

@@ -8,18 +8,22 @@ use once_cell::sync::OnceCell;
use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::check::TYPE_CHECK_CACHE_DB;
use super::code_cache::CODE_CACHE_DB;
use super::deno_dir::DenoDirProvider;
use super::fast_check::FAST_CHECK_CACHE_DB;
use super::incremental::INCREMENTAL_CACHE_DB;
use super::module_info::MODULE_INFO_CACHE_DB;
use super::node::NODE_ANALYSIS_CACHE_DB;
use super::parsed_source::PARSED_SOURCE_CACHE_DB;

pub struct Caches {
  dir_provider: Arc<DenoDirProvider>,
  fmt_incremental_cache_db: OnceCell<CacheDB>,
  lint_incremental_cache_db: OnceCell<CacheDB>,
  dep_analysis_db: OnceCell<CacheDB>,
  fast_check_db: OnceCell<CacheDB>,
  node_analysis_db: OnceCell<CacheDB>,
  type_checking_cache_db: OnceCell<CacheDB>,
  code_cache_db: OnceCell<CacheDB>,
}

impl Caches {

@@ -29,8 +33,10 @@ impl Caches {
      fmt_incremental_cache_db: Default::default(),
      lint_incremental_cache_db: Default::default(),
      dep_analysis_db: Default::default(),
      fast_check_db: Default::default(),
      node_analysis_db: Default::default(),
      type_checking_cache_db: Default::default(),
      code_cache_db: Default::default(),
    }
  }

@@ -42,9 +48,13 @@ impl Caches {
    cell
      .get_or_init(|| {
        if let Some(path) = path {
          CacheDB::from_path(config, path, crate::version::deno())
          CacheDB::from_path(
            config,
            path,
            crate::version::DENO_VERSION_INFO.deno,
          )
        } else {
          CacheDB::in_memory(config, crate::version::deno())
          CacheDB::in_memory(config, crate::version::DENO_VERSION_INFO.deno)
        }
      })
      .clone()

@@ -77,7 +87,7 @@ impl Caches {
  pub fn dep_analysis_db(&self) -> CacheDB {
    Self::make_db(
      &self.dep_analysis_db,
      &PARSED_SOURCE_CACHE_DB,
      &MODULE_INFO_CACHE_DB,
      self
        .dir_provider
        .get_or_create()

@@ -86,6 +96,18 @@ impl Caches {
    )
  }

  pub fn fast_check_db(&self) -> CacheDB {
    Self::make_db(
      &self.fast_check_db,
      &FAST_CHECK_CACHE_DB,
      self
        .dir_provider
        .get_or_create()
        .ok()
        .map(|dir| dir.fast_check_cache_db_file_path()),
    )
  }

  pub fn node_analysis_db(&self) -> CacheDB {
    Self::make_db(
      &self.node_analysis_db,

@@ -109,4 +131,16 @@ impl Caches {
        .map(|dir| dir.type_checking_cache_db_file_path()),
    )
  }

  pub fn code_cache_db(&self) -> CacheDB {
    Self::make_db(
      &self.code_cache_db,
      &CODE_CACHE_DB,
      self
        .dir_provider
        .get_or_create()
        .ok()
        .map(|dir| dir.code_cache_db_file_path()),
    )
  }
}
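Note: each accessor above follows the same `OnceCell` pattern, so a database file is opened at most once and only when first requested. A minimal sketch of that pattern, with a `String` standing in for `CacheDB`:

use once_cell::sync::OnceCell;

struct Caches {
  db: OnceCell<String>, // hypothetical stand-in for OnceCell<CacheDB>
}

impl Caches {
  fn db(&self) -> &String {
    // the closure runs only on the first call; later calls reuse the value
    self.db.get_or_init(|| "opened".to_string())
  }
}

fn main() {
  let caches = Caches { db: OnceCell::new() };
  assert_eq!(caches.db(), "opened");
  assert_eq!(caches.db(), "opened"); // second call reuses the first result
}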
cli/cache/check.rs (vendored): 57 changes

@@ -1,7 +1,8 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;

@@ -9,13 +10,13 @@ use deno_runtime::deno_webstorage::rusqlite::params;

pub static TYPE_CHECK_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
  table_initializer: concat!(
    "CREATE TABLE IF NOT EXISTS checkcache (
      check_hash TEXT PRIMARY KEY
    );",
    "CREATE TABLE IF NOT EXISTS tsbuildinfo (
      specifier TEXT PRIMARY KEY,
      text TEXT NOT NULL
    );",
    "CREATE TABLE IF NOT EXISTS checkcache (",
    "check_hash INT PRIMARY KEY",
    ");",
    "CREATE TABLE IF NOT EXISTS tsbuildinfo (",
    "specifier TEXT PRIMARY KEY,",
    "text TEXT NOT NULL",
    ");",
  ),
  on_version_change: concat!(
    "DELETE FROM checkcache;",

@@ -37,7 +38,7 @@ impl TypeCheckCache {
    Self(db)
  }

  pub fn has_check_hash(&self, hash: u64) -> bool {
  pub fn has_check_hash(&self, hash: CacheDBHash) -> bool {
    match self.hash_check_hash_result(hash) {
      Ok(val) => val,
      Err(err) => {

@@ -52,14 +53,17 @@ impl TypeCheckCache {
    }
  }

  fn hash_check_hash_result(&self, hash: u64) -> Result<bool, AnyError> {
  fn hash_check_hash_result(
    &self,
    hash: CacheDBHash,
  ) -> Result<bool, AnyError> {
    self.0.exists(
      "SELECT * FROM checkcache WHERE check_hash=?1 LIMIT 1",
      params![hash.to_string()],
      params![hash],
    )
  }

  pub fn add_check_hash(&self, check_hash: u64) {
  pub fn add_check_hash(&self, check_hash: CacheDBHash) {
    if let Err(err) = self.add_check_hash_result(check_hash) {
      if cfg!(debug_assertions) {
        panic!("Error saving check hash: {err}");

@@ -69,13 +73,16 @@ impl TypeCheckCache {
    }
  }

  fn add_check_hash_result(&self, check_hash: u64) -> Result<(), AnyError> {
  fn add_check_hash_result(
    &self,
    check_hash: CacheDBHash,
  ) -> Result<(), AnyError> {
    let sql = "
    INSERT OR REPLACE INTO
      checkcache (check_hash)
    VALUES
      (?1)";
    self.0.execute(sql, params![&check_hash.to_string(),])?;
    self.0.execute(sql, params![check_hash])?;
    Ok(())
  }

@@ -123,10 +130,10 @@ mod test {
    let conn = CacheDB::in_memory(&TYPE_CHECK_CACHE_DB, "1.0.0");
    let cache = TypeCheckCache::new(conn);

    assert!(!cache.has_check_hash(1));
    cache.add_check_hash(1);
    assert!(cache.has_check_hash(1));
    assert!(!cache.has_check_hash(2));
    assert!(!cache.has_check_hash(CacheDBHash::new(1)));
    cache.add_check_hash(CacheDBHash::new(1));
    assert!(cache.has_check_hash(CacheDBHash::new(1)));
    assert!(!cache.has_check_hash(CacheDBHash::new(2)));

    let specifier1 = ModuleSpecifier::parse("file:///test.json").unwrap();
    assert_eq!(cache.get_tsbuildinfo(&specifier1), None);

@@ -137,9 +144,9 @@ mod test {
    let conn = cache.0.recreate_with_version("2.0.0");
    let cache = TypeCheckCache::new(conn);

    assert!(!cache.has_check_hash(1));
    cache.add_check_hash(1);
    assert!(cache.has_check_hash(1));
    assert!(!cache.has_check_hash(CacheDBHash::new(1)));
    cache.add_check_hash(CacheDBHash::new(1));
    assert!(cache.has_check_hash(CacheDBHash::new(1)));
    assert_eq!(cache.get_tsbuildinfo(&specifier1), None);
    cache.set_tsbuildinfo(&specifier1, "test");
    assert_eq!(cache.get_tsbuildinfo(&specifier1), Some("test".to_string()));

@@ -148,13 +155,13 @@ mod test {
    let conn = cache.0.recreate_with_version("2.0.0");
    let cache = TypeCheckCache::new(conn);

    assert!(cache.has_check_hash(1));
    assert!(!cache.has_check_hash(2));
    assert!(cache.has_check_hash(CacheDBHash::new(1)));
    assert!(!cache.has_check_hash(CacheDBHash::new(2)));
    assert_eq!(cache.get_tsbuildinfo(&specifier1), Some("test".to_string()));

    // adding when already exists should not cause issue
    cache.add_check_hash(1);
    assert!(cache.has_check_hash(1));
    cache.add_check_hash(CacheDBHash::new(1));
    assert!(cache.has_check_hash(CacheDBHash::new(1)));
    cache.set_tsbuildinfo(&specifier1, "other");
    assert_eq!(
      cache.get_tsbuildinfo(&specifier1),
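Note: the switch from a `check_hash TEXT` column bound via `hash.to_string()` to an INT column bound as `params![hash]` relies on the hash type implementing rusqlite's `ToSql` and `FromSql` traits. A minimal sketch of such a newtype (`Hash` here is hypothetical, not the actual `CacheDBHash`):

use rusqlite::types::{FromSql, FromSqlResult, ToSqlOutput, Value, ValueRef};
use rusqlite::{params, Connection, ToSql};

#[derive(Debug, Clone, Copy, PartialEq)]
struct Hash(u64);

impl ToSql for Hash {
  fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
    // store the u64 bits in SQLite's signed INTEGER column
    Ok(ToSqlOutput::Owned(Value::Integer(self.0 as i64)))
  }
}

impl FromSql for Hash {
  fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
    Ok(Hash(value.as_i64()? as u64))
  }
}

fn main() -> rusqlite::Result<()> {
  let conn = Connection::open_in_memory()?;
  conn.execute("CREATE TABLE checkcache (check_hash INT PRIMARY KEY)", [])?;
  conn.execute(
    "INSERT OR REPLACE INTO checkcache (check_hash) VALUES (?1)",
    params![Hash(u64::MAX)],
  )?;
  let found: bool = conn
    .query_row(
      "SELECT 1 FROM checkcache WHERE check_hash=?1 LIMIT 1",
      params![Hash(u64::MAX)],
      |_| Ok(true),
    )
    .unwrap_or(false);
  assert!(found);
  Ok(())
}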
cli/cache/code_cache.rs (vendored, new file): 250 changes

@@ -0,0 +1,250 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::code_cache;
use deno_runtime::deno_webstorage::rusqlite::params;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure;

pub static CODE_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
  table_initializer: concat!(
    "CREATE TABLE IF NOT EXISTS codecache (",
    "specifier TEXT NOT NULL,",
    "type INTEGER NOT NULL,",
    "source_hash INTEGER NOT NULL,",
    "data BLOB NOT NULL,",
    "PRIMARY KEY (specifier, type)",
    ");"
  ),
  on_version_change: "DELETE FROM codecache;",
  preheat_queries: &[],
  on_failure: CacheFailure::Blackhole,
};

pub struct CodeCache {
  inner: CodeCacheInner,
}

impl CodeCache {
  pub fn new(db: CacheDB) -> Self {
    Self {
      inner: CodeCacheInner::new(db),
    }
  }

  fn ensure_ok<T: Default>(res: Result<T, AnyError>) -> T {
    match res {
      Ok(x) => x,
      Err(err) => {
        // TODO(mmastrac): This behavior was inherited from before the refactoring but it probably makes sense to move it into the cache
        // at some point.
        // should never error here, but if it ever does don't fail
        if cfg!(debug_assertions) {
          panic!("Error using code cache: {err:#}");
        } else {
          log::debug!("Error using code cache: {:#}", err);
        }
        T::default()
      }
    }
  }

  pub fn get_sync(
    &self,
    specifier: &ModuleSpecifier,
    code_cache_type: code_cache::CodeCacheType,
    source_hash: u64,
  ) -> Option<Vec<u8>> {
    Self::ensure_ok(self.inner.get_sync(
      specifier.as_str(),
      code_cache_type,
      CacheDBHash::new(source_hash),
    ))
  }

  pub fn set_sync(
    &self,
    specifier: &ModuleSpecifier,
    code_cache_type: code_cache::CodeCacheType,
    source_hash: u64,
    data: &[u8],
  ) {
    Self::ensure_ok(self.inner.set_sync(
      specifier.as_str(),
      code_cache_type,
      CacheDBHash::new(source_hash),
      data,
    ));
  }
}

impl code_cache::CodeCache for CodeCache {
  fn get_sync(
    &self,
    specifier: &ModuleSpecifier,
    code_cache_type: code_cache::CodeCacheType,
    source_hash: u64,
  ) -> Option<Vec<u8>> {
    self.get_sync(specifier, code_cache_type, source_hash)
  }

  fn set_sync(
    &self,
    specifier: ModuleSpecifier,
    code_cache_type: code_cache::CodeCacheType,
    source_hash: u64,
    data: &[u8],
  ) {
    self.set_sync(&specifier, code_cache_type, source_hash, data);
  }
}

struct CodeCacheInner {
  conn: CacheDB,
}

impl CodeCacheInner {
  pub fn new(conn: CacheDB) -> Self {
    Self { conn }
  }

  pub fn get_sync(
    &self,
    specifier: &str,
    code_cache_type: code_cache::CodeCacheType,
    source_hash: CacheDBHash,
  ) -> Result<Option<Vec<u8>>, AnyError> {
    let query = "
      SELECT
        data
      FROM
        codecache
      WHERE
        specifier=?1 AND type=?2 AND source_hash=?3
      LIMIT 1";
    let params = params![
      specifier,
      serialize_code_cache_type(code_cache_type),
      source_hash,
    ];
    self.conn.query_row(query, params, |row| {
      let value: Vec<u8> = row.get(0)?;
      Ok(value)
    })
  }

  pub fn set_sync(
    &self,
    specifier: &str,
    code_cache_type: code_cache::CodeCacheType,
    source_hash: CacheDBHash,
    data: &[u8],
  ) -> Result<(), AnyError> {
    let sql = "
      INSERT OR REPLACE INTO
        codecache (specifier, type, source_hash, data)
      VALUES
        (?1, ?2, ?3, ?4)";
    let params = params![
      specifier,
      serialize_code_cache_type(code_cache_type),
      source_hash,
      data
    ];
    self.conn.execute(sql, params)?;
    Ok(())
  }
}

fn serialize_code_cache_type(
  code_cache_type: code_cache::CodeCacheType,
) -> i64 {
  match code_cache_type {
    code_cache::CodeCacheType::Script => 0,
    code_cache::CodeCacheType::EsModule => 1,
  }
}

#[cfg(test)]
mod test {
  use super::*;

  #[test]
  pub fn end_to_end() {
    let conn = CacheDB::in_memory(&CODE_CACHE_DB, "1.0.0");
    let cache = CodeCacheInner::new(conn);

    assert!(cache
      .get_sync(
        "file:///foo/bar.js",
        code_cache::CodeCacheType::EsModule,
        CacheDBHash::new(1),
      )
      .unwrap()
      .is_none());
    let data_esm = vec![1, 2, 3];
    cache
      .set_sync(
        "file:///foo/bar.js",
        code_cache::CodeCacheType::EsModule,
        CacheDBHash::new(1),
        &data_esm,
      )
      .unwrap();
    assert_eq!(
      cache
        .get_sync(
          "file:///foo/bar.js",
          code_cache::CodeCacheType::EsModule,
          CacheDBHash::new(1),
        )
        .unwrap()
        .unwrap(),
      data_esm
    );

    assert!(cache
      .get_sync(
        "file:///foo/bar.js",
        code_cache::CodeCacheType::Script,
        CacheDBHash::new(1),
      )
      .unwrap()
      .is_none());
    let data_script = vec![4, 5, 6];
    cache
      .set_sync(
        "file:///foo/bar.js",
        code_cache::CodeCacheType::Script,
        CacheDBHash::new(1),
        &data_script,
      )
      .unwrap();
    assert_eq!(
      cache
        .get_sync(
          "file:///foo/bar.js",
          code_cache::CodeCacheType::Script,
          CacheDBHash::new(1),
        )
        .unwrap()
        .unwrap(),
      data_script
    );
    assert_eq!(
      cache
        .get_sync(
          "file:///foo/bar.js",
          code_cache::CodeCacheType::EsModule,
          CacheDBHash::new(1),
        )
        .unwrap()
        .unwrap(),
      data_esm
    );
  }
}
cli/cache/common.rs (vendored): 13 changes

@@ -1,18 +1,19 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::hash::Hasher;

/// A very fast insecure hasher that uses the xxHash algorithm.
#[derive(Default)]
pub struct FastInsecureHasher(twox_hash::XxHash64);

impl FastInsecureHasher {
  pub fn new() -> Self {
    Self::default()
  pub fn new_without_deno_version() -> Self {
    Self(Default::default())
  }

  pub fn hash(hashable: impl std::hash::Hash) -> u64 {
    Self::new().write_hashable(hashable).finish()
  pub fn new_deno_versioned() -> Self {
    let mut hasher = Self::new_without_deno_version();
    hasher.write_str(crate::version::DENO_VERSION_INFO.deno);
    hasher
  }

  pub fn write_str(&mut self, text: &str) -> &mut Self {
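Note: the rename makes it explicit at each call site whether a hash is salted with the CLI version. A standalone sketch of the two constructors (the separator byte in `write_str` and the version strings below are assumptions for illustration, not taken from this diff):

use std::hash::Hasher;

struct FastInsecureHasher(twox_hash::XxHash64);

impl FastInsecureHasher {
  fn new_without_version() -> Self {
    Self(Default::default())
  }

  // Seeding with the CLI version makes every derived cache key change
  // automatically on upgrade.
  fn new_versioned(cli_version: &str) -> Self {
    let mut hasher = Self::new_without_version();
    hasher.write_str(cli_version);
    hasher
  }

  fn write_str(&mut self, text: &str) -> &mut Self {
    self.0.write(text.as_bytes());
    self.0.write(&[0xFF]); // separator so "ab" + "c" != "a" + "bc"
    self
  }

  fn finish(&self) -> u64 {
    self.0.finish()
  }
}

fn main() {
  let mut a = FastInsecureHasher::new_versioned("1.0.0");
  a.write_str("file:///mod.ts");
  let mut b = FastInsecureHasher::new_versioned("2.0.0");
  b.write_str("file:///mod.ts");
  assert_ne!(a.finish(), b.finish()); // same input, new version, new key
}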
cli/cache/deno_dir.rs (vendored): 45 changes

@@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use once_cell::sync::OnceCell;

@@ -33,11 +33,10 @@ impl DenoDirProvider {

/// `DenoDir` serves as coordinator for multiple `DiskCache`s containing them
/// in single directory that can be controlled with `$DENO_DIR` env variable.
#[derive(Clone)]
#[derive(Debug, Clone)]
pub struct DenoDir {
  /// Example: /Users/rld/.deno/
  /// Note: This is not exposed in order to encourage using re-usable methods.
  root: PathBuf,
  pub root: PathBuf,
  /// Used by TsCompiler to cache compiler output.
  pub gen_cache: DiskCache,
}

@@ -80,34 +79,46 @@ impl DenoDir {
    self.root.display()
  }

  /// Path for the V8 code cache.
  pub fn code_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("v8_code_cache_v2")
  }

  /// Path for the incremental cache used for formatting.
  pub fn fmt_incremental_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("fmt_incremental_cache_v1")
    self.root.join("fmt_incremental_cache_v2")
  }

  /// Path for the incremental cache used for linting.
  pub fn lint_incremental_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("lint_incremental_cache_v1")
    self.root.join("lint_incremental_cache_v2")
  }

  /// Path for caching swc dependency analysis.
  pub fn dep_analysis_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("dep_analysis_cache_v1")
    self.root.join("dep_analysis_cache_v2")
  }

  /// Path for the cache used for fast check.
  pub fn fast_check_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("fast_check_cache_v2")
  }

  /// Path for caching node analysis.
  pub fn node_analysis_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("node_analysis_cache_v1")
    self.root.join("node_analysis_cache_v2")
  }

  /// Path for the cache used for type checking.
  pub fn type_checking_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("check_cache_v1")
    self.root.join("check_cache_v2")
  }

  /// Path to the registries cache, used for the lps.

@@ -115,9 +126,9 @@ impl DenoDir {
    self.root.join("registries")
  }

  /// Path to the dependencies cache folder.
  pub fn deps_folder_path(&self) -> PathBuf {
    self.root.join("deps")
  /// Path to the remote cache folder.
  pub fn remote_folder_path(&self) -> PathBuf {
    self.root.join("remote")
  }

  /// Path to the origin data cache folder.

@@ -158,7 +169,7 @@ impl DenoDir {

/// To avoid the poorly managed dirs crate
#[cfg(not(windows))]
mod dirs {
pub mod dirs {
  use std::path::PathBuf;

  pub fn cache_dir() -> Option<PathBuf> {

@@ -216,7 +227,7 @@ mod dirs {
// https://github.com/dirs-dev/dirs-sys-rs/blob/ec7cee0b3e8685573d847f0a0f60aae3d9e07fa2/src/lib.rs#L140-L164
// MIT license. Copyright (c) 2018-2019 dirs-rs contributors
#[cfg(windows)]
mod dirs {
pub mod dirs {
  use std::ffi::OsString;
  use std::os::windows::ffi::OsStringExt;
  use std::path::PathBuf;

@@ -255,6 +266,12 @@ mod dirs {
  }

  pub fn home_dir() -> Option<PathBuf> {
    if let Some(userprofile) = std::env::var_os("USERPROFILE") {
      if !userprofile.is_empty() {
        return Some(PathBuf::from(userprofile));
      }
    }

    known_folder(&knownfolders::FOLDERID_Profile)
  }
}
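Note: every path getter above embeds a version suffix in the file name, so invalidation is a rename rather than a migration: bumping `_v1` to `_v2` simply makes the old database file unreachable. A trivial sketch (the `DENO_DIR` path below is hypothetical):

use std::path::{Path, PathBuf};

// Sketch: the suffix is part of the on-disk name, so old cache files are
// abandoned in place when the suffix is bumped.
fn dep_analysis_db_file_path(root: &Path) -> PathBuf {
  // bump this version name to invalidate the entire cache
  root.join("dep_analysis_cache_v2")
}

fn main() {
  let root = Path::new("/home/user/.deno"); // hypothetical DENO_DIR
  assert_eq!(
    dep_analysis_db_file_path(root),
    PathBuf::from("/home/user/.deno/dep_analysis_cache_v2")
  );
}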
cli/cache/disk_cache.rs (vendored): 8 changes

@@ -1,7 +1,7 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use super::CACHE_PERM;
use crate::util::fs::atomic_write_file;
use crate::util::fs::atomic_write_file_with_retries;

use deno_cache_dir::url_to_filename;
use deno_core::url::Host;

@@ -14,7 +14,7 @@ use std::path::PathBuf;
use std::path::Prefix;
use std::str;

#[derive(Clone)]
#[derive(Debug, Clone)]
pub struct DiskCache {
  pub location: PathBuf,
}

@@ -120,7 +120,7 @@ impl DiskCache {

  pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
    let path = self.location.join(filename);
    atomic_write_file(&path, data, CACHE_PERM)
    atomic_write_file_with_retries(&path, data, CACHE_PERM)
  }
}
cli/cache/emit.rs (vendored): 172 changes

@@ -1,35 +1,29 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::path::PathBuf;

use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use serde::Deserialize;
use serde::Serialize;
use deno_core::unsync::sync::AtomicFlag;

use super::DiskCache;
use super::FastInsecureHasher;

#[derive(Debug, Deserialize, Serialize)]
struct EmitMetadata {
  pub source_hash: String,
  pub emit_hash: String,
}

/// The cache that stores previously emitted files.
#[derive(Clone)]
pub struct EmitCache {
  disk_cache: DiskCache,
  cli_version: &'static str,
  emit_failed_flag: AtomicFlag,
  file_serializer: EmitFileSerializer,
}

impl EmitCache {
  pub fn new(disk_cache: DiskCache) -> Self {
    Self {
      disk_cache,
      cli_version: crate::version::deno(),
      emit_failed_flag: Default::default(),
      file_serializer: EmitFileSerializer {
        cli_version: crate::version::DENO_VERSION_INFO.deno,
      },
    }
  }

@@ -46,38 +40,11 @@ impl EmitCache {
    specifier: &ModuleSpecifier,
    expected_source_hash: u64,
  ) -> Option<String> {
    let meta_filename = self.get_meta_filename(specifier)?;
    let emit_filename = self.get_emit_filename(specifier)?;

    // load and verify the meta data file is for this source and CLI version
    let bytes = self.disk_cache.get(&meta_filename).ok()?;
    let meta: EmitMetadata = serde_json::from_slice(&bytes).ok()?;
    if meta.source_hash != expected_source_hash.to_string() {
      return None;
    }

    // load and verify the emit is for the meta data
    let emit_bytes = self.disk_cache.get(&emit_filename).ok()?;
    if meta.emit_hash != compute_emit_hash(&emit_bytes, self.cli_version) {
      return None;
    }

    // everything looks good, return it
    let emit_text = String::from_utf8(emit_bytes).ok()?;
    Some(emit_text)
  }

  /// Gets the filepath which stores the emit.
  pub fn get_emit_filepath(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<PathBuf> {
    Some(
    let bytes = self.disk_cache.get(&emit_filename).ok()?;
      self
        .disk_cache
        .location
        .join(self.get_emit_filename(specifier)?),
    )
      .file_serializer
      .deserialize(bytes, expected_source_hash)
  }

  /// Sets the emit code in the cache.

@@ -85,15 +52,13 @@ impl EmitCache {
    &self,
    specifier: &ModuleSpecifier,
    source_hash: u64,
    code: &str,
    code: &[u8],
  ) {
    if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) {
      // should never error here, but if it ever does don't fail
      if cfg!(debug_assertions) {
        panic!("Error saving emit data ({specifier}): {err}");
      } else {
        // might error in cases such as a readonly file system
        log::debug!("Error saving emit data ({}): {}", specifier, err);
      }
      // assume the cache can't be written to and disable caching to it
      self.emit_failed_flag.raise();
    }
  }

@@ -101,36 +66,22 @@ impl EmitCache {
    &self,
    specifier: &ModuleSpecifier,
    source_hash: u64,
    code: &str,
    code: &[u8],
  ) -> Result<(), AnyError> {
    let meta_filename = self
      .get_meta_filename(specifier)
      .ok_or_else(|| anyhow!("Could not get meta filename."))?;
    if self.emit_failed_flag.is_raised() {
      log::debug!("Skipped emit cache save of {}", specifier);
      return Ok(());
    }

    let emit_filename = self
      .get_emit_filename(specifier)
      .ok_or_else(|| anyhow!("Could not get emit filename."))?;

    // save the metadata
    let metadata = EmitMetadata {
      source_hash: source_hash.to_string(),
      emit_hash: compute_emit_hash(code.as_bytes(), self.cli_version),
    };
    self
      .disk_cache
      .set(&meta_filename, &serde_json::to_vec(&metadata)?)?;

    // save the emit source
    self.disk_cache.set(&emit_filename, code.as_bytes())?;
    let cache_data = self.file_serializer.serialize(code, source_hash);
    self.disk_cache.set(&emit_filename, &cache_data)?;

    Ok(())
  }

  fn get_meta_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
    self
      .disk_cache
      .get_cache_filename_with_extension(specifier, "meta")
  }

  fn get_emit_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
    self
      .disk_cache

@@ -138,16 +89,68 @@ impl EmitCache {
  }
}

fn compute_emit_hash(bytes: &[u8], cli_version: &str) -> String {
const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";

struct EmitFileSerializer {
  cli_version: &'static str,
}

impl EmitFileSerializer {
  pub fn deserialize(
    &self,
    mut bytes: Vec<u8>,
    expected_source_hash: u64,
  ) -> Option<String> {
    let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
    let (content, last_line) = bytes.split_at(last_newline_index);
    let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
    let hashes = String::from_utf8_lossy(hashes);
    let (source_hash, emit_hash) = hashes.split_once(',')?;

    // verify the meta data file is for this source and CLI version
    let source_hash = source_hash.parse::<u64>().ok()?;
    if source_hash != expected_source_hash {
      return None;
    }
    let emit_hash = emit_hash.parse::<u64>().ok()?;
    // prevent using an emit from a different cli version or emits that were tampered with
    if emit_hash != self.compute_emit_hash(content) {
      return None;
    }

    // everything looks good, truncate and return it
    bytes.truncate(content.len());
    String::from_utf8(bytes).ok()
  }

  pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> {
    let source_hash = source_hash.to_string();
    let emit_hash = self.compute_emit_hash(code).to_string();
    let capacity = code.len()
      + LAST_LINE_PREFIX.len()
      + source_hash.len()
      + 1
      + emit_hash.len();
    let mut cache_data = Vec::with_capacity(capacity);
    cache_data.extend(code);
    cache_data.extend(LAST_LINE_PREFIX.as_bytes());
    cache_data.extend(source_hash.as_bytes());
    cache_data.push(b',');
    cache_data.extend(emit_hash.as_bytes());
    debug_assert_eq!(cache_data.len(), capacity);
    cache_data
  }

  fn compute_emit_hash(&self, bytes: &[u8]) -> u64 {
    // it's ok to use an insecure hash here because
    // if someone can change the emit source then they
    // can also change the version hash
    FastInsecureHasher::new()
    crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
      .write(bytes)
      // emit should not be re-used between cli versions
      .write(cli_version.as_bytes())
      .write_str(self.cli_version)
      .finish()
      .to_string()
  }
}

#[cfg(test)]

@@ -162,7 +165,10 @@ mod test {
    let disk_cache = DiskCache::new(temp_dir.path().as_path());
    let cache = EmitCache {
      disk_cache: disk_cache.clone(),
      file_serializer: EmitFileSerializer {
        cli_version: "1.0.0",
      },
      emit_failed_flag: Default::default(),
    };

    let specifier1 =

@@ -174,8 +180,8 @@ mod test {
    assert_eq!(cache.get_emit_code(&specifier1, 1), None);
    let emit_code1 = "text1".to_string();
    let emit_code2 = "text2".to_string();
    cache.set_emit_code(&specifier1, 10, &emit_code1);
    cache.set_emit_code(&specifier2, 2, &emit_code2);
    cache.set_emit_code(&specifier1, 10, emit_code1.as_bytes());
    cache.set_emit_code(&specifier2, 2, emit_code2.as_bytes());
    // providing the incorrect source hash
    assert_eq!(cache.get_emit_code(&specifier1, 5), None);
    // providing the correct source hash

@@ -188,21 +194,27 @@ mod test {
    // try changing the cli version (should not load previous ones)
    let cache = EmitCache {
      disk_cache: disk_cache.clone(),
      file_serializer: EmitFileSerializer {
        cli_version: "2.0.0",
      },
      emit_failed_flag: Default::default(),
    };
    assert_eq!(cache.get_emit_code(&specifier1, 10), None);
    cache.set_emit_code(&specifier1, 5, &emit_code1);
    cache.set_emit_code(&specifier1, 5, emit_code1.as_bytes());

    // recreating the cache should still load the data because the CLI version is the same
    let cache = EmitCache {
      disk_cache,
      file_serializer: EmitFileSerializer {
        cli_version: "2.0.0",
      },
      emit_failed_flag: Default::default(),
    };
    assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));

    // adding when already exists should not cause issue
    let emit_code3 = "asdf".to_string();
    cache.set_emit_code(&specifier1, 20, &emit_code3);
    cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes());
    assert_eq!(cache.get_emit_code(&specifier1, 5), None);
    assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
  }
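Note: the new single-file format replaces the old `.meta` JSON sidecar with a trailing comment line, `// denoCacheMetadata=<source_hash>,<emit_hash>`, appended to the emitted code itself. A standalone sketch of that round trip (hash computation is stubbed with fixed numbers here; the real code hashes the content together with the CLI version):

const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";

fn serialize(code: &[u8], source_hash: u64, emit_hash: u64) -> Vec<u8> {
  let mut out = code.to_vec();
  out.extend(LAST_LINE_PREFIX.as_bytes());
  out.extend(source_hash.to_string().as_bytes());
  out.push(b',');
  out.extend(emit_hash.to_string().as_bytes());
  out
}

fn deserialize(mut bytes: Vec<u8>, expected_source_hash: u64) -> Option<String> {
  // the metadata always sits after the last newline
  let last_newline = bytes.iter().rposition(|&b| b == b'\n')?;
  let (content, last_line) = bytes.split_at(last_newline);
  let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
  let hashes = std::str::from_utf8(hashes).ok()?;
  let (source_hash, _emit_hash) = hashes.split_once(',')?;
  if source_hash.parse::<u64>().ok()? != expected_source_hash {
    return None; // stale: the source file changed since this emit
  }
  let content_len = content.len();
  bytes.truncate(content_len); // strip the metadata line
  String::from_utf8(bytes).ok()
}

fn main() {
  let data = serialize(b"console.log(1);", 10, 99);
  assert_eq!(deserialize(data.clone(), 10).as_deref(), Some("console.log(1);"));
  assert_eq!(deserialize(data, 5), None);
}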
cli/cache/fast_check.rs (vendored, new file): 169 changes

@@ -0,0 +1,169 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::error::AnyError;
use deno_graph::FastCheckCacheItem;
use deno_graph::FastCheckCacheKey;
use deno_runtime::deno_webstorage::rusqlite::params;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure;

pub static FAST_CHECK_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
  table_initializer: concat!(
    "CREATE TABLE IF NOT EXISTS fastcheckcache (",
    "hash INTEGER PRIMARY KEY,",
    "data TEXT NOT NULL",
    ");"
  ),
  on_version_change: "DELETE FROM fastcheckcache;",
  preheat_queries: &[],
  on_failure: CacheFailure::Blackhole,
};

#[derive(Clone)]
pub struct FastCheckCache {
  inner: FastCheckCacheInner,
}

impl FastCheckCache {
  pub fn new(db: CacheDB) -> Self {
    Self {
      inner: FastCheckCacheInner::new(db),
    }
  }

  fn ensure_ok<T: Default>(res: Result<T, AnyError>) -> T {
    match res {
      Ok(x) => x,
      Err(err) => {
        // TODO(mmastrac): This behavior was inherited from before the refactoring but it probably makes sense to move it into the cache
        // at some point.
        // should never error here, but if it ever does don't fail
        if cfg!(debug_assertions) {
          panic!("Error using fast check cache: {err:#}");
        } else {
          log::debug!("Error using fast check cache: {:#}", err);
        }
        T::default()
      }
    }
  }
}

impl deno_graph::FastCheckCache for FastCheckCache {
  fn get(&self, key: FastCheckCacheKey) -> Option<FastCheckCacheItem> {
    Self::ensure_ok(self.inner.get(key))
  }

  fn set(&self, key: FastCheckCacheKey, value: FastCheckCacheItem) {
    Self::ensure_ok(self.inner.set(key, &value));
  }
}

#[derive(Clone)]
struct FastCheckCacheInner {
  conn: CacheDB,
}

impl FastCheckCacheInner {
  pub fn new(conn: CacheDB) -> Self {
    Self { conn }
  }

  pub fn get(
    &self,
    key: FastCheckCacheKey,
  ) -> Result<Option<FastCheckCacheItem>, AnyError> {
    let query = "
      SELECT
        data
      FROM
        fastcheckcache
      WHERE
        hash=?1
      LIMIT 1";
    let res = self.conn.query_row(
      query,
      params![CacheDBHash::new(key.as_u64())],
      |row| {
        let value: Vec<u8> = row.get(0)?;
        Ok(bincode::deserialize::<FastCheckCacheItem>(&value)?)
      },
    )?;
    Ok(res)
  }

  pub fn set(
    &self,
    key: FastCheckCacheKey,
    data: &FastCheckCacheItem,
  ) -> Result<(), AnyError> {
    let sql = "
      INSERT OR REPLACE INTO
        fastcheckcache (hash, data)
      VALUES
        (?1, ?2)";
    self.conn.execute(
      sql,
      params![CacheDBHash::new(key.as_u64()), &bincode::serialize(data)?],
    )?;
    Ok(())
  }
}

#[cfg(test)]
mod test {
  use std::collections::BTreeSet;

  use deno_ast::ModuleSpecifier;
  use deno_graph::FastCheckCache as _;
  use deno_graph::FastCheckCacheModuleItem;
  use deno_graph::FastCheckCacheModuleItemDiagnostic;
  use deno_semver::package::PackageNv;

  use super::*;

  #[test]
  pub fn cache_general_use() {
    let conn = CacheDB::in_memory(&FAST_CHECK_CACHE_DB, "1.0.0");
    let cache = FastCheckCache::new(conn);

    let key = FastCheckCacheKey::build(
      cache.hash_seed(),
      &PackageNv::from_str("@scope/a@1.0.0").unwrap(),
      &Default::default(),
    );
    let cache = cache.inner;
    assert!(cache.get(key).unwrap().is_none());
    let value = FastCheckCacheItem {
      dependencies: BTreeSet::from([
        PackageNv::from_str("@scope/b@1.0.0").unwrap()
      ]),
      modules: vec![(
        ModuleSpecifier::parse("https://jsr.io/test.ts").unwrap(),
        FastCheckCacheModuleItem::Diagnostic(
          FastCheckCacheModuleItemDiagnostic { source_hash: 123 },
        ),
      )],
    };
    cache.set(key, &value).unwrap();
    let stored_value = cache.get(key).unwrap().unwrap();
    assert_eq!(stored_value, value);

    // adding when already exists should not cause issue
    cache.set(key, &value).unwrap();

    // recreating with same cli version should still have it
    let conn = cache.conn.recreate_with_version("1.0.0");
    let cache = FastCheckCacheInner::new(conn);
    let stored_value = cache.get(key).unwrap().unwrap();
    assert_eq!(stored_value, value);

    // now changing the cli version should clear it
    let conn = cache.conn.recreate_with_version("2.0.0");
    let cache = FastCheckCacheInner::new(conn);
    assert!(cache.get(key).unwrap().is_none());
  }
}
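Note: `FastCheckCacheItem` travels through the BLOB column via bincode. A minimal sketch of that round trip, using a hypothetical `Item` struct in place of deno_graph's type:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct Item {
  source_hash: u64,
  modules: Vec<String>,
}

fn main() {
  let item = Item {
    source_hash: 123,
    modules: vec!["https://jsr.io/test.ts".to_string()],
  };
  // encode for the BLOB column, decode on the way back out
  let blob: Vec<u8> = bincode::serialize(&item).unwrap();
  let back: Item = bincode::deserialize(&blob).unwrap();
  assert_eq!(item, back);
}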
cli/cache/incremental.rs (vendored): 91 changes

@@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::collections::HashMap;
use std::path::Path;

@@ -6,23 +6,23 @@ use std::path::PathBuf;

use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::unsync::spawn;
use deno_core::unsync::JoinHandle;
use deno_runtime::deno_webstorage::rusqlite::params;
use serde::Serialize;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure;
use super::common::FastInsecureHasher;

pub static INCREMENTAL_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
  table_initializer: "CREATE TABLE IF NOT EXISTS incrementalcache (
    file_path TEXT PRIMARY KEY,
    state_hash TEXT NOT NULL,
    source_hash TEXT NOT NULL
  );",
  table_initializer: concat!(
    "CREATE TABLE IF NOT EXISTS incrementalcache (",
    "file_path TEXT PRIMARY KEY,",
    "state_hash INTEGER NOT NULL,",
    "source_hash INTEGER NOT NULL",
    ");"
  ),
  on_version_change: "DELETE FROM incrementalcache;",
  preheat_queries: &[],
  // If the cache fails, just ignore all caching attempts

@@ -34,7 +34,7 @@ pub static INCREMENTAL_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
pub struct IncrementalCache(IncrementalCacheInner);

impl IncrementalCache {
  pub fn new<TState: Serialize>(
  pub fn new<TState: std::hash::Hash>(
    db: CacheDB,
    state: &TState,
    initial_file_paths: &[PathBuf],

@@ -56,24 +56,23 @@ impl IncrementalCache {
}

enum ReceiverMessage {
  Update(PathBuf, u64),
  Update(PathBuf, CacheDBHash),
  Exit,
}

struct IncrementalCacheInner {
  previous_hashes: HashMap<PathBuf, u64>,
  previous_hashes: HashMap<PathBuf, CacheDBHash>,
  sender: tokio::sync::mpsc::UnboundedSender<ReceiverMessage>,
  handle: Mutex<Option<JoinHandle<()>>>,
}

impl IncrementalCacheInner {
  pub fn new<TState: Serialize>(
  pub fn new<TState: std::hash::Hash>(
    db: CacheDB,
    state: &TState,
    initial_file_paths: &[PathBuf],
  ) -> Self {
    let state_hash =
      FastInsecureHasher::hash(serde_json::to_string(state).unwrap());
    let state_hash = CacheDBHash::from_source(state);
    let sql_cache = SqlIncrementalCache::new(db, state_hash);
    Self::from_sql_incremental_cache(sql_cache, initial_file_paths)
  }

@@ -113,13 +112,13 @@ impl IncrementalCacheInner {

  pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
    match self.previous_hashes.get(file_path) {
      Some(hash) => *hash == FastInsecureHasher::hash(file_text),
      Some(hash) => *hash == CacheDBHash::from_source(file_text),
      None => false,
    }
  }

  pub fn update_file(&self, file_path: &Path, file_text: &str) {
    let hash = FastInsecureHasher::hash(file_text);
    let hash = CacheDBHash::from_source(file_text);
    if let Some(previous_hash) = self.previous_hashes.get(file_path) {
      if *previous_hash == hash {
        return; // do not bother updating the db file because nothing has changed

@@ -146,15 +145,15 @@ struct SqlIncrementalCache {
  /// A hash of the state used to produce the formatting/linting other than
  /// the CLI version. This state is a hash of the configuration and ensures
  /// we format/lint a file when the configuration changes.
  state_hash: u64,
  state_hash: CacheDBHash,
}

impl SqlIncrementalCache {
  pub fn new(conn: CacheDB, state_hash: u64) -> Self {
  pub fn new(conn: CacheDB, state_hash: CacheDBHash) -> Self {
    Self { conn, state_hash }
  }

  pub fn get_source_hash(&self, path: &Path) -> Option<u64> {
  pub fn get_source_hash(&self, path: &Path) -> Option<CacheDBHash> {
    match self.get_source_hash_result(path) {
      Ok(option) => option,
      Err(err) => {

@@ -171,7 +170,7 @@ impl SqlIncrementalCache {
  fn get_source_hash_result(
    &self,
    path: &Path,
  ) -> Result<Option<u64>, AnyError> {
  ) -> Result<Option<CacheDBHash>, AnyError> {
    let query = "
      SELECT
        source_hash

@@ -183,10 +182,10 @@ impl SqlIncrementalCache {
      LIMIT 1";
    let res = self.conn.query_row(
      query,
      params![path.to_string_lossy(), self.state_hash.to_string()],
      params![path.to_string_lossy(), self.state_hash],
      |row| {
        let hash: String = row.get(0)?;
        Ok(hash.parse::<u64>()?)
        let hash: CacheDBHash = row.get(0)?;
        Ok(hash)
      },
    )?;
    Ok(res)

@@ -195,7 +194,7 @@ impl SqlIncrementalCache {
  pub fn set_source_hash(
    &self,
    path: &Path,
    source_hash: u64,
    source_hash: CacheDBHash,
  ) -> Result<(), AnyError> {
    let sql = "
      INSERT OR REPLACE INTO

@@ -204,11 +203,7 @@ impl SqlIncrementalCache {
        (?1, ?2, ?3)";
    self.conn.execute(
      sql,
      params![
        path.to_string_lossy(),
        &self.state_hash.to_string(),
        &source_hash,
      ],
      params![path.to_string_lossy(), self.state_hash, source_hash],
    )?;
    Ok(())
  }

@@ -223,51 +218,51 @@ mod test {
  #[test]
  pub fn sql_cache_general_use() {
    let conn = CacheDB::in_memory(&INCREMENTAL_CACHE_DB, "1.0.0");
    let cache = SqlIncrementalCache::new(conn, 1);
    let cache = SqlIncrementalCache::new(conn, CacheDBHash::new(1));
    let path = PathBuf::from("/mod.ts");

    assert_eq!(cache.get_source_hash(&path), None);
    cache.set_source_hash(&path, 2).unwrap();
    assert_eq!(cache.get_source_hash(&path), Some(2));
    cache.set_source_hash(&path, CacheDBHash::new(2)).unwrap();
    assert_eq!(cache.get_source_hash(&path), Some(CacheDBHash::new(2)));

    // try changing the cli version (should clear)
    let conn = cache.conn.recreate_with_version("2.0.0");
    let mut cache = SqlIncrementalCache::new(conn, 1);
    let mut cache = SqlIncrementalCache::new(conn, CacheDBHash::new(1));
    assert_eq!(cache.get_source_hash(&path), None);

    // add back the file to the cache
    cache.set_source_hash(&path, 2).unwrap();
    assert_eq!(cache.get_source_hash(&path), Some(2));
    cache.set_source_hash(&path, CacheDBHash::new(2)).unwrap();
    assert_eq!(cache.get_source_hash(&path), Some(CacheDBHash::new(2)));

    // try changing the state hash
    cache.state_hash = 2;
    cache.state_hash = CacheDBHash::new(2);
    assert_eq!(cache.get_source_hash(&path), None);
    cache.state_hash = 1;
    cache.state_hash = CacheDBHash::new(1);

    // should return now that everything is back
    assert_eq!(cache.get_source_hash(&path), Some(2));
    assert_eq!(cache.get_source_hash(&path), Some(CacheDBHash::new(2)));

    // recreating the cache should not remove the data because the CLI version and state hash is the same
    let conn = cache.conn.recreate_with_version("2.0.0");
    let cache = SqlIncrementalCache::new(conn, 1);
    assert_eq!(cache.get_source_hash(&path), Some(2));
    let cache = SqlIncrementalCache::new(conn, CacheDBHash::new(1));
    assert_eq!(cache.get_source_hash(&path), Some(CacheDBHash::new(2)));

    // now try replacing and using another path
    cache.set_source_hash(&path, 3).unwrap();
    cache.set_source_hash(&path, 4).unwrap();
    cache.set_source_hash(&path, CacheDBHash::new(3)).unwrap();
    cache.set_source_hash(&path, CacheDBHash::new(4)).unwrap();
    let path2 = PathBuf::from("/mod2.ts");
    cache.set_source_hash(&path2, 5).unwrap();
    assert_eq!(cache.get_source_hash(&path), Some(4));
    assert_eq!(cache.get_source_hash(&path2), Some(5));
    cache.set_source_hash(&path2, CacheDBHash::new(5)).unwrap();
    assert_eq!(cache.get_source_hash(&path), Some(CacheDBHash::new(4)));
    assert_eq!(cache.get_source_hash(&path2), Some(CacheDBHash::new(5)));
  }

  #[tokio::test]
  pub async fn incremental_cache_general_use() {
    let conn = CacheDB::in_memory(&INCREMENTAL_CACHE_DB, "1.0.0");
    let sql_cache = SqlIncrementalCache::new(conn, 1);
    let sql_cache = SqlIncrementalCache::new(conn, CacheDBHash::new(1));
    let file_path = PathBuf::from("/mod.ts");
    let file_text = "test";
    let file_hash = FastInsecureHasher::hash(file_text);
    let file_hash = CacheDBHash::from_source(file_text);
    sql_cache.set_source_hash(&file_path, file_hash).unwrap();
    let cache = IncrementalCacheInner::from_sql_incremental_cache(
      sql_cache,
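Note: the fast path in `is_file_same` above is simple: a file can be skipped when the hash of its current text matches the hash recorded on the previous run. A standalone sketch, with `DefaultHasher` standing in for `CacheDBHash::from_source`:

use std::collections::HashMap;
use std::path::{Path, PathBuf};

fn hash_text(text: &str) -> u64 {
  use std::hash::{Hash, Hasher};
  let mut hasher = std::collections::hash_map::DefaultHasher::new();
  text.hash(&mut hasher);
  hasher.finish()
}

struct IncrementalCache {
  // hashes loaded from the SQLite table on startup
  previous_hashes: HashMap<PathBuf, u64>,
}

impl IncrementalCache {
  fn is_file_same(&self, path: &Path, text: &str) -> bool {
    self.previous_hashes.get(path) == Some(&hash_text(text))
  }
}

fn main() {
  let cache = IncrementalCache {
    previous_hashes: HashMap::from([(PathBuf::from("/mod.ts"), hash_text("test"))]),
  };
  assert!(cache.is_file_same(Path::new("/mod.ts"), "test"));
  assert!(!cache.is_file_same(Path::new("/mod.ts"), "changed"));
}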
332
cli/cache/mod.rs
vendored
332
cli/cache/mod.rs
vendored
|
@ -1,9 +1,23 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::jsr_url;
|
||||
use crate::args::CacheSetting;
|
||||
use crate::errors::get_error_class_name;
|
||||
use crate::file_fetcher::FetchNoFollowOptions;
|
||||
use crate::file_fetcher::FetchOptions;
|
||||
use crate::file_fetcher::FetchPermissionsOptionRef;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::util::fs::atomic_write_file;
|
||||
use crate::file_fetcher::FileOrRedirect;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::resolver::CliNodeResolver;
|
||||
use crate::util::fs::atomic_write_file_with_retries;
|
||||
use crate::util::fs::atomic_write_file_with_retries_and_fs;
|
||||
use crate::util::fs::AtomicWriteFileFsAdapter;
|
||||
use crate::util::path::specifier_has_extension;
|
||||
use crate::util::text_encoding::arc_str_to_bytes;
|
||||
use crate::util::text_encoding::from_utf8_lossy_owned;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::futures;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::ModuleSpecifier;
|
||||
|
@ -11,7 +25,7 @@ use deno_graph::source::CacheInfo;
|
|||
use deno_graph::source::LoadFuture;
|
||||
use deno_graph::source::LoadResponse;
|
||||
use deno_graph::source::Loader;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
@ -21,23 +35,33 @@ use std::time::SystemTime;
|
|||
mod cache_db;
|
||||
mod caches;
|
||||
mod check;
|
||||
mod code_cache;
|
||||
mod common;
|
||||
mod deno_dir;
|
||||
mod disk_cache;
|
||||
mod emit;
|
||||
mod fast_check;
|
||||
mod incremental;
|
||||
mod module_info;
|
||||
mod node;
|
||||
mod parsed_source;
|
||||
|
||||
pub use cache_db::CacheDBHash;
|
||||
pub use caches::Caches;
|
||||
pub use check::TypeCheckCache;
|
||||
pub use code_cache::CodeCache;
|
||||
pub use common::FastInsecureHasher;
|
||||
pub use deno_dir::dirs::home_dir;
|
||||
pub use deno_dir::DenoDir;
|
||||
pub use deno_dir::DenoDirProvider;
|
||||
pub use disk_cache::DiskCache;
|
||||
pub use emit::EmitCache;
|
||||
pub use fast_check::FastCheckCache;
|
||||
pub use incremental::IncrementalCache;
|
||||
pub use module_info::ModuleInfoCache;
|
||||
pub use node::NodeAnalysisCache;
|
||||
pub use parsed_source::EsmOrCjsChecker;
|
||||
pub use parsed_source::LazyGraphSourceParser;
|
||||
pub use parsed_source::ParsedSourceCache;
|
||||
|
||||
/// Permissions used to save a file in the disk caches.
|
||||
|
@ -47,12 +71,8 @@ pub const CACHE_PERM: u32 = 0o644;
|
|||
pub struct RealDenoCacheEnv;
|
||||
|
||||
impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
||||
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Option<Vec<u8>>> {
|
||||
match std::fs::read(path) {
|
||||
Ok(s) => Ok(Some(s)),
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
|
||||
std::fs::read(path)
|
||||
}
|
||||
|
||||
fn atomic_write_file(
|
||||
|
@ -60,7 +80,15 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
|||
path: &Path,
|
||||
bytes: &[u8],
|
||||
) -> std::io::Result<()> {
|
||||
atomic_write_file(path, bytes, CACHE_PERM)
|
||||
atomic_write_file_with_retries(path, bytes, CACHE_PERM)
|
||||
}
|
||||
|
||||
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
|
||||
crate::util::fs::canonicalize_path(path)
|
||||
}
|
||||
|
||||
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
|
||||
std::fs::create_dir_all(path)
|
||||
}
|
||||
|
||||
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
|
||||
|
@ -82,42 +110,118 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DenoCacheEnvFsAdapter<'a>(
|
||||
pub &'a dyn deno_runtime::deno_fs::FileSystem,
|
||||
);
|
||||
|
||||
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
|
||||
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
|
||||
self
|
||||
.0
|
||||
.read_file_sync(path, None)
|
||||
.map_err(|err| err.into_io_error())
|
||||
}
|
||||
|
||||
fn atomic_write_file(
|
||||
&self,
|
||||
path: &Path,
|
||||
bytes: &[u8],
|
||||
) -> std::io::Result<()> {
|
||||
atomic_write_file_with_retries_and_fs(
|
||||
&AtomicWriteFileFsAdapter {
|
||||
fs: self.0,
|
||||
write_mode: CACHE_PERM,
|
||||
},
|
||||
path,
|
||||
bytes,
|
||||
)
|
||||
}
|
||||
|
||||
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
|
||||
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
|
||||
self
|
||||
.0
|
||||
.mkdir_sync(path, true, None)
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
|
||||
self
|
||||
.0
|
||||
.stat_sync(path)
|
||||
.map(|stat| {
|
||||
stat
|
||||
.mtime
|
||||
.map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
|
||||
})
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn is_file(&self, path: &Path) -> bool {
|
||||
self.0.is_file_sync(path)
|
||||
}
|
||||
|
||||
fn time_now(&self) -> SystemTime {
|
||||
SystemTime::now()
|
||||
}
|
||||
}
|
||||
|
||||
pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
|
||||
pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
|
||||
pub type LocalLspHttpCache =
|
||||
deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
|
||||
pub use deno_cache_dir::CachedUrlMetadata;
|
||||
pub use deno_cache_dir::HttpCache;
|
||||
|
||||
pub struct FetchCacherOptions {
|
||||
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||
pub permissions: PermissionsContainer,
|
||||
/// If we're publishing for `deno publish`.
|
||||
pub is_deno_publish: bool,
|
||||
pub unstable_detect_cjs: bool,
|
||||
}
|
||||
|
||||
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
|
||||
/// a concise interface to the DENO_DIR when building module graphs.
|
||||
pub struct FetchCacher {
|
||||
emit_cache: EmitCache,
|
||||
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||
global_http_cache: Arc<GlobalHttpCache>,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
module_info_cache: Arc<ModuleInfoCache>,
|
||||
permissions: PermissionsContainer,
|
||||
is_deno_publish: bool,
|
||||
unstable_detect_cjs: bool,
|
||||
cache_info_enabled: bool,
|
||||
maybe_local_node_modules_url: Option<ModuleSpecifier>,
|
||||
}
|
||||
|
||||
impl FetchCacher {
|
||||
pub fn new(
|
||||
emit_cache: EmitCache,
|
||||
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||
global_http_cache: Arc<GlobalHttpCache>,
|
||||
permissions: PermissionsContainer,
|
||||
maybe_local_node_modules_url: Option<ModuleSpecifier>,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
module_info_cache: Arc<ModuleInfoCache>,
|
||||
options: FetchCacherOptions,
|
||||
) -> Self {
|
||||
Self {
|
||||
emit_cache,
|
||||
file_fetcher,
|
||||
file_header_overrides,
|
||||
esm_or_cjs_checker,
|
||||
global_http_cache,
|
||||
permissions,
|
||||
node_resolver,
|
||||
npm_resolver,
|
||||
module_info_cache,
|
||||
file_header_overrides: options.file_header_overrides,
|
||||
permissions: options.permissions,
|
||||
is_deno_publish: options.is_deno_publish,
|
||||
unstable_detect_cjs: options.unstable_detect_cjs,
|
||||
cache_info_enabled: false,
|
||||
maybe_local_node_modules_url,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -127,15 +231,7 @@ impl FetchCacher {
|||
    self.cache_info_enabled = true;
  }

  // DEPRECATED: Where the file is stored and how it's stored should be an implementation
  // detail of the cache.
  //
  // todo(dsheret): remove once implementing
  //  * https://github.com/denoland/deno/issues/17707
  //  * https://github.com/denoland/deno/issues/17703
  #[deprecated(
    note = "There should not be a way to do this because the file may not be cached at a local path in the future."
  )]
  /// Only use this for `deno info`.
  fn get_local_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
    // TODO(@kitsonk) fix when deno_graph does not query cache for synthetic
    // modules
|
@ -162,52 +258,136 @@ impl Loader for FetchCacher {
|||
    #[allow(deprecated)]
    let local = self.get_local_path(specifier)?;
    if local.is_file() {
      let emit = self
        .emit_cache
        .get_emit_filepath(specifier)
        .filter(|p| p.is_file());
      Some(CacheInfo {
        local: Some(local),
        emit,
        map: None,
      })
      Some(CacheInfo { local: Some(local) })
    } else {
      None
    }
  }

  fn load(
    &mut self,
    &self,
    specifier: &ModuleSpecifier,
    _is_dynamic: bool,
    options: deno_graph::source::LoadOptions,
  ) -> LoadFuture {
    if let Some(node_modules_url) = self.maybe_local_node_modules_url.as_ref() {
      // The specifier might be in a completely different symlinked tree than
      // what the resolved node_modules_url is in (ex. `/my-project-1/node_modules`
      // symlinked to `/my-project-2/node_modules`), so first check if the path
      // is in a node_modules dir to avoid needlessly canonicalizing, then compare
      // against the canonicalized specifier.
    use deno_graph::source::CacheSetting as LoaderCacheSetting;

    if specifier.scheme() == "file" {
      if specifier.path().contains("/node_modules/") {
        // The specifier might be in a completely different symlinked tree than
        // what the node_modules url is in (ex. `/my-project-1/node_modules`
        // symlinked to `/my-project-2/node_modules`), so first we check if the
        // path is in a node_modules dir to avoid needlessly canonicalizing,
        // then compare against the canonicalized specifier.
        let specifier =
          crate::node::resolve_specifier_into_node_modules(specifier);
        if specifier.as_str().starts_with(node_modules_url.as_str()) {
        if self.npm_resolver.in_npm_package(&specifier) {
          return Box::pin(futures::future::ready(Ok(Some(
            LoadResponse::External { specifier },
          ))));
        }
      }

      // make local CJS modules external to the graph
      if specifier_has_extension(specifier, "cjs") {
        return Box::pin(futures::future::ready(Ok(Some(
          LoadResponse::External {
            specifier: specifier.clone(),
          },
        ))));
      }

      if self.unstable_detect_cjs && specifier_has_extension(specifier, "js") {
        if let Ok(Some(pkg_json)) =
          self.node_resolver.get_closest_package_json(specifier)
        {
          if pkg_json.typ == "commonjs" {
            if let Ok(path) = specifier.to_file_path() {
              if let Ok(bytes) = std::fs::read(&path) {
                let text: Arc<str> = from_utf8_lossy_owned(bytes).into();
                let is_es_module = match self.esm_or_cjs_checker.is_esm(
                  specifier,
                  text.clone(),
                  MediaType::JavaScript,
                ) {
                  Ok(value) => value,
                  Err(err) => {
                    return Box::pin(futures::future::ready(Err(err.into())));
                  }
                };
                if !is_es_module {
                  self.node_resolver.mark_cjs_resolution(specifier.clone());
                  return Box::pin(futures::future::ready(Ok(Some(
                    LoadResponse::External {
                      specifier: specifier.clone(),
                    },
                  ))));
                } else {
                  return Box::pin(futures::future::ready(Ok(Some(
                    LoadResponse::Module {
                      specifier: specifier.clone(),
                      content: arc_str_to_bytes(text),
                      maybe_headers: None,
                    },
                  ))));
                }
              }
            }
          }
        }
      }
    }

    if self.is_deno_publish
      && matches!(specifier.scheme(), "http" | "https")
      && !specifier.as_str().starts_with(jsr_url().as_str())
    {
      // mark non-JSR remote modules as external so we don't need --allow-import
      // permissions as these will error out later when publishing
      return Box::pin(futures::future::ready(Ok(Some(
        LoadResponse::External {
          specifier: specifier.clone(),
        },
      ))));
    }

    let permissions = self.permissions.clone();
    let file_fetcher = self.file_fetcher.clone();
    let file_header_overrides = self.file_header_overrides.clone();
    let permissions = self.permissions.clone();
    let specifier = specifier.clone();
    let is_statically_analyzable = !options.was_dynamic_root;

    async move {
      let maybe_cache_setting = match options.cache_setting {
        LoaderCacheSetting::Use => None,
        LoaderCacheSetting::Reload => {
          if matches!(file_fetcher.cache_setting(), CacheSetting::Only) {
            return Err(deno_core::anyhow::anyhow!(
              "Could not resolve version constraint using only cached data. Try running again without --cached-only"
            ));
          }
          Some(CacheSetting::ReloadAll)
        }
        LoaderCacheSetting::Only => Some(CacheSetting::Only),
      };
      file_fetcher
        .fetch(&specifier, permissions)
        .fetch_no_follow_with_options(FetchNoFollowOptions {
          fetch_options: FetchOptions {
            specifier: &specifier,
            permissions: if is_statically_analyzable {
              FetchPermissionsOptionRef::StaticContainer(&permissions)
            } else {
              FetchPermissionsOptionRef::DynamicContainer(&permissions)
            },
            maybe_auth: None,
            maybe_accept: None,
            maybe_cache_setting: maybe_cache_setting.as_ref(),
          },
          maybe_checksum: options.maybe_checksum.as_ref(),
        })
        .await
        .map(|file| {
        .map(|file_or_redirect| {
          match file_or_redirect {
            FileOrRedirect::File(file) => {
              let maybe_headers =
                match (file.maybe_headers, file_header_overrides.get(&specifier)) {
                  (Some(headers), Some(overrides)) => {
|
@ -222,18 +402,54 @@ impl Loader for FetchCacher {
|||
              maybe_headers,
              content: file.source,
            }))
          },
          FileOrRedirect::Redirect(redirect_specifier) => {
            Ok(Some(LoadResponse::Redirect {
              specifier: redirect_specifier,
            }))
          },
        }
      })
      .unwrap_or_else(|err| {
        if let Some(err) = err.downcast_ref::<std::io::Error>() {
          if err.kind() == std::io::ErrorKind::NotFound {
        if let Some(io_err) = err.downcast_ref::<std::io::Error>() {
          if io_err.kind() == std::io::ErrorKind::NotFound {
            return Ok(None);
          } else {
            return Err(err);
          }
        } else if get_error_class_name(&err) == "NotFound" {
          return Ok(None);
        }
        Err(err)
        let error_class_name = get_error_class_name(&err);
        match error_class_name {
          "NotFound" => Ok(None),
          "NotCached" if options.cache_setting == LoaderCacheSetting::Only => Ok(None),
          _ => Err(err),
        }
      })
    }
    .boxed()
    .boxed_local()
  }
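
A note on the error mapping at the tail of `load` above: rather than branching on `std::io::Error` downcasts, the new code classifies the error by name, treating "NotFound" (and, under --cached-only, "NotCached") as an absent module rather than a loader failure. A standalone sketch of that decision table (names here are illustrative; the real code matches on `get_error_class_name(&err)` and returns anyhow errors):

#[derive(Debug, PartialEq)]
enum CacheSetting {
  Use,
  Only,
}

fn map_load_error(
  error_class: &str,
  cache_setting: &CacheSetting,
) -> Result<Option<&'static str>, String> {
  match error_class {
    // A missing module is not a loader failure; the graph reports it
    // later as a resolution diagnostic.
    "NotFound" => Ok(None),
    // Under --cached-only, a module absent from the cache is treated
    // the same way instead of aborting the whole graph build.
    "NotCached" if *cache_setting == CacheSetting::Only => Ok(None),
    other => Err(other.to_string()),
  }
}

fn main() {
  assert_eq!(map_load_error("NotFound", &CacheSetting::Use), Ok(None));
  assert_eq!(map_load_error("NotCached", &CacheSetting::Only), Ok(None));
  assert!(map_load_error("NotCached", &CacheSetting::Use).is_err());
}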

  fn cache_module_info(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
    source: &Arc<[u8]>,
    module_info: &deno_graph::ModuleInfo,
  ) {
    log::debug!("Caching module info for {}", specifier);
    let source_hash = CacheDBHash::from_source(source);
    let result = self.module_info_cache.set_module_info(
      specifier,
      media_type,
      source_hash,
      module_info,
    );
    if let Err(err) = result {
      log::debug!(
        "Error saving module cache info for {}. {:#}",
        specifier,
        err
      );
    }
  }
}
|
331
cli/cache/module_info.rs
vendored
Normal file
|
@ -0,0 +1,331 @@
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::sync::Arc;

use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_graph::ModuleInfo;
use deno_graph::ParserModuleAnalyzer;
use deno_runtime::deno_webstorage::rusqlite::params;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure;
use super::ParsedSourceCache;

const SELECT_MODULE_INFO: &str = "
SELECT
  module_info
FROM
  moduleinfocache
WHERE
  specifier=?1
  AND media_type=?2
  AND source_hash=?3
LIMIT 1";

pub static MODULE_INFO_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
  table_initializer: concat!(
    "CREATE TABLE IF NOT EXISTS moduleinfocache (",
    "specifier TEXT PRIMARY KEY,",
    "media_type INTEGER NOT NULL,",
    "source_hash INTEGER NOT NULL,",
    "module_info TEXT NOT NULL",
    ");"
  ),
  on_version_change: "DELETE FROM moduleinfocache;",
  preheat_queries: &[SELECT_MODULE_INFO],
  on_failure: CacheFailure::InMemory,
};
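
Reading the schema and query together: `specifier` alone is the primary key, so INSERT OR REPLACE keeps at most one row per module, while the lookup keys on all of (specifier, media_type, source_hash), so a changed source hash simply misses. A runnable sketch of that shape against plain rusqlite (assumption: the rusqlite crate used directly; the real code goes through the CacheDB wrapper configured above):

use rusqlite::{params, Connection};

fn main() -> rusqlite::Result<()> {
  let conn = Connection::open_in_memory()?;
  conn.execute_batch(
    "CREATE TABLE IF NOT EXISTS moduleinfocache (
       specifier TEXT PRIMARY KEY,
       media_type INTEGER NOT NULL,
       source_hash INTEGER NOT NULL,
       module_info TEXT NOT NULL
     );",
  )?;
  conn.execute(
    "INSERT OR REPLACE INTO moduleinfocache VALUES (?1, ?2, ?3, ?4)",
    params!["https://example.com/mod.ts", 5_i64, 42_i64, "{}"],
  )?;
  // The lookup keys on all three columns, so a changed source hash
  // (43 below) misses even though the row for the specifier exists.
  let stale: Option<String> = conn
    .query_row(
      "SELECT module_info FROM moduleinfocache
       WHERE specifier = ?1 AND media_type = ?2 AND source_hash = ?3",
      params!["https://example.com/mod.ts", 5_i64, 43_i64],
      |row| row.get(0),
    )
    .ok();
  assert_eq!(stale, None);
  Ok(())
}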

/// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable
/// performance improvement because when it exists we can skip parsing a module for
/// deno_graph.
pub struct ModuleInfoCache {
  conn: CacheDB,
}

impl ModuleInfoCache {
  #[cfg(test)]
  pub fn new_in_memory(version: &'static str) -> Self {
    Self::new(CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version))
  }

  pub fn new(conn: CacheDB) -> Self {
    Self { conn }
  }

  /// Useful for testing: re-create this cache DB with a different current version.
  #[cfg(test)]
  pub(crate) fn recreate_with_version(self, version: &'static str) -> Self {
    Self {
      conn: self.conn.recreate_with_version(version),
    }
  }

  pub fn get_module_info(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
    expected_source_hash: CacheDBHash,
  ) -> Result<Option<ModuleInfo>, AnyError> {
    let query = SELECT_MODULE_INFO;
    let res = self.conn.query_row(
      query,
      params![
        &specifier.as_str(),
        serialize_media_type(media_type),
        expected_source_hash,
      ],
      |row| {
        let module_info: String = row.get(0)?;
        let module_info = serde_json::from_str(&module_info)?;
        Ok(module_info)
      },
    )?;
    Ok(res)
  }

  pub fn set_module_info(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
    source_hash: CacheDBHash,
    module_info: &ModuleInfo,
  ) -> Result<(), AnyError> {
    let sql = "
      INSERT OR REPLACE INTO
        moduleinfocache (specifier, media_type, source_hash, module_info)
      VALUES
        (?1, ?2, ?3, ?4)";
    self.conn.execute(
      sql,
      params![
        specifier.as_str(),
        serialize_media_type(media_type),
        source_hash,
        &serde_json::to_string(&module_info)?,
      ],
    )?;
    Ok(())
  }

  pub fn as_module_analyzer<'a>(
    &'a self,
    parsed_source_cache: &'a Arc<ParsedSourceCache>,
  ) -> ModuleInfoCacheModuleAnalyzer<'a> {
    ModuleInfoCacheModuleAnalyzer {
      module_info_cache: self,
      parsed_source_cache,
    }
  }
}

pub struct ModuleInfoCacheModuleAnalyzer<'a> {
  module_info_cache: &'a ModuleInfoCache,
  parsed_source_cache: &'a Arc<ParsedSourceCache>,
}

#[async_trait::async_trait(?Send)]
impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
  async fn analyze(
    &self,
    specifier: &ModuleSpecifier,
    source: Arc<str>,
    media_type: MediaType,
  ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
    // attempt to load from the cache
    let source_hash = CacheDBHash::from_source(&source);
    match self.module_info_cache.get_module_info(
      specifier,
      media_type,
      source_hash,
    ) {
      Ok(Some(info)) => return Ok(info),
      Ok(None) => {}
      Err(err) => {
        log::debug!(
          "Error loading module cache info for {}. {:#}",
          specifier,
          err
        );
      }
    }

    // otherwise, get the module info from the parsed source cache
    let module_info = deno_core::unsync::spawn_blocking({
      let cache = self.parsed_source_cache.clone();
      let specifier = specifier.clone();
      move || {
        let parser = cache.as_capturing_parser();
        let analyzer = ParserModuleAnalyzer::new(&parser);
        analyzer.analyze_sync(&specifier, source, media_type)
      }
    })
    .await
    .unwrap()?;

    // then attempt to cache it
    if let Err(err) = self.module_info_cache.set_module_info(
      specifier,
      media_type,
      source_hash,
      &module_info,
    ) {
      log::debug!(
        "Error saving module cache info for {}. {:#}",
        specifier,
        err
      );
    }

    Ok(module_info)
  }
}
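
Note that `analyze` above is a read-through cache: check by (specifier, media_type, source hash), return on a hit, otherwise parse on a blocking thread and write the result back, logging rather than propagating cache errors. A minimal, self-contained sketch of that pattern (all names below are illustrative stand-ins, not the real deno_graph/deno_ast types):

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

struct InfoCache {
  entries: HashMap<(String, u64), String>,
}

impl InfoCache {
  fn get_or_compute(
    &mut self,
    specifier: &str,
    source: &str,
    parse: impl Fn(&str) -> String,
  ) -> String {
    let mut hasher = DefaultHasher::new();
    source.hash(&mut hasher);
    let key = (specifier.to_string(), hasher.finish());
    if let Some(info) = self.entries.get(&key) {
      return info.clone(); // hit: parsing is skipped entirely
    }
    let info = parse(source); // miss: do the expensive work once
    self.entries.insert(key, info.clone()); // best-effort write-back
    info
  }
}

fn main() {
  let mut cache = InfoCache { entries: HashMap::new() };
  let info = cache.get_or_compute("file:///a.ts", "export {}", |s| {
    format!("parsed {} bytes", s.len())
  });
  assert_eq!(info, "parsed 9 bytes");
}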

fn serialize_media_type(media_type: MediaType) -> i64 {
  use MediaType::*;
  match media_type {
    JavaScript => 1,
    Jsx => 2,
    Mjs => 3,
    Cjs => 4,
    TypeScript => 5,
    Mts => 6,
    Cts => 7,
    Dts => 8,
    Dmts => 9,
    Dcts => 10,
    Tsx => 11,
    Json => 12,
    Wasm => 13,
    TsBuildInfo => 14,
    SourceMap => 15,
    Unknown => 16,
  }
}
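
These integer codes are what ends up in the `media_type` column, so the mapping has to stay stable across releases: matching each variant explicitly, instead of casting the enum discriminant, keeps a reordering of the enum from silently mismatching old rows. A self-contained round-trip check over a hypothetical three-variant subset (the real deno_ast::MediaType has all sixteen shown above):

#[derive(Debug, Clone, Copy, PartialEq)]
enum MediaType {
  JavaScript,
  TypeScript,
  Json,
}

fn serialize_media_type(media_type: MediaType) -> i64 {
  // written out explicitly so reordering variants cannot change the
  // values already persisted in the database
  match media_type {
    MediaType::JavaScript => 1,
    MediaType::TypeScript => 5,
    MediaType::Json => 12,
  }
}

fn deserialize_media_type(value: i64) -> Option<MediaType> {
  match value {
    1 => Some(MediaType::JavaScript),
    5 => Some(MediaType::TypeScript),
    12 => Some(MediaType::Json),
    _ => None,
  }
}

fn main() {
  for mt in [MediaType::JavaScript, MediaType::TypeScript, MediaType::Json] {
    assert_eq!(deserialize_media_type(serialize_media_type(mt)), Some(mt));
  }
}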

#[cfg(test)]
mod test {
  use deno_graph::PositionRange;
  use deno_graph::SpecifierWithRange;

  use super::*;

  #[test]
  pub fn module_info_cache_general_use() {
    let cache = ModuleInfoCache::new_in_memory("1.0.0");
    let specifier1 =
      ModuleSpecifier::parse("https://localhost/mod.ts").unwrap();
    let specifier2 =
      ModuleSpecifier::parse("https://localhost/mod2.ts").unwrap();
    assert_eq!(
      cache
        .get_module_info(
          &specifier1,
          MediaType::JavaScript,
          CacheDBHash::new(1)
        )
        .unwrap(),
      None
    );

    let mut module_info = ModuleInfo::default();
    module_info.jsdoc_imports.push(SpecifierWithRange {
      range: PositionRange {
        start: deno_graph::Position {
          line: 0,
          character: 3,
        },
        end: deno_graph::Position {
          line: 1,
          character: 2,
        },
      },
      text: "test".to_string(),
    });
    cache
      .set_module_info(
        &specifier1,
        MediaType::JavaScript,
        CacheDBHash::new(1),
        &module_info,
      )
      .unwrap();
    assert_eq!(
      cache
        .get_module_info(
          &specifier1,
          MediaType::JavaScript,
          CacheDBHash::new(1)
        )
        .unwrap(),
      Some(module_info.clone())
    );
    assert_eq!(
      cache
        .get_module_info(
          &specifier2,
          MediaType::JavaScript,
          CacheDBHash::new(1)
        )
        .unwrap(),
      None,
    );
    // different media type
    assert_eq!(
      cache
        .get_module_info(
          &specifier1,
          MediaType::TypeScript,
          CacheDBHash::new(1)
        )
        .unwrap(),
      None,
    );
    // different source hash
    assert_eq!(
      cache
        .get_module_info(
          &specifier1,
          MediaType::JavaScript,
          CacheDBHash::new(2)
        )
        .unwrap(),
      None,
    );

    // try recreating with the same version
    let cache = cache.recreate_with_version("1.0.0");

    // should get it
    assert_eq!(
      cache
        .get_module_info(
          &specifier1,
          MediaType::JavaScript,
          CacheDBHash::new(1)
        )
        .unwrap(),
      Some(module_info)
    );

    // try recreating with a different version
    let cache = cache.recreate_with_version("1.0.1");

    // should no longer exist
    assert_eq!(
      cache
        .get_module_info(
          &specifier1,
          MediaType::JavaScript,
          CacheDBHash::new(1)
        )
        .unwrap(),
      None,
    );
  }
}
|
82
cli/cache/node.rs
vendored
|
@ -1,22 +1,25 @@
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_ast::CjsAnalysis;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_runtime::deno_webstorage::rusqlite::params;

use crate::node::CliCjsAnalysis;

use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheFailure;
use super::FastInsecureHasher;
use super::CacheDBHash;

pub static NODE_ANALYSIS_CACHE_DB: CacheDBConfiguration =
  CacheDBConfiguration {
    table_initializer: "CREATE TABLE IF NOT EXISTS cjsanalysiscache (
      specifier TEXT PRIMARY KEY,
      source_hash TEXT NOT NULL,
      data TEXT NOT NULL
    );",
    table_initializer: concat!(
      "CREATE TABLE IF NOT EXISTS cjsanalysiscache (",
      "specifier TEXT PRIMARY KEY,",
      "source_hash INTEGER NOT NULL,",
      "data TEXT NOT NULL",
      ");"
    ),
    on_version_change: "DELETE FROM cjsanalysiscache;",
    preheat_queries: &[],
    on_failure: CacheFailure::InMemory,
|
@ -34,10 +37,6 @@ impl NodeAnalysisCache {
|||
    }
  }

  pub fn compute_source_hash(text: &str) -> String {
    FastInsecureHasher::hash(text).to_string()
  }

  fn ensure_ok<T: Default>(res: Result<T, AnyError>) -> T {
    match res {
      Ok(x) => x,
|
@ -58,8 +57,8 @@ impl NodeAnalysisCache {
|||
  pub fn get_cjs_analysis(
    &self,
    specifier: &str,
    expected_source_hash: &str,
  ) -> Option<CjsAnalysis> {
    expected_source_hash: CacheDBHash,
  ) -> Option<CliCjsAnalysis> {
    Self::ensure_ok(
      self.inner.get_cjs_analysis(specifier, expected_source_hash),
    )
|
@ -68,8 +67,8 @@ impl NodeAnalysisCache {
|||
  pub fn set_cjs_analysis(
    &self,
    specifier: &str,
    source_hash: &str,
    cjs_analysis: &CjsAnalysis,
    source_hash: CacheDBHash,
    cjs_analysis: &CliCjsAnalysis,
  ) {
    Self::ensure_ok(self.inner.set_cjs_analysis(
      specifier,
|
@ -92,8 +91,8 @@ impl NodeAnalysisCacheInner {
|||
  pub fn get_cjs_analysis(
    &self,
    specifier: &str,
    expected_source_hash: &str,
  ) -> Result<Option<CjsAnalysis>, AnyError> {
    expected_source_hash: CacheDBHash,
  ) -> Result<Option<CliCjsAnalysis>, AnyError> {
    let query = "
      SELECT
        data
|
@ -105,7 +104,7 @@ impl NodeAnalysisCacheInner {
|||
      LIMIT 1";
    let res = self.conn.query_row(
      query,
      params![specifier, &expected_source_hash],
      params![specifier, expected_source_hash],
      |row| {
        let analysis_info: String = row.get(0)?;
        Ok(serde_json::from_str(&analysis_info)?)
|
@ -117,8 +116,8 @@ impl NodeAnalysisCacheInner {
|||
  pub fn set_cjs_analysis(
    &self,
    specifier: &str,
    source_hash: &str,
    cjs_analysis: &CjsAnalysis,
    source_hash: CacheDBHash,
    cjs_analysis: &CliCjsAnalysis,
  ) -> Result<(), AnyError> {
    let sql = "
      INSERT OR REPLACE INTO
|
@ -129,7 +128,7 @@ impl NodeAnalysisCacheInner {
|||
      sql,
      params![
        specifier,
        &source_hash.to_string(),
        source_hash,
        &serde_json::to_string(&cjs_analysis)?,
      ],
    )?;
|
@ -146,36 +145,47 @@ mod test {
|||
    let conn = CacheDB::in_memory(&NODE_ANALYSIS_CACHE_DB, "1.0.0");
    let cache = NodeAnalysisCacheInner::new(conn);

    assert!(cache.get_cjs_analysis("file.js", "2").unwrap().is_none());
    let cjs_analysis = CjsAnalysis {
    assert!(cache
      .get_cjs_analysis("file.js", CacheDBHash::new(2))
      .unwrap()
      .is_none());
    let cjs_analysis = CliCjsAnalysis::Cjs {
      exports: vec!["export1".to_string()],
      reexports: vec!["re-export1".to_string()],
    };
    cache
      .set_cjs_analysis("file.js", "2", &cjs_analysis)
      .set_cjs_analysis("file.js", CacheDBHash::new(2), &cjs_analysis)
      .unwrap();
    assert!(cache.get_cjs_analysis("file.js", "3").unwrap().is_none()); // different hash
    let actual_cjs_analysis =
      cache.get_cjs_analysis("file.js", "2").unwrap().unwrap();
    assert_eq!(actual_cjs_analysis.exports, cjs_analysis.exports);
    assert_eq!(actual_cjs_analysis.reexports, cjs_analysis.reexports);
    assert!(cache
      .get_cjs_analysis("file.js", CacheDBHash::new(3))
      .unwrap()
      .is_none()); // different hash
    let actual_cjs_analysis = cache
      .get_cjs_analysis("file.js", CacheDBHash::new(2))
      .unwrap()
      .unwrap();
    assert_eq!(actual_cjs_analysis, cjs_analysis);

    // adding when already exists should not cause issue
    cache
      .set_cjs_analysis("file.js", "2", &cjs_analysis)
      .set_cjs_analysis("file.js", CacheDBHash::new(2), &cjs_analysis)
      .unwrap();

    // recreating with same cli version should still have it
    let conn = cache.conn.recreate_with_version("1.0.0");
    let cache = NodeAnalysisCacheInner::new(conn);
    let actual_analysis =
      cache.get_cjs_analysis("file.js", "2").unwrap().unwrap();
    assert_eq!(actual_analysis.exports, cjs_analysis.exports);
    assert_eq!(actual_analysis.reexports, cjs_analysis.reexports);
    let actual_analysis = cache
      .get_cjs_analysis("file.js", CacheDBHash::new(2))
      .unwrap()
      .unwrap();
    assert_eq!(actual_analysis, cjs_analysis);

    // now changing the cli version should clear it
    let conn = cache.conn.recreate_with_version("2.0.0");
    let cache = NodeAnalysisCacheInner::new(conn);
    assert!(cache.get_cjs_analysis("file.js", "2").unwrap().is_none());
    assert!(cache
      .get_cjs_analysis("file.js", CacheDBHash::new(2))
      .unwrap()
      .is_none());
  }
}
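
The recurring change in this file is the move from stringified hashes (`source_hash TEXT`, `&str` parameters) to integers behind the `CacheDBHash` newtype. A hypothetical sketch of the wrapper's shape (this is a stand-in using std's hasher, not the real CacheDBHash, which is built on the CLI's fast insecure hasher):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in for CacheDBHash: a copyable newtype so call sites cannot
// accidentally pass an arbitrary string where a hash key is expected.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct DbHash(u64);

impl DbHash {
  fn from_source(source: impl Hash) -> Self {
    let mut hasher = DefaultHasher::new();
    source.hash(&mut hasher);
    DbHash(hasher.finish())
  }

  // SQLite INTEGER columns hold signed 64-bit values, so persist the
  // bits as i64 instead of formatting the hash as TEXT.
  fn to_db(self) -> i64 {
    self.0 as i64
  }
}

fn main() {
  let a = DbHash::from_source("module.exports = 1;");
  let b = DbHash::from_source("module.exports = 1;");
  assert_eq!(a, b); // identical sources produce identical keys
  let _column: i64 = a.to_db();
}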
|
Some files were not shown because too many files have changed in this diff