Mirror of https://github.com/denoland/deno.git, synced 2025-01-21 04:52:26 -05:00

Merge remote-tracking branch 'upstream/main' into check-workspace-member-compiler-options

Commit b0cbae7486
13 changed files with 68 additions and 66 deletions
.github/workflows/ci.generate.ts (vendored, 28 changes)
@@ -716,19 +716,6 @@ const ci = {
           "df -h",
         ].join("\n"),
       },
-      {
-        name: "Build denort release",
-        if: [
-          "matrix.job == 'test' &&",
-          "matrix.profile == 'release' &&",
-          "github.repository == 'denoland/deno'",
-        ].join("\n"),
-        run: [
-          "df -h",
-          "cargo build --profile=release-slim --locked --bin denort",
-          "df -h",
-        ].join("\n"),
-      },
       {
         // Run a minimal check to ensure that binary is not corrupted, regardless
         // of our build mode
@@ -775,11 +762,10 @@ const ci = {
           "cd target/release",
           "zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno",
           "shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum",
-          "./deno types > lib.deno.d.ts",
-          "cd ../release-slim",
-          "zip -r ../release/denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort",
-          "cd ../release",
+          "strip denort",
+          "zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort",
           "shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum",
+          "./deno types > lib.deno.d.ts",
         ].join("\n"),
       },
       {
@@ -804,9 +790,8 @@ const ci = {
           "cd target/release",
           "zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno",
           "shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum",
-          "cd ../release-slim",
-          "zip -r ../release/denort-${{ matrix.arch }}-apple-darwin.zip denort",
-          "cd ../release",
+          "strip denort",
+          "zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort",
           "shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum",
         ]
           .join("\n"),
@@ -823,8 +808,7 @@ const ci = {
         run: [
           "Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip",
           "Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum",
-
-          "Compress-Archive -CompressionLevel Optimal -Force -Path target/release-slim/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip",
+          "Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip",
           "Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum",
         ].join("\n"),
       },
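Note: ci.yml is generated from ci.generate.ts, so every edit to the generator reappears in the YAML below. The generator builds multi-line if/run values by joining string arrays with "\n", which the YAML emitter writes back out as |- block scalars. A minimal sketch of that round trip, assuming @std/yaml's stringify (the step shape here is illustrative, not the generator's actual types):

// Illustrative only: shows why each .join("\n") array in ci.generate.ts
// surfaces as a "run: |-" block in the generated ci.yml.
import { stringify } from "jsr:@std/yaml";

const step = {
  name: "Pre-release (linux)",
  run: [
    "cd target/release",
    "strip denort",
    "zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort",
  ].join("\n"),
};

// Multi-line string values serialize as YAML block scalars.
console.log(stringify({ steps: [step] }));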
.github/workflows/ci.yml (vendored, 23 changes)
@@ -419,15 +419,6 @@ jobs:
           df -h
           cargo build --release --locked --all-targets
           df -h
-      - name: Build denort release
-        if: |-
-          !(matrix.skip) && (matrix.job == 'test' &&
-          matrix.profile == 'release' &&
-          github.repository == 'denoland/deno')
-        run: |-
-          df -h
-          cargo build --profile=release-slim --locked --bin denort
-          df -h
       - name: Check deno binary
         if: '!(matrix.skip) && (matrix.job == ''test'')'
         run: 'target/${{ matrix.profile }}/deno eval "console.log(1+2)" | grep 3'
@@ -457,11 +448,10 @@ jobs:
           cd target/release
           zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno
           shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
-          ./deno types > lib.deno.d.ts
-          cd ../release-slim
-          zip -r ../release/denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort
-          cd ../release
+          strip denort
+          zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort
           shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
+          ./deno types > lib.deno.d.ts
       - name: Pre-release (mac)
         if: |-
           !(matrix.skip) && (matrix.os == 'macos' &&
@@ -477,9 +467,8 @@ jobs:
           cd target/release
           zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno
           shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum
-          cd ../release-slim
-          zip -r ../release/denort-${{ matrix.arch }}-apple-darwin.zip denort
-          cd ../release
+          strip denort
+          zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort
           shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum
       - name: Pre-release (windows)
         if: |-
@@ -491,7 +480,7 @@ jobs:
         run: |-
           Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip
           Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
-          Compress-Archive -CompressionLevel Optimal -Force -Path target/release-slim/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip
+          Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip
           Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
       - name: Upload canary to dl.deno.land
         if: |-
Cargo.toml

@@ -252,11 +252,6 @@ incremental = true
 lto = true
 opt-level = 'z' # Optimize for size
 
-[profile.release-slim]
-inherits = "release"
-panic = "abort"
-strip = "symbols"
-
 # Build release with debug symbols: cargo build --profile=release-with-debug
 [profile.release-with-debug]
 inherits = "release"
cli/cache/cache_db.rs (vendored, 4 changes)
@@ -25,12 +25,12 @@ impl CacheDBHash {
     Self(hash)
   }
 
-  pub fn from_source(source: impl std::hash::Hash) -> Self {
+  pub fn from_hashable(hashable: impl std::hash::Hash) -> Self {
     Self::new(
       // always write in the deno version just in case
       // the clearing on deno version change doesn't work
       FastInsecureHasher::new_deno_versioned()
-        .write_hashable(source)
+        .write_hashable(hashable)
         .finish(),
     )
   }
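The rename from from_source to from_hashable is mechanical, but it captures the point of the helper: any std::hash::Hash value can seed a cache key, and the hash is always salted with the Deno version so entries go stale on upgrade even if the explicit version-change purge fails. The same version-salting idea in a TypeScript sketch, for illustration only (fnv1a64 is a stand-in, not a Deno internal):

// Illustrative sketch: salt every cache key with the toolchain version so an
// upgrade invalidates old entries even when explicit cache clearing fails.
function fnv1a64(input: string): bigint {
  let hash = 0xcbf29ce484222325n; // FNV-1a offset basis
  for (const byte of new TextEncoder().encode(input)) {
    hash ^= BigInt(byte);
    hash = (hash * 0x100000001b3n) & 0xffffffffffffffffn;
  }
  return hash;
}

function cacheKeyFromHashable(hashable: string): bigint {
  // Mirrors the spirit of FastInsecureHasher::new_deno_versioned():
  // version first, then the payload.
  return fnv1a64(`${Deno.version.deno}\0${hashable}`);
}

console.log(cacheKeyFromHashable("export const a = 1;"));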
cli/cache/incremental.rs (vendored, 21 changes)
@@ -34,12 +34,16 @@ pub static INCREMENTAL_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
 pub struct IncrementalCache(IncrementalCacheInner);
 
 impl IncrementalCache {
-  pub fn new<TState: std::hash::Hash>(
+  pub fn new(
     db: CacheDB,
-    state: &TState,
+    state_hash: CacheDBHash,
     initial_file_paths: &[PathBuf],
   ) -> Self {
-    IncrementalCache(IncrementalCacheInner::new(db, state, initial_file_paths))
+    IncrementalCache(IncrementalCacheInner::new(
+      db,
+      state_hash,
+      initial_file_paths,
+    ))
   }
 
   pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
@@ -67,12 +71,11 @@ struct IncrementalCacheInner {
 }
 
 impl IncrementalCacheInner {
-  pub fn new<TState: std::hash::Hash>(
+  pub fn new(
     db: CacheDB,
-    state: &TState,
+    state_hash: CacheDBHash,
     initial_file_paths: &[PathBuf],
   ) -> Self {
-    let state_hash = CacheDBHash::from_source(state);
     let sql_cache = SqlIncrementalCache::new(db, state_hash);
     Self::from_sql_incremental_cache(sql_cache, initial_file_paths)
   }
@@ -112,13 +115,13 @@ impl IncrementalCacheInner {
 
   pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
     match self.previous_hashes.get(file_path) {
-      Some(hash) => *hash == CacheDBHash::from_source(file_text),
+      Some(hash) => *hash == CacheDBHash::from_hashable(file_text),
       None => false,
     }
   }
 
   pub fn update_file(&self, file_path: &Path, file_text: &str) {
-    let hash = CacheDBHash::from_source(file_text);
+    let hash = CacheDBHash::from_hashable(file_text);
     if let Some(previous_hash) = self.previous_hashes.get(file_path) {
       if *previous_hash == hash {
         return; // do not bother updating the db file because nothing has changed
@@ -262,7 +265,7 @@ mod test {
     let sql_cache = SqlIncrementalCache::new(conn, CacheDBHash::new(1));
     let file_path = PathBuf::from("/mod.ts");
     let file_text = "test";
-    let file_hash = CacheDBHash::from_source(file_text);
+    let file_hash = CacheDBHash::from_hashable(file_text);
     sql_cache.set_source_hash(&file_path, file_hash).unwrap();
     let cache = IncrementalCacheInner::from_sql_incremental_cache(
       sql_cache,
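The signature change moves hashing to the call site: IncrementalCache::new now receives a ready-made CacheDBHash (see the fmt.rs and lint.rs hunks below) instead of hashing an arbitrary state value itself. The cache's job is otherwise unchanged. The underlying pattern, a path-to-hash map that skips the database write when nothing changed, looks roughly like this TypeScript sketch (all names hypothetical):

// Hypothetical sketch of the incremental-cache pattern: remember one hash per
// file and skip persistence when the content hash is unchanged.
function hashOf(text: string): number {
  let h = 0;
  for (let i = 0; i < text.length; i++) {
    h = (h * 31 + text.charCodeAt(i)) | 0; // stand-in hash, not Deno's
  }
  return h;
}

class IncrementalCacheSketch {
  #previousHashes = new Map<string, number>();

  isFileSame(path: string, text: string): boolean {
    return this.#previousHashes.get(path) === hashOf(text);
  }

  updateFile(path: string, text: string): void {
    const hash = hashOf(text);
    if (this.#previousHashes.get(path) === hash) {
      return; // nothing changed, so do not bother writing to the db
    }
    this.#previousHashes.set(path, hash);
    // a real implementation would queue this hash for persistence here
  }
}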
cli/cache/mod.rs (vendored, 2 changes)
@@ -298,7 +298,7 @@ impl Loader for FetchCacher {
     module_info: &deno_graph::ModuleInfo,
   ) {
     log::debug!("Caching module info for {}", specifier);
-    let source_hash = CacheDBHash::from_source(source);
+    let source_hash = CacheDBHash::from_hashable(source);
     let result = self.module_info_cache.set_module_info(
       specifier,
       media_type,
cli/cache/module_info.rs (vendored, 4 changes)
@@ -194,7 +194,7 @@ impl<'a> ModuleInfoCacheModuleAnalyzer<'a> {
     source: &Arc<str>,
   ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
     // attempt to load from the cache
-    let source_hash = CacheDBHash::from_source(source);
+    let source_hash = CacheDBHash::from_hashable(source);
     if let Some(info) =
       self.load_cached_module_info(specifier, media_type, source_hash)
     {

@@ -228,7 +228,7 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
     media_type: MediaType,
   ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
     // attempt to load from the cache
-    let source_hash = CacheDBHash::from_source(&source);
+    let source_hash = CacheDBHash::from_hashable(&source);
     if let Some(info) =
       self.load_cached_module_info(specifier, media_type, source_hash)
     {
cli/node.rs

@@ -68,7 +68,7 @@ impl CliCjsCodeAnalyzer {
     specifier: &ModuleSpecifier,
     source: &str,
   ) -> Result<CliCjsAnalysis, AnyError> {
-    let source_hash = CacheDBHash::from_source(source);
+    let source_hash = CacheDBHash::from_hashable(source);
     if let Some(analysis) =
       self.cache.get_cjs_analysis(specifier.as_str(), source_hash)
     {
cli/tools/fmt.rs

@@ -43,6 +43,7 @@ use crate::args::FmtOptions;
 use crate::args::FmtOptionsConfig;
 use crate::args::ProseWrap;
 use crate::args::UnstableFmtOptions;
+use crate::cache::CacheDBHash;
 use crate::cache::Caches;
 use crate::cache::IncrementalCache;
 use crate::colors;

@@ -202,7 +203,7 @@ async fn format_files(
   let paths = paths_with_options.paths;
   let incremental_cache = Arc::new(IncrementalCache::new(
     caches.fmt_incremental_cache_db(),
-    &(&fmt_options.options, &fmt_options.unstable), // cache key
+    CacheDBHash::from_hashable((&fmt_options.options, &fmt_options.unstable)),
     &paths,
   ));
   formatter
cli/tools/lint.rs

@@ -39,6 +39,7 @@ use crate::args::Flags;
 use crate::args::LintFlags;
 use crate::args::LintOptions;
 use crate::args::WorkspaceLintOptions;
+use crate::cache::CacheDBHash;
 use crate::cache::Caches;
 use crate::cache::IncrementalCache;
 use crate::colors;

@@ -291,7 +292,7 @@ impl WorkspaceLinter {
     lint_rules.incremental_cache_state().map(|state| {
       Arc::new(IncrementalCache::new(
         self.caches.lint_incremental_cache_db(),
-        &state,
+        CacheDBHash::from_hashable(&state),
         &paths,
       ))
     });
ext/node/polyfills/_fs/_fs_open.ts

@@ -153,7 +153,7 @@ export function openPromise(
   return new Promise((resolve, reject) => {
     open(path, flags, mode, (err, fd) => {
       if (err) reject(err);
-      else resolve(new FileHandle(fd));
+      else resolve(new FileHandle(fd, path));
     });
   });
 }
ext/node/polyfills/internal/fs/handle.ts

@@ -5,7 +5,7 @@
 
 import { EventEmitter } from "node:events";
 import { Buffer } from "node:buffer";
-import { promises, read, write } from "node:fs";
+import { Mode, promises, read, write } from "node:fs";
 export type { BigIntStats, Stats } from "ext:deno_node/_fs/_fs_stat.ts";
 import {
   BinaryOptionsArgument,

@@ -26,11 +26,15 @@ interface ReadResult {
   buffer: Buffer;
 }
 
+type Path = string | Buffer | URL;
 export class FileHandle extends EventEmitter {
   #rid: number;
-  constructor(rid: number) {
+  #path: Path;
+
+  constructor(rid: number, path: Path) {
     super();
     this.#rid = rid;
+    this.#path = path;
   }
 
   get fd() {

@@ -144,17 +148,24 @@ export class FileHandle extends EventEmitter {
   stat(options?: { bigint: boolean }): Promise<Stats | BigIntStats> {
     return fsCall(promises.fstat, this, options);
   }
+
+  chmod(mode: Mode): Promise<void> {
+    assertNotClosed(this, promises.chmod.name);
+    return promises.chmod(this.#path, mode);
+  }
 }
 
-function fsCall(fn, handle, ...args) {
+function assertNotClosed(handle: FileHandle, syscall: string) {
   if (handle.fd === -1) {
     const err = new Error("file closed");
     throw Object.assign(err, {
       code: "EBADF",
-      syscall: fn.name,
+      syscall,
     });
   }
+}
+
+function fsCall(fn, handle, ...args) {
+  assertNotClosed(handle, fn.name);
   return fn(handle.fd, ...args);
 }
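Because openPromise now threads the opening path into the handle (previous hunk) and FileHandle stores it, chmod can simply delegate to the path-based promises.chmod, matching Node's FileHandle API. A small usage sketch (the file path is illustrative):

// Usage sketch for the new FileHandle.prototype.chmod.
import fs from "node:fs/promises";

const handle = await fs.open("example.txt", "w");
await handle.chmod(0o444); // mode as an octal number
await handle.close();
// After close(), chmod() is expected to throw EBADF (see assertNotClosed above).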
tests/unit_node/_fs/_fs_handle_test.ts

@@ -199,3 +199,21 @@ Deno.test(
     assertEquals(data.length, 0);
   },
 );
+
+Deno.test({
+  name: "[node/fs filehandle.chmod] Change the permissions of the file",
+  ignore: Deno.build.os === "windows",
+  async fn() {
+    const fileHandle = await fs.open(testData);
+
+    const readOnly = 0o444;
+    await fileHandle.chmod(readOnly.toString(8));
+    assertEquals(Deno.statSync(testData).mode! & 0o777, readOnly);
+
+    const readWrite = 0o666;
+    await fileHandle.chmod(readWrite.toString(8));
+    assertEquals(Deno.statSync(testData).mode! & 0o777, readWrite);
+
+    await fileHandle.close();
+  },
+});
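Two details worth noticing in the test: it passes the mode as readOnly.toString(8), that is, the octal string "444" rather than the number 0o444, which Node's Mode type accepts alongside numbers; and it masks stat's mode with 0o777 before comparing, since mode also carries file-type bits. A brief sketch of both points (file path illustrative):

// Node accepts numeric and string octal modes; mask off file-type bits to
// compare permissions, as the test above does.
import fs from "node:fs/promises";

const handle = await fs.open("example.txt", "w");
await handle.chmod("444"); // octal string form, equivalent to 0o444
const mode = (await handle.stat()).mode & 0o777; // keep permission bits only
console.log(mode === 0o444); // true on POSIX systems; chmod differs on Windows
await handle.close();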