
Merge remote-tracking branch 'upstream/main' into check-workspace-member-compiler-options

Nayeem Rahman 2024-12-10 01:24:22 +00:00
commit 61aa023d92
17 changed files with 144 additions and 78 deletions

Cargo.lock generated

@@ -668,6 +668,15 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bf2a5fb3207c12b5d208ebc145f967fea5cac41a021c37417ccc31ba40f39ee"
[[package]]
name = "capacity_builder"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2c0f637033edd76ceb881faaee372868a383f0ed7a4a59e8fdf90db2502f3d3"
dependencies = [
"itoa",
]
[[package]]
name = "caseless"
version = "0.2.1"
@@ -2087,6 +2096,7 @@ dependencies = [
name = "deno_permissions"
version = "0.41.0"
dependencies = [
"capacity_builder",
"deno_core",
"deno_path_util",
"deno_terminal 0.2.0",
@@ -2191,9 +2201,9 @@ dependencies = [
[[package]]
name = "deno_semver"
version = "0.6.0"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4756be7351289726087408984db18b9eb5e0186907673f39f858d119d0162071"
checksum = "7d1259270d66a5e6d29bb75c9289656541874f79ae9ff6c9f1c790846d5c07ba"
dependencies = [
"deno_error",
"monch",
@@ -4374,9 +4384,9 @@ dependencies = [
[[package]]
name = "itoa"
version = "1.0.11"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
[[package]]
name = "jni-sys"
@@ -4568,7 +4578,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
"windows-targets 0.48.5",
"windows-targets 0.52.6",
]
[[package]]


@@ -59,7 +59,7 @@ deno_npm = "=0.26.0"
deno_path_util = "=0.2.1"
deno_permissions = { version = "0.41.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.190.0", path = "./runtime" }
deno_semver = "=0.6.0"
deno_semver = "=0.6.1"
deno_terminal = "0.2.0"
napi_sym = { version = "0.111.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
@@ -109,6 +109,7 @@ boxed_error = "0.2.2"
brotli = "6.0.0"
bytes = "1.4.0"
cache_control = "=0.2.0"
capacity_builder = "0.1.0"
cbc = { version = "=0.1.2", features = ["alloc"] }
# Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
# Instead use util::time::utc_now()


@@ -743,13 +743,16 @@ fn get_node_completions(
}
let items = SUPPORTED_BUILTIN_NODE_MODULES
.iter()
.map(|name| {
.filter_map(|name| {
if name.starts_with('_') {
return None;
}
let specifier = format!("node:{}", name);
let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: specifier.clone(),
}));
lsp::CompletionItem {
Some(lsp::CompletionItem {
label: specifier,
kind: Some(lsp::CompletionItemKind::FILE),
detail: Some("(node)".to_string()),
@@ -758,7 +761,7 @@ fn get_node_completions(
IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
),
..Default::default()
}
})
})
.collect();
Some(CompletionList {

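A rough sketch of the filtering introduced above, assuming a plain Vec<String> result instead of the LSP CompletionItem type: the internal "_"-prefixed Node built-ins stay in SUPPORTED_BUILTIN_NODE_MODULES (see the generate_builtin_node_module_lists! hunk further down) but are skipped in completions via filter_map.

// Hypothetical, trimmed-down module list; the real constant is generated by a macro.
const SUPPORTED_BUILTIN_NODE_MODULES: &[&str] = &["_http_agent", "assert", "fs", "path"];

fn node_specifier_completions() -> Vec<String> {
    SUPPORTED_BUILTIN_NODE_MODULES
        .iter()
        .filter_map(|name| {
            // Internal modules keep working at runtime but are hidden from completions.
            if name.starts_with('_') {
                return None;
            }
            Some(format!("node:{}", name))
        })
        .collect()
}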

@@ -266,17 +266,20 @@ impl VfsBuilder {
let dir = self.add_dir(path.parent().unwrap())?;
let name = path.file_name().unwrap().to_string_lossy();
let data_len = data.len();
let offset_and_len = OffsetWithLength {
offset,
len: data.len() as u64,
};
match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {
Ok(index) => {
let entry = &mut dir.entries[index];
match entry {
VfsEntry::File(virtual_file) => match sub_data_kind {
VfsFileSubDataKind::Raw => {
virtual_file.offset = offset;
virtual_file.offset = offset_and_len;
}
VfsFileSubDataKind::ModuleGraph => {
virtual_file.module_graph_offset = offset;
virtual_file.module_graph_offset = offset_and_len;
}
},
VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(),
@@ -287,9 +290,8 @@ impl VfsBuilder {
insert_index,
VfsEntry::File(VirtualFile {
name: name.to_string(),
offset,
module_graph_offset: offset,
len: data.len() as u64,
offset: offset_and_len,
module_graph_offset: offset_and_len,
}),
);
}
@@ -298,7 +300,7 @@ impl VfsBuilder {
// new file, update the list of files
if self.current_offset == offset {
self.files.push(data);
self.current_offset += data_len as u64;
self.current_offset += offset_and_len.len;
}
Ok(())
@@ -403,7 +405,7 @@ impl<'a> VfsEntryRef<'a> {
mtime: None,
ctime: None,
blksize: 0,
size: file.len,
size: file.offset.len,
dev: 0,
ino: 0,
mode: 0,
@@ -472,27 +474,41 @@ impl VfsEntry {
#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualDirectory {
#[serde(rename = "n")]
pub name: String,
// should be sorted by name
#[serde(rename = "e")]
pub entries: Vec<VfsEntry>,
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct OffsetWithLength {
#[serde(rename = "o")]
pub offset: u64,
#[serde(rename = "l")]
pub len: u64,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VirtualFile {
#[serde(rename = "n")]
pub name: String,
pub offset: u64,
#[serde(rename = "o")]
pub offset: OffsetWithLength,
/// Offset file to use for module loading when it differs from the
/// raw file. Often this will be the same offset as above for data
/// such as JavaScript files, but for TypeScript files the `offset`
/// will be the original raw bytes when included as an asset and this
/// offset will be to the transpiled JavaScript source.
pub module_graph_offset: u64,
pub len: u64,
#[serde(rename = "m")]
pub module_graph_offset: OffsetWithLength,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualSymlink {
#[serde(rename = "n")]
pub name: String,
#[serde(rename = "p")]
pub dest_parts: Vec<String>,
}
@@ -636,7 +652,7 @@ impl FileBackedVfsFile {
Ok(pos)
}
SeekFrom::End(offset) => {
if offset < 0 && -offset as u64 > self.file.len {
if offset < 0 && -offset as u64 > self.file.offset.len {
let msg = "An attempt was made to move the file pointer before the beginning of the file.";
Err(
std::io::Error::new(std::io::ErrorKind::PermissionDenied, msg)
@@ -645,9 +661,9 @@ impl FileBackedVfsFile {
} else {
let mut current_pos = self.pos.lock();
*current_pos = if offset >= 0 {
self.file.len - (offset as u64)
self.file.offset.len - (offset as u64)
} else {
self.file.len + (-offset as u64)
self.file.offset.len + (-offset as u64)
};
Ok(*current_pos)
}
@@ -671,7 +687,7 @@ impl FileBackedVfsFile {
let mut pos = self.pos.lock();
let read_pos = *pos;
// advance the position due to the read
*pos = std::cmp::min(self.file.len, *pos + buf.len() as u64);
*pos = std::cmp::min(self.file.offset.len, *pos + buf.len() as u64);
read_pos
};
self
@@ -685,13 +701,13 @@ impl FileBackedVfsFile {
let mut pos = self.pos.lock();
let read_pos = *pos;
// todo(dsherret): should this always set it to the end of the file?
if *pos < self.file.len {
if *pos < self.file.offset.len {
// advance the position due to the read
*pos = self.file.len;
*pos = self.file.offset.len;
}
read_pos
};
if read_pos > self.file.len {
if read_pos > self.file.offset.len {
return Ok(Cow::Borrowed(&[]));
}
if read_pos == 0 {
@@ -701,7 +717,7 @@ impl FileBackedVfsFile {
.read_file_all(&self.file, VfsFileSubDataKind::Raw)?,
)
} else {
let size = (self.file.len - read_pos) as usize;
let size = (self.file.offset.len - read_pos) as usize;
let mut buf = vec![0; size];
self.vfs.read_file(&self.file, read_pos, &mut buf)?;
Ok(Cow::Owned(buf))
@@ -921,7 +937,11 @@ impl FileBackedVfs {
file: &VirtualFile,
sub_data_kind: VfsFileSubDataKind,
) -> std::io::Result<Cow<'static, [u8]>> {
let read_range = self.get_read_range(file, sub_data_kind, 0, file.len)?;
let read_len = match sub_data_kind {
VfsFileSubDataKind::Raw => file.offset.len,
VfsFileSubDataKind::ModuleGraph => file.module_graph_offset.len,
};
let read_range = self.get_read_range(file, sub_data_kind, 0, read_len)?;
match &self.vfs_data {
Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])),
Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())),
@@ -952,19 +972,20 @@ impl FileBackedVfs {
pos: u64,
len: u64,
) -> std::io::Result<Range<usize>> {
if pos > file.len {
let file_offset_and_len = match sub_data_kind {
VfsFileSubDataKind::Raw => file.offset,
VfsFileSubDataKind::ModuleGraph => file.module_graph_offset,
};
if pos > file_offset_and_len.len {
return Err(std::io::Error::new(
std::io::ErrorKind::UnexpectedEof,
"unexpected EOF",
));
}
let offset = match sub_data_kind {
VfsFileSubDataKind::Raw => file.offset,
VfsFileSubDataKind::ModuleGraph => file.module_graph_offset,
};
let file_offset = self.fs_root.start_file_offset + offset;
let file_offset =
self.fs_root.start_file_offset + file_offset_and_len.offset;
let start = file_offset + pos;
let end = file_offset + std::cmp::min(pos + len, file.len);
let end = file_offset + std::cmp::min(pos + len, file_offset_and_len.len);
Ok(start as usize..end as usize)
}

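The hunk above folds the separate offset and len fields of VirtualFile into one OffsetWithLength per sub-data kind, so the raw bytes and the transpiled module-graph bytes can each carry their own length. A simplified sketch of the resulting read-range computation; names follow the hunk, the surrounding VFS types are omitted, and the helper below is illustrative only:

#[derive(Debug, Clone, Copy)]
pub struct OffsetWithLength {
    pub offset: u64,
    pub len: u64,
}

// Clamp the requested window to the chosen sub-file's length, then translate
// it into absolute positions inside the embedded VFS data blob.
fn read_range(start_file_offset: u64, file: OffsetWithLength, pos: u64, len: u64) -> Option<std::ops::Range<usize>> {
    if pos > file.len {
        return None; // the real code returns an UnexpectedEof io::Error here
    }
    let base = start_file_offset + file.offset;
    let start = base + pos;
    let end = base + std::cmp::min(pos + len, file.len);
    Some(start as usize..end as usize)
}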

@@ -432,9 +432,8 @@ pub async fn add(
let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
for entry_text in add_flags.packages.iter() {
let req = AddRmPackageReq::parse(entry_text).with_context(|| {
format!("Failed to parse package required: {}", entry_text)
})?;
let req = AddRmPackageReq::parse(entry_text)
.with_context(|| format!("Failed to parse package: {}", entry_text))?;
match req {
Ok(add_req) => package_reqs.push(add_req),
@@ -805,9 +804,8 @@ pub async fn remove(
let mut removed_packages = vec![];
for package in &remove_flags.packages {
let req = AddRmPackageReq::parse(package).with_context(|| {
format!("Failed to parse package required: {}", package)
})?;
let req = AddRmPackageReq::parse(package)
.with_context(|| format!("Failed to parse package: {}", package))?;
let mut parsed_pkg_name = None;
for config in configs.iter_mut().flatten() {
match &req {


@@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::fmt::Write;
use std::path::Path;
use std::path::PathBuf;
@@ -58,8 +59,8 @@ pub fn get_atomic_file_path(file_path: &Path) -> PathBuf {
}
fn gen_rand_path_component() -> String {
(0..4).fold(String::new(), |mut output, _| {
output.push_str(&format!("{:02x}", rand::random::<u8>()));
(0..4).fold(String::with_capacity(8), |mut output, _| {
write!(&mut output, "{:02x}", rand::random::<u8>()).unwrap();
output
})
}

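This change, along with the matching progress-bar and cache hunks below, swaps output.push_str(&format!(..)) for write! into a pre-sized String, which avoids the temporary String that format! would allocate on each iteration. A small standalone illustration of the same pattern (hex_id is a hypothetical helper, not part of the commit):

use std::fmt::Write;

fn hex_id(bytes: &[u8]) -> String {
    // String implements fmt::Write, so formatted text can be appended in place.
    let mut output = String::with_capacity(bytes.len() * 2);
    for b in bytes {
        write!(&mut output, "{:02x}", b).unwrap();
    }
    output
}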

@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::fmt::Write;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::time::Duration;
@@ -81,12 +82,14 @@ impl ProgressBarRenderer for BarProgressBarRenderer {
let elapsed_text = get_elapsed_text(data.duration);
let mut text = String::new();
if !display_entry.message.is_empty() {
text.push_str(&format!(
"{} {}{}\n",
writeln!(
&mut text,
"{} {}{}",
colors::green("Download"),
display_entry.message,
bytes_text,
));
)
.unwrap();
}
text.push_str(&elapsed_text);
let max_width = (data.terminal_width as i32 - 5).clamp(10, 75) as usize;


@@ -25,6 +25,17 @@ macro_rules! generate_builtin_node_module_lists {
// NOTE(bartlomieju): keep this list in sync with `ext/node/polyfills/01_require.js`
generate_builtin_node_module_lists! {
"_http_agent",
"_http_common",
"_http_outgoing",
"_http_server",
"_stream_duplex",
"_stream_passthrough",
"_stream_readable",
"_stream_transform",
"_stream_writable",
"_tls_common",
"_tls_wrap",
"assert",
"assert/strict",
"async_hooks",


@@ -7,6 +7,7 @@ const {
SafeRegExp,
Symbol,
} = primordials;
import { HTTPParser } from "ext:deno_node/internal_binding/http_parser.ts";
export const CRLF = "\r\n";
export const kIncomingMessage = Symbol("IncomingMessage");
@@ -79,6 +80,8 @@ export {
checkIsHttpToken as _checkIsHttpToken,
};
export { HTTPParser };
export default {
_checkInvalidHeaderChar: checkInvalidHeaderChar,
_checkIsHttpToken: checkIsHttpToken,
@@ -87,4 +90,5 @@ export default {
continueExpression,
kIncomingMessage,
methods,
HTTPParser,
};


@@ -732,9 +732,9 @@ fn op_otel_instrumentation_scope_enter(
#[op2(fast)]
fn op_otel_instrumentation_scope_enter_builtin(state: &mut OpState) {
state.put(InstrumentationScope(
BUILT_IN_INSTRUMENTATION_SCOPE.get().unwrap().clone(),
));
if let Some(scope) = BUILT_IN_INSTRUMENTATION_SCOPE.get() {
state.put(InstrumentationScope(scope.clone()));
}
}
#[op2(fast)]
@@ -749,6 +749,9 @@ fn op_otel_log(
let Some(Processors { logs, .. }) = OTEL_PROCESSORS.get() else {
return;
};
let Some(instrumentation_scope) = BUILT_IN_INSTRUMENTATION_SCOPE.get() else {
return;
};
// Convert the integer log level that ext/console uses to the corresponding
// OpenTelemetry log severity.
@@ -776,10 +779,7 @@ fn op_otel_log(
);
}
logs.emit(
&mut log_record,
BUILT_IN_INSTRUMENTATION_SCOPE.get().unwrap(),
);
logs.emit(&mut log_record, instrumentation_scope);
}
fn owned_string<'s>(

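The otel.rs hunk drops the .get().unwrap() calls on BUILT_IN_INSTRUMENTATION_SCOPE and instead returns early when telemetry was never initialized. A minimal sketch of that guard pattern using std::sync::OnceLock; the real code uses OpenTelemetry's instrumentation-scope type, and the names here are placeholders:

use std::sync::OnceLock;

static BUILT_IN_SCOPE: OnceLock<String> = OnceLock::new();

fn emit_log(message: &str) {
    // Bail out instead of panicking when the scope was never set.
    let Some(scope) = BUILT_IN_SCOPE.get() else {
        return;
    };
    println!("[{scope}] {message}");
}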

@@ -220,6 +220,7 @@ function submitSpan(
startTime: number,
endTime: number,
) {
if (!TRACING_ENABLED) return;
if (!(traceFlags & TRACE_FLAG_SAMPLED)) return;
// TODO(@lucacasonato): `resource` is ignored for now, should we implement it?


@@ -219,8 +219,9 @@ fn get_atomic_dir_path(file_path: &Path) -> PathBuf {
}
fn gen_rand_path_component() -> String {
(0..4).fold(String::new(), |mut output, _| {
output.push_str(&format!("{:02x}", rand::random::<u8>()));
use std::fmt::Write;
(0..4).fold(String::with_capacity(8), |mut output, _| {
write!(&mut output, "{:02x}", rand::random::<u8>()).unwrap();
output
})
}


@@ -14,6 +14,7 @@ name = "deno_permissions"
path = "lib.rs"
[dependencies]
capacity_builder.workspace = true
deno_core.workspace = true
deno_path_util.workspace = true
deno_terminal.workspace = true


@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use capacity_builder::StringBuilder;
use deno_core::parking_lot::Mutex;
use deno_core::serde::de;
use deno_core::serde::Deserialize;
@@ -179,13 +180,18 @@ impl PermissionState {
(Ok(()), false, false)
}
PermissionState::Prompt if prompt => {
let msg = format!(
"{} access{}",
name,
info()
.map(|info| { format!(" to {info}") })
.unwrap_or_default(),
);
let msg = {
let info = info();
StringBuilder::build(|builder| {
builder.append(name);
builder.append(" access");
if let Some(info) = &info {
builder.append(" to ");
builder.append(info);
}
})
.unwrap()
};
match permission_prompt(&msg, name, api_name, true) {
PromptResponse::Allow => {
Self::log_perm_access(name, info);
@@ -344,11 +350,11 @@ pub trait QueryDescriptor: Debug {
fn overlaps_deny(&self, other: &Self::DenyDesc) -> bool;
}
fn format_display_name(display_name: Cow<str>) -> String {
fn format_display_name(display_name: Cow<str>) -> Cow<str> {
if display_name.starts_with('<') && display_name.ends_with('>') {
display_name.into_owned()
display_name
} else {
format!("\"{}\"", display_name)
Cow::Owned(format!("\"{}\"", display_name))
}
}
@@ -424,7 +430,7 @@ impl<TQuery: QueryDescriptor> UnaryPermission<TQuery> {
.check2(
TQuery::flag_name(),
api_name,
|| desc.map(|d| format_display_name(d.display_name())),
|| desc.map(|d| format_display_name(d.display_name()).into_owned()),
self.prompt,
);
if prompted {
@@ -487,12 +493,17 @@ impl<TQuery: QueryDescriptor> UnaryPermission<TQuery> {
if !self.prompt {
return PermissionState::Denied;
}
let mut message = String::with_capacity(40);
message.push_str(&format!("{} access", TQuery::flag_name()));
if let Some(desc) = desc {
message
.push_str(&format!(" to {}", format_display_name(desc.display_name())));
}
let maybe_formatted_display_name =
desc.map(|d| format_display_name(d.display_name()));
let message = StringBuilder::build(|builder| {
builder.append(TQuery::flag_name());
builder.append(" access");
if let Some(display_name) = &maybe_formatted_display_name {
builder.append(" to ");
builder.append(display_name)
}
})
.unwrap();
match permission_prompt(
&message,
TQuery::flag_name(),

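The permission prompt messages above are now assembled with capacity_builder::StringBuilder instead of format!. Judging only from the usage in this hunk, the builder sizes the final String up front so the message is produced with a single allocation; a hedged sketch mirroring that API:

use capacity_builder::StringBuilder;

// Assumes StringBuilder::build(..) -> Result<String, _> and builder.append(&str),
// exactly as used in the hunk above.
fn access_message(flag_name: &str, display_name: Option<&str>) -> String {
    StringBuilder::build(|builder| {
        builder.append(flag_name);
        builder.append(" access");
        if let Some(display_name) = display_name {
            builder.append(" to ");
            builder.append(display_name);
        }
    })
    .unwrap()
}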

@@ -1,4 +1,4 @@
error: Failed to parse package required: jsr:@std/testing/bdd@1
error: Failed to parse package: jsr:@std/testing/bdd@1
Caused by:
Invalid package specifier 'jsr:@std/testing/bdd@1'. Did you mean to write 'jsr:@std/testing@1/bdd'?
Invalid package specifier 'jsr:@std/testing/bdd@1'. Did you mean to write 'jsr:@std/testing@1/bdd'? If not, add a version requirement to the specifier.


@@ -1,4 +1,4 @@
error: Failed to parse package required: npm:preact/hooks@10
error: Failed to parse package: npm:preact/hooks@10
Caused by:
Invalid package specifier 'npm:preact/hooks@10'. Did you mean to write 'npm:preact@10/hooks'?
Invalid package specifier 'npm:preact/hooks@10'. Did you mean to write 'npm:preact@10/hooks'? If not, add a version requirement to the specifier.


@@ -1,2 +1,2 @@
error: Invalid package specifier 'npm:react-dom/server@18.2.0'. Did you mean to write 'npm:react-dom@18.2.0/server'?
error: Invalid package specifier 'npm:react-dom/server@18.2.0'. Did you mean to write 'npm:react-dom@18.2.0/server'? If not, add a version requirement to the specifier.
at [WILDCARD]/error_version_after_subpath/main.js:1:8