1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2025-01-20 20:42:19 -05:00

Compare commits

...

22 commits

Author SHA1 Message Date
Bartek Iwańczuk
e98f1e26f0
Merge 461c5c0659 into 5e9b3712de 2025-01-21 00:04:48 +09:00
Luca Casonato
5e9b3712de
feat(unstable): add basic support for otel trace links (#27727)
Currently only links with no attributes.
2025-01-20 15:39:59 +01:00
Bartek Iwańczuk
395628026f
fix(ext/os): pass SignalState to web worker (#27741)
Closes https://github.com/denoland/deno/issues/27717

Made a mistake in https://github.com/denoland/deno/pull/27655 and
didn't add the `SignalStore` for web worker.
2025-01-20 19:43:15 +05:30
Divy Srivastava
4f27d7cdc0
fix(ext/node): GCM auth tag check on DechiperIv#final (#27733) 2025-01-20 18:16:44 +05:30
Bartek Iwańczuk
461c5c0659
don't allocate 2025-01-03 00:23:01 +01:00
Bartek Iwańczuk
0db1c7965a
lint 2025-01-02 11:23:57 +01:00
Bartek Iwańczuk
916d96103a
Merge branch 'main' into lint_skip_minified_files 2025-01-02 11:08:09 +01:00
Bartek Iwańczuk
8db506ffd4
fix 2024-11-30 03:47:04 +01:00
Bartek Iwańczuk
03ed40abad
tests 2024-11-30 03:07:28 +01:00
Bartek Iwańczuk
ae87b37215
Merge branch 'main' into lint_skip_minified_files 2024-11-30 02:19:30 +01:00
Bartek Iwańczuk
a95ea7e142
lint 2024-11-14 16:39:39 +01:00
Bartek Iwańczuk
61ea7e91a7
test 2024-11-14 14:44:17 +01:00
Bartek Iwańczuk
2a2da281df
wire up diagnostic 2024-11-14 14:41:00 +01:00
Bartek Iwańczuk
4d5f9028fd
add lint result 2024-11-14 12:56:45 +01:00
Bartek Iwańczuk
4b695e2db0
Merge branch 'main' into lint_skip_minified_files 2024-11-14 12:55:43 +01:00
Bartek Iwańczuk
dec220a239
Merge branch 'main' into lint_skip_minified_files 2024-11-07 00:47:30 +01:00
Bartek Iwańczuk
336cda567a
add tests 2024-11-07 00:47:21 +01:00
Bartek Iwańczuk
66a152a0ac
rework 2024-11-07 00:38:04 +01:00
Bartek Iwańczuk
9520620289
wip 2024-11-06 18:13:56 +01:00
Bartek Iwańczuk
207ec468c1
wip 2024-11-05 14:05:18 +01:00
Bartek Iwańczuk
643cae5a39
do fewer allocations 2024-11-05 02:52:33 +01:00
Bartek Iwańczuk
fbc9a65125
fix(lint): skip linting minified files 2024-10-30 00:56:58 +01:00
22 changed files with 516 additions and 70 deletions

View file

@ -45,6 +45,7 @@
"tests/specs/fmt",
"tests/specs/lint/bom",
"tests/specs/lint/default_ts",
"tests/specs/lint/minified",
"tests/specs/lint/syntax_error_reporting",
"tests/specs/publish/no_check_surfaces_syntax_error",
"tests/specs/run/default_ts",

View file

@ -17,12 +17,24 @@ use deno_lint::linter::Linter as DenoLintLinter;
use deno_lint::linter::LinterOptions;
use deno_path_util::fs::atomic_write_file_with_retries;
use super::minified_file;
use super::rules::FileOrPackageLintRule;
use super::rules::PackageLintRule;
use super::ConfiguredRules;
use crate::sys::CliSys;
use crate::util::fs::specifier_from_file_path;
/// Outcome of linting a single file.
pub enum LintResult {
  /// File was parsed and linted; `diagnostics` may be empty when the
  /// file is clean.
  Linted {
    parsed_source: ParsedSource,
    diagnostics: Vec<LintDiagnostic>,
  },
  /// File was not parsed and linted because, e.g., it might have
  /// been a minified file. `reason` is a human-readable explanation.
  Skipped { reason: String },
}
pub struct CliLinterOptions {
pub configured_rules: ConfiguredRules,
pub fix: bool,
@ -95,8 +107,15 @@ impl CliLinter {
file_path: &Path,
source_code: String,
ext: Option<&str>,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), AnyError> {
) -> Result<LintResult, AnyError> {
let specifier = specifier_from_file_path(file_path)?;
if minified_file::is_likely_minified(&source_code) {
return Ok(LintResult::Skipped {
reason: "The file is minified".to_string(),
});
}
let media_type = if let Some(ext) = ext {
MediaType::from_str(&format!("placeholder.{ext}"))
} else if file_path.extension().is_none() {
@ -108,7 +127,7 @@ impl CliLinter {
if self.fix {
self.lint_file_and_fix(&specifier, media_type, source_code, file_path)
} else {
self
let (parsed_source, diagnostics) = self
.linter
.lint_file(LintFileOptions {
specifier,
@ -116,7 +135,11 @@ impl CliLinter {
source_code,
config: self.deno_lint_config.clone(),
})
.map_err(AnyError::from)
.map_err(AnyError::from)?;
Ok(LintResult::Linted {
parsed_source,
diagnostics,
})
}
}
@ -126,7 +149,7 @@ impl CliLinter {
media_type: MediaType,
source_code: String,
file_path: &Path,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), deno_core::anyhow::Error> {
) -> Result<LintResult, deno_core::anyhow::Error> {
// initial lint
let (source, diagnostics) = self.linter.lint_file(LintFileOptions {
specifier: specifier.clone(),
@ -184,7 +207,10 @@ impl CliLinter {
.context("Failed writing fix to file.")?;
}
Ok((source, diagnostics))
Ok(LintResult::Linted {
parsed_source: source,
diagnostics,
})
}
}

View file

@ -0,0 +1,180 @@
// Copyright 2018-2025 the Deno authors. MIT license.
/// Aggregate metrics collected over a file, used to decide whether the
/// file looks like minified output.
#[derive(Debug)]
pub struct FileMetrics {
  // Number of non-comment lines longer than the long-line threshold.
  long_lines_count: usize,
  // Total number of lines in the file.
  total_lines: usize,
  // Fraction of all characters that are whitespace.
  whitespace_ratio: f64,
  // Whether a license/copyright marker was seen anywhere in the file.
  has_license_comment: bool,
}

impl FileMetrics {
  /// Heuristic verdict: a file is considered minified when a significant
  /// share of its lines are very long, or it contains almost no
  /// whitespace — unless it is a tiny file that is just a license banner.
  #[inline]
  pub fn is_likely_minified(&self) -> bool {
    let long_lines_ratio =
      self.long_lines_count as f64 / self.total_lines as f64;

    (long_lines_ratio >= 0.2 || self.whitespace_ratio < 0.05)
      && !(self.has_license_comment && self.total_lines < 3)
  }
}

/// Analyze the content and tell if the file is most likely a minified file or not.
pub fn is_likely_minified(content: &str) -> bool {
  // Lines longer than this are counted as "long" (typical of minified output).
  const LONG_LINE_LEN: usize = 250;

  // If total len of a file is shorter than the "long line" length, don't
  // bother analyzing and consider non-minified.
  if content.len() < LONG_LINE_LEN {
    return false;
  }

  // Whitespace ratio is computed over the whole content, newlines included.
  let mut total_chars = 0usize;
  let mut whitespace_count = 0usize;
  for c in content.chars() {
    total_chars += 1;
    if c.is_whitespace() {
      whitespace_count += 1;
    }
  }

  let mut total_lines = 0usize;
  let mut long_lines_count = 0usize;
  let mut has_license = false;
  let mut in_multiline_comment = false;

  // `str::lines()` yields borrowed slices, so no per-line allocations.
  // NOTE: the previous implementation sliced `&content[..idx]` with a
  // *char* index (panics on multibyte input) and analyzed the whole file
  // prefix instead of the current line, misclassifying normal files.
  for line in content.lines() {
    total_lines += 1;
    let trimmed = line.trim();

    // Check for license/copyright only if we haven't found one yet.
    if !has_license && !trimmed.is_empty() {
      has_license = trimmed.chars().any(|c| c.is_ascii_alphabetic())
        && (trimmed.contains("license")
          || trimmed.contains("LICENSE")
          || trimmed.contains("copyright")
          || trimmed.contains("COPYRIGHT")
          || trimmed.contains("(c)")
          || trimmed.contains("(C)"));
    }

    // Comment lines never count towards the long-line metric.
    if trimmed.starts_with("/*") {
      in_multiline_comment = true;
    }
    if trimmed.ends_with("*/") {
      in_multiline_comment = false;
      continue;
    }
    if in_multiline_comment || trimmed.starts_with("//") {
      continue;
    }

    if line.len() > LONG_LINE_LEN {
      long_lines_count += 1;
    }
  }

  let whitespace_ratio = if total_chars > 0 {
    whitespace_count as f64 / total_chars as f64
  } else {
    0.0
  };

  let metrics = FileMetrics {
    long_lines_count,
    total_lines,
    whitespace_ratio,
    has_license_comment: has_license,
  };
  metrics.is_likely_minified()
}
#[cfg(test)]
mod tests {
  use super::*;

  // A short, conventionally formatted file with comments is not minified.
  #[test]
  fn test_normal_js() {
    let content = r#"
function hello() {
// This is a normal comment
console.log("Hello, world!");
}
// Another comment
const x = 42;
/* Multi-line
comment */
"#;
    assert!(!is_likely_minified(content));
  }

  // Empty input is never considered minified.
  #[test]
  fn empty_file() {
    assert!(!is_likely_minified(""));
  }

  // A single very long line trips the long-line heuristic.
  #[test]
  fn test_minified_file_col_length() {
    let content =
      "const LOREM_IPSUM = `Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.`";
    assert!(is_likely_minified(content));
  }

  // Typical minified JS: one long line with almost no whitespace.
  #[test]
  fn test_minified_js() {
    let content = "function hello(){console.log(\"Hello, world!\")}const x=42;function veryLongFunction(){return\"This is a very long line that exceeds 250 characters and contains lots of code and stuff and more code and even more stuff until we definitely exceed the limit we set for considering a line to be very long in our minification detection algorithm\"}";
    assert!(is_likely_minified(content));
  }

  // Short content is below the analysis threshold and passes through.
  #[test]
  fn test_minified_file_whitespace() {
    let content =
      "function f(a,b){return a.concat(b)}var x=function(n){return n+1};";
    assert!(!is_likely_minified(content));
  }

  // A bare license banner must not be flagged as minified.
  #[test]
  fn test_license_only() {
    let content = r#"/*
* Copyright (c) 2024 Example Corp.
* Licensed under MIT License
*/
"#;
    assert!(!is_likely_minified(content));
  }

  // Regular multi-line code with normal line lengths is not minified.
  #[test]
  fn test_normal_file() {
    let content = r#"
function concatenateArrays(array1, array2) {
return array1.concat(array2);
}
const incrementNumber = function(number) {
return number + 1;
};"#;
    assert!(!is_likely_minified(content));
  }
}

View file

@ -12,7 +12,6 @@ use std::rc::Rc;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_config::deno_json::LintRulesConfig;
use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
@ -26,7 +25,6 @@ use deno_core::serde_json;
use deno_core::unsync::future::LocalFutureExt;
use deno_core::unsync::future::SharedLocal;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::LintConfig as DenoLintConfig;
use log::debug;
use reporters::create_reporter;
@ -55,6 +53,7 @@ use crate::util::sync::AtomicFlag;
mod ast_buffer;
mod linter;
mod minified_file;
mod reporters;
mod rules;
@ -62,6 +61,7 @@ mod rules;
pub use ast_buffer::serialize_ast_to_buffer;
pub use linter::CliLinter;
pub use linter::CliLinterOptions;
use linter::LintResult;
pub use rules::collect_no_slow_type_diagnostics;
pub use rules::ConfiguredRules;
pub use rules::LintRuleProvider;
@ -316,7 +316,6 @@ impl WorkspaceLinter {
let fut = async move {
let operation = move |file_path: PathBuf| {
let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?);
// don't bother rechecking this file if it didn't have any diagnostics before
if let Some(incremental_cache) = &maybe_incremental_cache_ {
if incremental_cache.is_file_same(&file_path, &file_text) {
@ -329,14 +328,18 @@ impl WorkspaceLinter {
file_text,
cli_options.ext_flag().as_deref(),
);
if let Ok((file_source, file_diagnostics)) = &r {
if let Ok(LintResult::Linted {
parsed_source,
diagnostics,
}) = &r
{
if let Some(incremental_cache) = &maybe_incremental_cache_ {
if file_diagnostics.is_empty() {
if diagnostics.is_empty() {
// update the incremental cache if there were no diagnostics
incremental_cache.update_file(
&file_path,
// ensure the returned text is used here as it may have been modified via --fix
file_source.text(),
parsed_source.text(),
)
}
}
@ -552,34 +555,43 @@ fn lint_stdin(
fn handle_lint_result(
file_path: &str,
result: Result<(ParsedSource, Vec<LintDiagnostic>), AnyError>,
result: Result<LintResult, AnyError>,
reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
) -> bool {
let mut reporter = reporter_lock.lock();
match result {
Ok((source, mut file_diagnostics)) => {
if !source.diagnostics().is_empty() {
for parse_diagnostic in source.diagnostics() {
log::warn!("{}: {}", colors::yellow("warn"), parse_diagnostic);
}
}
file_diagnostics.sort_by(|a, b| match a.specifier.cmp(&b.specifier) {
std::cmp::Ordering::Equal => {
let a_start = a.range.as_ref().map(|r| r.range.start);
let b_start = b.range.as_ref().map(|r| r.range.start);
match a_start.cmp(&b_start) {
std::cmp::Ordering::Equal => a.details.code.cmp(&b.details.code),
other => other,
Ok(lint_result) => match lint_result {
LintResult::Linted {
parsed_source,
mut diagnostics,
} => {
if !parsed_source.diagnostics().is_empty() {
for parse_diagnostic in parsed_source.diagnostics() {
log::warn!("{}: {}", colors::yellow("warn"), parse_diagnostic);
}
}
file_order => file_order,
});
for d in &file_diagnostics {
reporter.visit_diagnostic(d);
diagnostics.sort_by(|a, b| match a.specifier.cmp(&b.specifier) {
std::cmp::Ordering::Equal => {
let a_start = a.range.as_ref().map(|r| r.range.start);
let b_start = b.range.as_ref().map(|r| r.range.start);
match a_start.cmp(&b_start) {
std::cmp::Ordering::Equal => a.details.code.cmp(&b.details.code),
other => other,
}
}
file_order => file_order,
});
for d in &diagnostics {
reporter.visit_diagnostic(d);
}
diagnostics.is_empty()
}
file_diagnostics.is_empty()
}
LintResult::Skipped { reason } => {
reporter.visit_skipped(file_path, &reason);
true
}
},
Err(err) => {
reporter.visit_error(file_path, &err);
false
@ -593,6 +605,12 @@ struct LintError {
message: String,
}
/// JSON-reporter record for a file that was skipped rather than linted
/// (e.g. a minified file); serialized into the `skipped` array.
#[derive(Serialize)]
struct LintSkipped {
  // Path of the skipped file.
  file_path: String,
  // Human-readable reason why the file was skipped.
  message: String,
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;

View file

@ -9,6 +9,7 @@ use log::info;
use serde::Serialize;
use super::LintError;
use super::LintSkipped;
use crate::args::LintReporterKind;
const JSON_SCHEMA_VERSION: u8 = 1;
@ -23,6 +24,7 @@ pub fn create_reporter(kind: LintReporterKind) -> Box<dyn LintReporter + Send> {
pub trait LintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic);
fn visit_skipped(&mut self, file_path: &str, reason: &str);
fn visit_error(&mut self, file_path: &str, err: &AnyError);
fn close(&mut self, check_count: usize);
}
@ -51,6 +53,11 @@ impl LintReporter for PrettyLintReporter {
log::error!("{}\n", d.display());
}
fn visit_skipped(&mut self, file_path: &str, reason: &str) {
log::info!("File was skipped: {file_path}");
log::info!(" {reason}");
}
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
log::error!("Error linting: {file_path}");
log::error!(" {err}");
@ -112,6 +119,11 @@ impl LintReporter for CompactLintReporter {
}
}
fn visit_skipped(&mut self, file_path: &str, reason: &str) {
log::info!("File was skipped: {file_path}");
log::info!(" {reason}");
}
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
log::error!("Error linting: {file_path}");
log::error!(" {err}");
@ -173,6 +185,7 @@ struct JsonLintDiagnostic {
struct JsonLintReporter {
version: u8,
diagnostics: Vec<JsonLintDiagnostic>,
skipped: Vec<LintSkipped>,
errors: Vec<LintError>,
checked_files: Vec<String>,
}
@ -182,6 +195,7 @@ impl JsonLintReporter {
JsonLintReporter {
version: JSON_SCHEMA_VERSION,
diagnostics: Vec::new(),
skipped: Vec::new(),
errors: Vec::new(),
checked_files: Vec::new(),
}
@ -223,6 +237,13 @@ impl LintReporter for JsonLintReporter {
}
}
/// Record a skipped file so it is emitted in the JSON output's
/// `skipped` array when the report is closed.
fn visit_skipped(&mut self, file_path: &str, reason: &str) {
  self.skipped.push(LintSkipped {
    file_path: file_path.to_string(),
    message: reason.to_string(),
  });
}
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
self.errors.push(LintError {
file_path: file_path.to_string(),

View file

@ -226,7 +226,6 @@ deno_core::extension!(deno_node,
ops::crypto::op_node_decipheriv_decrypt,
ops::crypto::op_node_decipheriv_final,
ops::crypto::op_node_decipheriv_set_aad,
ops::crypto::op_node_decipheriv_take,
ops::crypto::op_node_dh_compute_secret,
ops::crypto::op_node_diffie_hellman,
ops::crypto::op_node_ecdh_compute_public_key,

View file

@ -500,6 +500,11 @@ impl Decipher {
auth_tag: &[u8],
) -> Result<(), DecipherError> {
use Decipher::*;
if input.is_empty() && !matches!(self, Aes128Gcm(_) | Aes256Gcm(_)) {
return Ok(());
}
match (self, auto_pad) {
(Aes128Cbc(decryptor), true) => {
assert!(input.len() == 16);

View file

@ -332,17 +332,6 @@ pub fn op_node_decipheriv_decrypt(
true
}
#[op2(fast)]
pub fn op_node_decipheriv_take(
state: &mut OpState,
#[smi] rid: u32,
) -> Result<(), cipher::DecipherContextError> {
let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
Rc::try_unwrap(context)
.map_err(|_| cipher::DecipherContextError::ContextInUse)?;
Ok(())
}
#[op2]
pub fn op_node_decipheriv_final(
state: &mut OpState,

View file

@ -18,7 +18,6 @@ import {
op_node_decipheriv_decrypt,
op_node_decipheriv_final,
op_node_decipheriv_set_aad,
op_node_decipheriv_take,
op_node_private_decrypt,
op_node_private_encrypt,
op_node_public_encrypt,
@ -352,14 +351,6 @@ export class Decipheriv extends Transform implements Cipher {
}
final(encoding: string = getDefaultEncoding()): Buffer | string {
if (!this.#needsBlockCache || this.#cache.cache.byteLength === 0) {
op_node_decipheriv_take(this.#context);
return encoding === "buffer" ? Buffer.from([]) : "";
}
if (this.#cache.cache.byteLength != 16) {
throw new Error("Invalid final block size");
}
let buf = new Buffer(16);
op_node_decipheriv_final(
this.#context,
@ -369,6 +360,13 @@ export class Decipheriv extends Transform implements Cipher {
this.#authTag || NO_TAG,
);
if (!this.#needsBlockCache || this.#cache.cache.byteLength === 0) {
return encoding === "buffer" ? Buffer.from([]) : "";
}
if (this.#cache.cache.byteLength != 16) {
throw new Error("Invalid final block size");
}
buf = buf.subarray(0, 16 - buf.at(-1)); // Padded in Pkcs7 mode
return encoding === "buffer" ? buf : buf.toString(encoding);
}

View file

@ -116,6 +116,12 @@ deno_core::extension!(
"op_exit" | "op_set_exit_code" | "op_get_exit_code" =>
op.with_implementation_from(&deno_core::op_void_sync()),
_ => op,
},
state = |state| {
#[cfg(unix)]
{
state.put(ops::signal::SignalState::default());
}
}
);

View file

@ -42,6 +42,7 @@ use opentelemetry::metrics::InstrumentBuilder;
use opentelemetry::metrics::MeterProvider as _;
use opentelemetry::otel_debug;
use opentelemetry::otel_error;
use opentelemetry::trace::Link;
use opentelemetry::trace::SpanContext;
use opentelemetry::trace::SpanId;
use opentelemetry::trace::SpanKind;
@ -94,6 +95,7 @@ deno_core::extension!(
op_otel_span_attribute1,
op_otel_span_attribute2,
op_otel_span_attribute3,
op_otel_span_add_link,
op_otel_span_update_name,
op_otel_metric_attribute3,
op_otel_metric_record0,
@ -1324,17 +1326,6 @@ impl OtelSpan {
}
}
#[fast]
fn drop_link(&self) {
let mut state = self.0.borrow_mut();
match &mut **state {
OtelSpanState::Recording(span) => {
span.links.dropped_count += 1;
}
OtelSpanState::Done(_) => {}
}
}
#[fast]
fn end(&self, end_time: f64) {
let end_time = if end_time.is_nan() {
@ -1448,6 +1439,48 @@ fn op_otel_span_update_name<'s>(
}
}
/// Attaches a link to another span's context onto `span`.
///
/// Returns `false` when the provided trace id or span id fails to parse
/// into a valid OTel id (the link is rejected). Returns `true` otherwise —
/// including when `span` is not an `OtelSpan` or has already finished
/// recording, in which case the link is silently dropped.
#[op2(fast)]
fn op_otel_span_add_link<'s>(
  scope: &mut v8::HandleScope<'s>,
  span: v8::Local<'s, v8::Value>,
  trace_id: v8::Local<'s, v8::Value>,
  span_id: v8::Local<'s, v8::Value>,
  #[smi] trace_flags: u8,
  is_remote: bool,
  #[smi] dropped_attributes_count: u32,
) -> bool {
  // Validate ids before touching the span so invalid links are reported
  // back to the JS caller.
  let trace_id = parse_trace_id(scope, trace_id);
  if trace_id == TraceId::INVALID {
    return false;
  };
  let span_id = parse_span_id(scope, span_id);
  if span_id == SpanId::INVALID {
    return false;
  };
  let span_context = SpanContext::new(
    trace_id,
    span_id,
    TraceFlags::new(trace_flags),
    is_remote,
    TraceState::NONE,
  );
  let Some(span) =
    deno_core::_ops::try_unwrap_cppgc_object::<OtelSpan>(scope, span)
  else {
    return true;
  };
  let mut state = span.0.borrow_mut();
  if let OtelSpanState::Recording(span) = &mut **state {
    // Link attributes are not supported yet; only the dropped count is
    // carried through — hence the empty attribute vec.
    span.links.links.push(Link::new(
      span_context,
      vec![],
      dropped_attributes_count,
    ));
  }
  true
}
struct OtelMeter(opentelemetry::metrics::Meter);
impl deno_core::GarbageCollected for OtelMeter {}

View file

@ -15,6 +15,7 @@ import {
op_otel_metric_record2,
op_otel_metric_record3,
op_otel_metric_wait_to_observe,
op_otel_span_add_link,
op_otel_span_attribute1,
op_otel_span_attribute2,
op_otel_span_attribute3,
@ -186,7 +187,6 @@ interface OtelSpan {
spanContext(): SpanContext;
setStatus(status: SpanStatusCode, errorDescription: string): void;
dropEvent(): void;
dropLink(): void;
end(endTime: number): void;
}
@ -359,14 +359,24 @@ class Span {
return this;
}
addLink(_link: Link): Span {
this.#otelSpan?.dropLink();
addLink(link: Link): Span {
  // Link attributes are not supported yet: every provided attribute is
  // reported as dropped, on top of the count the caller already dropped.
  const droppedAttributeCount = (link.droppedAttributesCount ?? 0) +
    (link.attributes ? ObjectKeys(link.attributes).length : 0);
  // The op returns false for an invalid trace/span id, in which case the
  // link is simply not recorded. Either way the method returns `this`
  // (the original `if (!valid) return this;` before `return this;` was a
  // dead branch).
  op_otel_span_add_link(
    this.#otelSpan,
    link.context.traceId,
    link.context.spanId,
    link.context.traceFlags,
    link.context.isRemote ?? false,
    droppedAttributeCount,
  );
  return this;
}
addLinks(links: Link[]): Span {
  // Delegate each link to addLink() so id validation and attribute-drop
  // accounting happen in one place. (The stale `this.#otelSpan?.dropLink()`
  // call was removed: dropLink no longer exists on the OtelSpan interface.)
  for (let i = 0; i < links.length; i++) {
    this.addLink(links[i]);
  }
  return this;
}

View file

@ -22,6 +22,10 @@
},
"args": "run -A main.ts metric.ts",
"output": "metric.out"
},
"links": {
"args": "run -A main.ts links.ts",
"output": "links.out"
}
}
}

View file

@ -0,0 +1,96 @@
{
"spans": [
{
"traceId": "00000000000000000000000000000001",
"spanId": "0000000000000001",
"traceState": "",
"parentSpanId": "",
"flags": 1,
"name": "example span",
"kind": 1,
"startTimeUnixNano": "[WILDCARD]",
"endTimeUnixNano": "[WILDCARD]",
"attributes": [],
"droppedAttributesCount": 0,
"events": [],
"droppedEventsCount": 0,
"links": [
{
"traceId": "1234567890abcdef1234567890abcdef",
"spanId": "1234567890abcdef",
"traceState": "",
"attributes": [],
"droppedAttributesCount": 0,
"flags": 1
}
],
"droppedLinksCount": 0,
"status": {
"message": "",
"code": 0
}
},
{
"traceId": "00000000000000000000000000000002",
"spanId": "0000000000000002",
"traceState": "",
"parentSpanId": "",
"flags": 1,
"name": "example span",
"kind": 1,
"startTimeUnixNano": "[WILDCARD]",
"endTimeUnixNano": "[WILDCARD]",
"attributes": [],
"droppedAttributesCount": 0,
"events": [],
"droppedEventsCount": 0,
"links": [
{
"traceId": "1234567890abcdef1234567890abcdef",
"spanId": "1234567890abcdef",
"traceState": "",
"attributes": [],
"droppedAttributesCount": 0,
"flags": 1
}
],
"droppedLinksCount": 0,
"status": {
"message": "",
"code": 0
}
},
{
"traceId": "00000000000000000000000000000003",
"spanId": "0000000000000003",
"traceState": "",
"parentSpanId": "",
"flags": 1,
"name": "example span",
"kind": 1,
"startTimeUnixNano": "[WILDCARD]",
"endTimeUnixNano": "[WILDCARD]",
"attributes": [],
"droppedAttributesCount": 0,
"events": [],
"droppedEventsCount": 0,
"links": [
{
"traceId": "1234567890abcdef1234567890abcdef",
"spanId": "1234567890abcdef",
"traceState": "",
"attributes": [],
"droppedAttributesCount": 2,
"flags": 1
}
],
"droppedLinksCount": 0,
"status": {
"message": "",
"code": 0
}
}
],
"logs": [],
"metrics": []
}

View file

@ -0,0 +1,40 @@
// Copyright 2018-2025 the Deno authors. MIT license.

// Spec fixture: exercises span links three ways; expected exporter output
// lives in links.out.
import { trace } from "npm:@opentelemetry/api@1.9.0";

const tracer = trace.getTracer("example-tracer");

// 1. Link supplied up front via startSpan options.
const span1 = tracer.startSpan("example span", {
  links: [{
    context: {
      traceId: "1234567890abcdef1234567890abcdef",
      spanId: "1234567890abcdef",
      traceFlags: 1,
    },
  }],
});
span1.end();

// 2. Link added after the span was started.
const span2 = tracer.startSpan("example span");
span2.addLink({
  context: {
    traceId: "1234567890abcdef1234567890abcdef",
    spanId: "1234567890abcdef",
    traceFlags: 1,
  },
});
span2.end();

// 3. Link with attributes: attributes are currently unsupported, so the
// exporter reports droppedAttributesCount = 1 (explicit) + 1 (`key`) = 2.
const span3 = tracer.startSpan("example span");
span3.addLink({
  context: {
    traceId: "1234567890abcdef1234567890abcdef",
    spanId: "1234567890abcdef",
    traceFlags: 1,
  },
  attributes: {
    key: "value",
  },
  droppedAttributesCount: 1,
});
span3.end();

View file

@ -56,6 +56,7 @@
"hint": [WILDCARD]
}
],
"skipped": [],
"errors": [
{
"file_path": "[WILDCARD]malformed.js",

View file

@ -0,0 +1,5 @@
{
"args": ["lint", "minified.js"],
"output": "minified.out",
"exitCode": 0
}

View file

@ -0,0 +1,5 @@
/*
* Copyright (c) 2024 Example Corp.
* Licensed under MIT License
*/
function f(a,b){return a.concat(b)}var x=function(n){return n+1};function g(a,b){return a.concat(b)}var x=function(n){return n+1};function h(a,b){return a.concat(b)}var x=function(n){return n+1};function i(a,b){return a.concat(b)}var x=function(n){return n+1};function j(a,b){return a.concat(b)}var x=function(n){return n+1};

View file

@ -0,0 +1,3 @@
File was skipped: [WILDCARD]minified.js
The file is minified
Checked 1 file

View file

@ -20,6 +20,7 @@
"hint": [WILDCARD]
}
],
"skipped": [],
"errors": [],
"checked_files": [
"[WILDCARD]main.ts"

View file

@ -38,6 +38,7 @@
"hint": "If this is intentional, prefix it with an underscore like `_add`"
}
],
"skipped": [],
"errors": [],
"checked_files": [
"[WILDCARD]a.ts"

View file

@ -4,7 +4,7 @@ import crypto from "node:crypto";
import { Buffer } from "node:buffer";
import testVectors128 from "./gcmEncryptExtIV128.json" with { type: "json" };
import testVectors256 from "./gcmEncryptExtIV256.json" with { type: "json" };
import { assertEquals } from "@std/assert";
import { assertEquals, assertThrows } from "@std/assert";
const aesGcm = (bits: string, key: Uint8Array) => {
const ALGO = bits == "128" ? `aes-128-gcm` : `aes-256-gcm`;
@ -123,7 +123,7 @@ Deno.test({
// Issue #27441
// https://github.com/denoland/deno/issues/27441
Deno.test({
name: "aes-256-gcm supports IV of non standard length",
name: "aes-256-gcm supports IV of non standard length and auth tag check",
fn() {
const decipher = crypto.createDecipheriv(
"aes-256-gcm",
@ -136,6 +136,10 @@ Deno.test({
"utf-8",
);
assertEquals(decrypted, "this is a secret");
decipher.final();
assertThrows(
() => decipher.final(),
TypeError,
"Failed to authenticate data",
);
},
});