
feat(lint): add JavaScript plugin support (#27203)

This commit adds an unstable lint plugin API.

Plugins are specified in the `deno.json` file under the
`lint.plugins` option, like so:

```json
{
  "lint": {
    "plugins": [
      "./plugins/my-plugin.ts",
      "jsr:@deno/lint-plugin1",
      "npm:@deno/lint-plugin2"
    ]
  }
}
```
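
Judging by the schema and option plumbing in this diff, individual
plugin rules can be excluded through the usual `lint.rules.exclude`
option using the `plugin-name/rule-name` form. A minimal sketch
(the names match the plugin example further down and are only
placeholders):

```json
{
  "lint": {
    "plugins": ["./plugins/my-plugin.ts"],
    "rules": {
      "exclude": ["lint-plugin/plugin-rule"]
    }
  }
}
```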

The API is considered unstable and might be subject
to change in the future.

The plugin API is modelled after the ESLint API for the
most part, but there are no compatibility guarantees.
The AST format exposed to plugins closely follows the
AST used by `typescript-eslint`.

Lint plugins use the visitor pattern and can report
diagnostics like so:

```ts
export default {
  name: "lint-plugin",
  rules: {
    "plugin-rule": {
      create(context) {
        return {
          Identifier(node) {
            if (node.name === "a") {
              context.report({
                node,
                message: "should be b",
                fix(fixer) {
                  return fixer.replaceText(node, "_b");
                },
              });
            }
          },
        };
      },
    },
  },
} satisfies Deno.lint.Plugin;
```
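
The diff also exposes an unstable `Deno.lint.runPlugin(plugin,
fileName, sourceText)` helper (used by the test infrastructure)
that runs a plugin against a source string and returns the reported
diagnostics. A rough sketch of exercising the rule above with it;
the import path is a placeholder and the exact diagnostic shape
may still change:

```ts
import plugin from "./plugins/my-plugin.ts";

Deno.test("lint-plugin/plugin-rule flags `a`", () => {
  // Runs the plugin's rules against the given source text and
  // collects the reported diagnostics instead of printing them.
  const diagnostics = Deno.lint.runPlugin(plugin, "main.ts", "const a = 1;");
  // Each diagnostic carries { id, message, hint, range, fix }.
  if (diagnostics.length !== 1) {
    throw new Error(`expected 1 diagnostic, got ${diagnostics.length}`);
  }
  if (diagnostics[0].id !== "lint-plugin/plugin-rule") {
    throw new Error(`unexpected rule id: ${diagnostics[0].id}`);
  }
});
```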

Besides reporting errors (diagnostics), plugins can provide
automatic fixes that apply changes via text replacement.
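
The fixer passed to `fix()` mirrors a small subset of ESLint's
fixer API: `insertTextBefore`/`insertTextAfter` (plus the `*Range`
variants), `remove`/`removeRange`, and `replaceText`/`replaceTextRange`,
each returning a `{ range, text }` change. A hedged sketch of a rule
that removes a node instead of rewriting it; the plugin and rule names
are illustrative, and it assumes the ts-estree-style `DebuggerStatement`
node type is emitted for `debugger;` statements:

```ts
export default {
  name: "example-plugin",
  rules: {
    "no-debugger-statement": {
      create(context) {
        return {
          DebuggerStatement(node) {
            context.report({
              node,
              message: "remove `debugger` statements before committing",
              // Returning a single change produced by the fixer is enough;
              // the host turns it into a fix applied by `deno lint --fix`.
              fix: (fixer) => fixer.remove(node),
            });
          },
        };
      },
    },
  },
} satisfies Deno.lint.Plugin;
```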

---------

Co-authored-by: Marvin Hagemeister <marvin@deno.com>
Co-authored-by: David Sherret <dsherret@gmail.com>
Bartek Iwańczuk 2025-02-05 16:59:24 +01:00 committed by GitHub
parent 8a07d38a53
commit f08ca6414b
53 changed files with 4219 additions and 2494 deletions

Cargo.lock (generated, 8 lines changed)
View file

@ -2010,9 +2010,9 @@ dependencies = [
[[package]]
name = "deno_lint"
version = "0.70.0"
version = "0.71.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac94db8d8597b96c92d30a68b11d4bec6822dcbb3e8675ab1e0136816a301a34"
checksum = "810d0f4b19cd44061bbe7252ad37cf7a81753540f97f88e1548ac9f03b3a18cc"
dependencies = [
"anyhow",
"deno_ast",
@ -4187,9 +4187,9 @@ dependencies = [
[[package]]
name = "hstr"
version = "0.2.9"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a9de2bdef6354361892492bab5e316b2d78a0ee9971db4d36da9b1eb0e11999"
checksum = "dae404c0c5d4e95d4858876ab02eecd6a196bb8caa42050dfa809938833fc412"
dependencies = [
"hashbrown 0.14.5",
"new_debug_unreachable",

View file

@ -74,7 +74,7 @@ deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] }
deno_error.workspace = true
deno_graph = { version = "=0.87.2" }
deno_lib.workspace = true
deno_lint = { version = "0.70.0" }
deno_lint = { version = "0.71.0" }
deno_lockfile.workspace = true
deno_media_type = { workspace = true, features = ["data_url", "decoding", "module_specifier"] }
deno_npm.workspace = true

View file

@ -499,6 +499,7 @@ impl DenoSubcommand {
| Self::Jupyter(_)
| Self::Repl(_)
| Self::Bench(_)
| Self::Lint(_)
| Self::Lsp
)
}

View file

@ -366,6 +366,7 @@ pub struct LintOptions {
pub rules: LintRulesConfig,
pub files: FilePatterns,
pub fix: bool,
pub plugins: Vec<Url>,
}
impl Default for LintOptions {
@ -380,20 +381,41 @@ impl LintOptions {
rules: Default::default(),
files: FilePatterns::new_with_base(base),
fix: false,
plugins: vec![],
}
}
pub fn resolve(lint_config: LintConfig, lint_flags: &LintFlags) -> Self {
Self {
pub fn resolve(
dir_path: PathBuf,
lint_config: LintConfig,
lint_flags: &LintFlags,
) -> Result<Self, AnyError> {
let rules = resolve_lint_rules_options(
lint_config.options.rules,
lint_flags.maybe_rules_tags.clone(),
lint_flags.maybe_rules_include.clone(),
lint_flags.maybe_rules_exclude.clone(),
);
let plugins = {
let plugin_specifiers = lint_config.options.plugins;
let mut plugins = Vec::with_capacity(plugin_specifiers.len());
for plugin in &plugin_specifiers {
// TODO(bartlomieju): handle import-mapped specifiers
let url = resolve_url_or_path(plugin, &dir_path)?;
plugins.push(url);
}
// ensure stability for hasher
plugins.sort_unstable();
plugins
};
Ok(Self {
files: lint_config.files,
rules: resolve_lint_rules_options(
lint_config.options.rules,
lint_flags.maybe_rules_tags.clone(),
lint_flags.maybe_rules_include.clone(),
lint_flags.maybe_rules_exclude.clone(),
),
rules,
fix: lint_flags.fix,
}
plugins,
})
}
}
@ -759,7 +781,7 @@ impl CliOptions {
.resolve_lint_config_for_members(&cli_arg_patterns)?;
let mut result = Vec::with_capacity(member_configs.len());
for (ctx, config) in member_configs {
let options = LintOptions::resolve(config, lint_flags);
let options = LintOptions::resolve(ctx.dir_path(), config, lint_flags)?;
result.push((ctx, options));
}
Ok(result)

View file

@ -10,9 +10,14 @@ import {
import { core, internals } from "ext:core/mod.js";
const {
op_lint_get_source,
op_lint_report,
op_lint_create_serialized_ast,
op_is_cancelled,
} = core.ops;
let doReport = op_lint_report;
// Keep these in sync with Rust
const AST_IDX_INVALID = 0;
const AST_GROUP_TYPE = 1;
@ -72,29 +77,133 @@ const PropFlags = {
/** @typedef {import("./40_lint_types.d.ts").VisitorFn} VisitorFn */
/** @typedef {import("./40_lint_types.d.ts").CompiledVisitor} CompiledVisitor */
/** @typedef {import("./40_lint_types.d.ts").LintState} LintState */
/** @typedef {import("./40_lint_types.d.ts").RuleContext} RuleContext */
/** @typedef {import("./40_lint_types.d.ts").NodeFacade} NodeFacade */
/** @typedef {import("./40_lint_types.d.ts").LintPlugin} LintPlugin */
/** @typedef {import("./40_lint_types.d.ts").TransformFn} TransformFn */
/** @typedef {import("./40_lint_types.d.ts").MatchContext} MatchContext */
/** @typedef {import("./40_lint_types.d.ts").Node} Node */
/** @type {LintState} */
const state = {
plugins: [],
installedPlugins: new Set(),
ignoredRules: new Set(),
};
function resetState() {
state.plugins = [];
state.installedPlugins.clear();
state.ignoredRules.clear();
}
/**
* This implementation calls into Rust to check if Tokio's cancellation token
* has already been canceled.
*/
class CancellationToken {
isCancellationRequested() {
return op_is_cancelled();
}
}
/** @implements {Deno.lint.Fixer} */
class Fixer {
/**
* @param {Deno.lint.Node} node
* @param {string} text
*/
insertTextAfter(node, text) {
return {
range: /** @type {[number, number]} */ ([node.range[1], node.range[1]]),
text,
};
}
/**
* @param {Deno.lint.Node["range"]} range
* @param {string} text
*/
insertTextAfterRange(range, text) {
return {
range: /** @type {[number, number]} */ ([range[1], range[1]]),
text,
};
}
/**
* @param {Deno.lint.Node} node
* @param {string} text
*/
insertTextBefore(node, text) {
return {
range: /** @type {[number, number]} */ ([node.range[0], node.range[0]]),
text,
};
}
/**
* @param {Deno.lint.Node["range"]} range
* @param {string} text
*/
insertTextBeforeRange(range, text) {
return {
range: /** @type {[number, number]} */ ([range[0], range[0]]),
text,
};
}
/**
* @param {Deno.lint.Node} node
*/
remove(node) {
return {
range: node.range,
text: "",
};
}
/**
* @param {Deno.lint.Node["range"]} range
*/
removeRange(range) {
return {
range,
text: "",
};
}
/**
* @param {Deno.lint.Node} node
* @param {string} text
*/
replaceText(node, text) {
return {
range: node.range,
text,
};
}
/**
* @param {Deno.lint.Node["range"]} range
* @param {string} text
*/
replaceTextRange(range, text) {
return {
range,
text,
};
}
}
/**
* Every rule gets their own instance of this class. This is the main
* API lint rules interact with.
* @implements {RuleContext}
* @implements {Deno.lint.RuleContext}
*/
export class Context {
id;
fileName;
#source = null;
/**
* @param {string} id
* @param {string} fileName
@ -103,18 +212,85 @@ export class Context {
this.id = id;
this.fileName = fileName;
}
source() {
if (this.#source === null) {
this.#source = op_lint_get_source();
}
return /** @type {*} */ (this.#source);
}
/**
* @param {Deno.lint.ReportData} data
*/
report(data) {
const range = data.node ? data.node.range : data.range ? data.range : null;
if (range == null) {
throw new Error(
"Either `node` or `range` must be provided when reporting an error",
);
}
const start = range[0];
const end = range[1];
let fix;
if (typeof data.fix === "function") {
const fixer = new Fixer();
fix = data.fix(fixer);
}
doReport(
this.id,
data.message,
data.hint,
start,
end,
fix,
);
}
}
/**
* @param {LintPlugin} plugin
* @param {Deno.lint.Plugin[]} plugins
* @param {string[]} exclude
*/
export function installPlugin(plugin) {
export function installPlugins(plugins, exclude) {
if (Array.isArray(exclude)) {
for (let i = 0; i < exclude.length; i++) {
state.ignoredRules.add(exclude[i]);
}
}
return plugins.map((plugin) => installPlugin(plugin));
}
/**
* @param {Deno.lint.Plugin} plugin
*/
function installPlugin(plugin) {
if (typeof plugin !== "object") {
throw new Error("Linter plugin must be an object");
}
if (typeof plugin.name !== "string") {
throw new Error("Linter plugin name must be a string");
}
if (!/^[a-z-]+$/.test(plugin.name)) {
throw new Error(
"Linter plugin name must only contain lowercase letters (a-z) or hyphens (-).",
);
}
if (plugin.name.startsWith("-") || plugin.name.endsWith("-")) {
throw new Error(
"Linter plugin name must start and end with a lowercase letter.",
);
}
if (plugin.name.includes("--")) {
throw new Error(
"Linter plugin name must not have consequtive hyphens.",
);
}
if (typeof plugin.rules !== "object") {
throw new Error("Linter plugin rules must be an object");
}
@ -123,6 +299,11 @@ export function installPlugin(plugin) {
}
state.plugins.push(plugin);
state.installedPlugins.add(plugin.name);
return {
name: plugin.name,
ruleNames: Object.keys(plugin.rules),
};
}
/**
@ -285,7 +466,7 @@ function readType(buf, idx) {
/**
* @param {AstContext} ctx
* @param {number} idx
* @returns {Node["range"]}
* @returns {Deno.lint.Node["range"]}
*/
function readSpan(ctx, idx) {
let offset = ctx.spansOffset + (idx * SPAN_SIZE);
@ -765,6 +946,12 @@ export function runPluginsForFile(fileName, serializedAst) {
for (const name of Object.keys(plugin.rules)) {
const rule = plugin.rules[name];
const id = `${plugin.name}/${name}`;
// Check if this rule is excluded
if (state.ignoredRules.has(id)) {
continue;
}
const ctx = new Context(id, fileName);
const visitor = rule.create(ctx);
@ -852,10 +1039,11 @@ export function runPluginsForFile(fileName, serializedAst) {
visitors.push({ info, matcher });
}
const token = new CancellationToken();
// Traverse ast with all visitors at the same time to avoid traversing
// multiple times.
try {
traverse(ctx, visitors, ctx.rootOffset);
traverse(ctx, visitors, ctx.rootOffset, token);
} finally {
ctx.nodes.clear();
@ -870,9 +1058,11 @@ export function runPluginsForFile(fileName, serializedAst) {
* @param {AstContext} ctx
* @param {CompiledVisitor[]} visitors
* @param {number} idx
* @param {CancellationToken} cancellationToken
*/
function traverse(ctx, visitors, idx) {
function traverse(ctx, visitors, idx, cancellationToken) {
if (idx === AST_IDX_INVALID) return;
if (cancellationToken.isCancellationRequested()) return;
const { buf } = ctx;
const nodeType = readType(ctx.buf, idx);
@ -905,12 +1095,12 @@ function traverse(ctx, visitors, idx) {
try {
const childIdx = readChild(buf, idx);
if (childIdx > AST_IDX_INVALID) {
traverse(ctx, visitors, childIdx);
traverse(ctx, visitors, childIdx, cancellationToken);
}
const nextIdx = readNext(buf, idx);
if (nextIdx > AST_IDX_INVALID) {
traverse(ctx, visitors, nextIdx);
traverse(ctx, visitors, nextIdx, cancellationToken);
}
} finally {
if (exits !== null) {
@ -1064,8 +1254,12 @@ function _dump(ctx) {
}
}
// TODO(bartlomieju): this is temporary, until we get plugins plumbed through
// the CLI linter
// These are captured by Rust and called when plugins need to be loaded
// or run.
internals.installPlugins = installPlugins;
internals.runPluginsForFile = runPluginsForFile;
internals.resetState = resetState;
/**
* @param {LintPlugin} plugin
* @param {string} fileName
@ -1074,16 +1268,25 @@ function _dump(ctx) {
function runLintPlugin(plugin, fileName, sourceText) {
installPlugin(plugin);
const diagnostics = [];
doReport = (id, message, hint, start, end, fix) => {
diagnostics.push({
id,
message,
hint,
range: [start, end],
fix,
});
};
try {
const serializedAst = op_lint_create_serialized_ast(fileName, sourceText);
runPluginsForFile(fileName, serializedAst);
} finally {
// During testing we don't want to keep plugins around
state.installedPlugins.clear();
resetState();
}
doReport = op_lint_report;
return diagnostics;
}
// TODO(bartlomieju): this is temporary, until we get plugins plumbed through
// the CLI linter
internals.runLintPlugin = runLintPlugin;
Deno.lint.runPlugin = runLintPlugin;

View file

@ -1,17 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license.
export interface NodeFacade {
type: string;
range: [number, number];
[key: string]: unknown;
}
export interface AstContext {
buf: Uint8Array;
strTable: Map<number, string>;
strTableOffset: number;
rootOffset: number;
nodes: Map<number, NodeFacade>;
nodes: Map<number, Deno.lint.Node>;
spansOffset: number;
propsOffset: number;
strByType: number[];
@ -21,32 +15,11 @@ export interface AstContext {
matcher: MatchContext;
}
export interface Node {
range: Range;
}
export type Range = [number, number];
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface RuleContext {
id: string;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintRule {
create(ctx: RuleContext): Record<string, (node: unknown) => void>;
destroy?(ctx: RuleContext): void;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintPlugin {
name: string;
rules: Record<string, LintRule>;
}
export interface LintState {
plugins: LintPlugin[];
plugins: Deno.lint.Plugin[];
installedPlugins: Set<string>;
/** format: `<plugin>/<rule>` */
ignoredRules: Set<string>;
}
export type VisitorFn = (node: unknown) => void;

View file

@ -38,6 +38,7 @@ use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
use regex::Regex;
use text_lines::LineAndColumnIndex;
use tokio_util::sync::CancellationToken;
use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::Position;
use tower_lsp::lsp_types::Range;
@ -186,8 +187,9 @@ fn as_lsp_range(
pub fn get_lint_references(
parsed_source: &deno_ast::ParsedSource,
linter: &CliLinter,
token: CancellationToken,
) -> Result<Vec<Reference>, AnyError> {
let lint_diagnostics = linter.lint_with_ast(parsed_source);
let lint_diagnostics = linter.lint_with_ast(parsed_source, token)?;
Ok(
lint_diagnostics

View file

@ -1629,14 +1629,46 @@ impl ConfigData {
sloppy_imports_resolver.clone(),
Some(resolver.clone()),
);
let lint_options = LintOptions::resolve(
member_dir.dir_path(),
(*lint_config).clone(),
&LintFlags::default(),
)
.inspect_err(|err| lsp_warn!(" Failed to resolve linter options: {}", err))
.ok()
.unwrap_or_default();
let mut plugin_runner = None;
if !lint_options.plugins.is_empty() {
fn logger_printer(msg: &str, _is_err: bool) {
lsp_log!("pluggin runner - {}", msg);
}
let logger = crate::tools::lint::PluginLogger::new(logger_printer);
let plugin_load_result =
crate::tools::lint::create_runner_and_load_plugins(
lint_options.plugins.clone(),
logger,
lint_options.rules.exclude.clone(),
)
.await;
match plugin_load_result {
Ok(runner) => {
plugin_runner = Some(Arc::new(runner));
}
Err(err) => {
lsp_warn!("Failed to load lint plugins: {}", err);
}
}
}
let linter = Arc::new(CliLinter::new(CliLinterOptions {
configured_rules: lint_rule_provider.resolve_lint_rules(
LintOptions::resolve((*lint_config).clone(), &LintFlags::default())
.rules,
lint_options.rules,
member_dir.maybe_deno_json().map(|c| c.as_ref()),
),
fix: false,
deno_lint_config,
maybe_plugin_runner: plugin_runner,
}));
ConfigData {

View file

@ -1021,6 +1021,8 @@ fn generate_lint_diagnostics(
default_jsx_factory: None,
default_jsx_fragment_factory: None,
},
// TODO(bartlomieju): handle linter plugins here before landing
maybe_plugin_runner: None,
})),
)
});
@ -1032,6 +1034,7 @@ fn generate_lint_diagnostics(
&document,
&lint_config,
&linter,
token.clone(),
),
},
});
@ -1043,6 +1046,7 @@ fn generate_document_lint_diagnostics(
document: &Document,
lint_config: &LintConfig,
linter: &CliLinter,
token: CancellationToken,
) -> Vec<lsp::Diagnostic> {
if !lint_config.files.matches_specifier(document.specifier()) {
return Vec::new();
@ -1050,7 +1054,7 @@ fn generate_document_lint_diagnostics(
match document.maybe_parsed_source() {
Some(Ok(parsed_source)) => {
if let Ok(references) =
analysis::get_lint_references(parsed_source, linter)
analysis::get_lint_references(parsed_source, linter, token)
{
references
.into_iter()

View file

@ -3,11 +3,187 @@
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnostic;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
use deno_ast::SourceTextProvider;
use deno_core::op2;
use deno_core::OpState;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::diagnostic::LintDiagnosticDetails;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_lint::diagnostic::LintFix;
use deno_lint::diagnostic::LintFixChange;
use tokio_util::sync::CancellationToken;
use crate::tools::lint;
use crate::tools::lint::PluginLogger;
use crate::util::text_encoding::Utf16Map;
deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],);
deno_core::extension!(
deno_lint_ext,
ops = [
op_lint_create_serialized_ast,
op_lint_report,
op_lint_get_source,
op_is_cancelled
],
options = {
logger: PluginLogger,
},
// TODO(bartlomieju): this should only be done,
// if not in the "test worker".
middleware = |op| match op.name {
"op_print" => op_print(),
_ => op,
},
state = |state, options| {
state.put(options.logger);
state.put(LintPluginContainer::default());
},
);
deno_core::extension!(
deno_lint_ext_for_test,
ops = [op_lint_create_serialized_ast, op_is_cancelled],
state = |state| {
state.put(LintPluginContainer::default());
},
);
#[derive(Default)]
pub struct LintPluginContainer {
pub diagnostics: Vec<LintDiagnostic>,
pub source_text_info: Option<SourceTextInfo>,
pub utf_16_map: Option<Utf16Map>,
pub specifier: Option<ModuleSpecifier>,
pub token: CancellationToken,
}
impl LintPluginContainer {
pub fn set_cancellation_token(
&mut self,
maybe_token: Option<CancellationToken>,
) {
let token = maybe_token.unwrap_or_default();
self.token = token;
}
pub fn set_info_for_file(
&mut self,
specifier: ModuleSpecifier,
source_text_info: SourceTextInfo,
utf16_map: Utf16Map,
) {
self.specifier = Some(specifier);
self.utf_16_map = Some(utf16_map);
self.source_text_info = Some(source_text_info);
}
fn report(
&mut self,
id: String,
message: String,
hint: Option<String>,
start_utf16: usize,
end_utf16: usize,
fix: Option<LintReportFix>,
) -> Result<(), LintReportError> {
fn out_of_range_err(
map: &Utf16Map,
start_utf16: usize,
end_utf16: usize,
) -> LintReportError {
LintReportError::IncorrectRange {
start: start_utf16,
end: end_utf16,
source_end: map.text_content_length_utf16().into(),
}
}
fn utf16_to_utf8_range(
utf16_map: &Utf16Map,
source_text_info: &SourceTextInfo,
start_utf16: usize,
end_utf16: usize,
) -> Result<SourceRange, LintReportError> {
let Some(start) =
utf16_map.utf16_to_utf8_offset((start_utf16 as u32).into())
else {
return Err(out_of_range_err(utf16_map, start_utf16, end_utf16));
};
let Some(end) = utf16_map.utf16_to_utf8_offset((end_utf16 as u32).into())
else {
return Err(out_of_range_err(utf16_map, start_utf16, end_utf16));
};
let start_pos = source_text_info.start_pos();
Ok(SourceRange::new(
start_pos + start.into(),
start_pos + end.into(),
))
}
let source_text_info = self.source_text_info.as_ref().unwrap();
let utf16_map = self.utf_16_map.as_ref().unwrap();
let specifier = self.specifier.clone().unwrap();
let diagnostic_range =
utf16_to_utf8_range(utf16_map, source_text_info, start_utf16, end_utf16)?;
let range = LintDiagnosticRange {
range: diagnostic_range,
description: None,
text_info: source_text_info.clone(),
};
let mut fixes: Vec<LintFix> = vec![];
if let Some(fix) = fix {
let fix_range = utf16_to_utf8_range(
utf16_map,
source_text_info,
fix.range.0,
fix.range.1,
)?;
fixes.push(LintFix {
changes: vec![LintFixChange {
new_text: fix.text.into(),
range: fix_range,
}],
description: format!("Fix this {} problem", id).into(),
});
}
let lint_diagnostic = LintDiagnostic {
specifier,
range: Some(range),
details: LintDiagnosticDetails {
message,
code: id,
hint,
fixes,
custom_docs_url: None,
info: vec![],
},
};
self.diagnostics.push(lint_diagnostic);
Ok(())
}
}
#[op2(fast)]
pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) {
let logger = state.borrow::<PluginLogger>();
if is_err {
logger.error(msg);
} else {
logger.log(msg);
}
}
#[op2(fast)]
fn op_is_cancelled(state: &mut OpState) -> bool {
let container = state.borrow::<LintPluginContainer>();
container.token.is_cancelled()
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LintError {
@ -41,5 +217,50 @@ fn op_lint_create_serialized_ast(
scope_analysis: false,
maybe_syntax: None,
})?;
Ok(lint::serialize_ast_to_buffer(&parsed_source))
let utf16_map = Utf16Map::new(parsed_source.text().as_ref());
Ok(lint::serialize_ast_to_buffer(&parsed_source, &utf16_map))
}
#[derive(serde::Deserialize)]
struct LintReportFix {
text: String,
range: (usize, usize),
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LintReportError {
#[class(type)]
#[error("Invalid range [{start}, {end}], the source has a range of [0, {source_end}]")]
IncorrectRange {
start: usize,
end: usize,
source_end: u32,
},
}
#[op2]
fn op_lint_report(
state: &mut OpState,
#[string] id: String,
#[string] message: String,
#[string] hint: Option<String>,
#[smi] start_utf16: usize,
#[smi] end_utf16: usize,
#[serde] fix: Option<LintReportFix>,
) -> Result<(), LintReportError> {
let container = state.borrow_mut::<LintPluginContainer>();
container.report(id, message, hint, start_utf16, end_utf16, fix)?;
Ok(())
}
#[op2]
#[string]
fn op_lint_get_source(state: &mut OpState) -> String {
let container = state.borrow_mut::<LintPluginContainer>();
container
.source_text_info
.as_ref()
.unwrap()
.text_str()
.to_string()
}

View file

@ -1,127 +1,135 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"enum": [
"adjacent-overload-signatures",
"ban-ts-comment",
"ban-types",
"ban-unknown-rule-code",
"ban-untagged-ignore",
"ban-untagged-todo",
"ban-unused-ignore",
"camelcase",
"constructor-super",
"default-param-last",
"eqeqeq",
"explicit-function-return-type",
"explicit-module-boundary-types",
"for-direction",
"fresh-handler-export",
"fresh-server-event-handlers",
"getter-return",
"guard-for-in",
"jsx-boolean-value",
"jsx-button-has-type",
"jsx-curly-braces",
"jsx-key",
"jsx-no-children-prop",
"jsx-no-comment-text-nodes",
"jsx-no-duplicate-props",
"jsx-no-unescaped-entities",
"jsx-no-useless-fragment",
"jsx-props-no-spread-multi",
"jsx-void-dom-elements-no-children",
"no-array-constructor",
"no-async-promise-executor",
"no-await-in-loop",
"no-await-in-sync-fn",
"no-boolean-literal-for-arguments",
"no-case-declarations",
"no-class-assign",
"no-compare-neg-zero",
"no-cond-assign",
"no-console",
"no-const-assign",
"no-constant-condition",
"no-control-regex",
"no-debugger",
"no-delete-var",
"no-deprecated-deno-api",
"no-dupe-args",
"no-dupe-class-members",
"no-dupe-else-if",
"no-dupe-keys",
"no-duplicate-case",
"no-empty",
"no-empty-character-class",
"no-empty-enum",
"no-empty-interface",
"no-empty-pattern",
"no-eval",
"no-ex-assign",
"no-explicit-any",
"no-external-import",
"no-extra-boolean-cast",
"no-extra-non-null-assertion",
"no-fallthrough",
"no-func-assign",
"no-global-assign",
"no-implicit-declare-namespace-export",
"no-import-assertions",
"no-import-assign",
"no-inferrable-types",
"no-inner-declarations",
"no-invalid-regexp",
"no-invalid-triple-slash-reference",
"no-irregular-whitespace",
"no-misused-new",
"no-namespace",
"no-new-symbol",
"no-node-globals",
"no-non-null-asserted-optional-chain",
"no-non-null-assertion",
"no-obj-calls",
"no-octal",
"no-process-global",
"no-prototype-builtins",
"no-redeclare",
"no-regex-spaces",
"no-self-assign",
"no-self-compare",
"no-setter-return",
"no-shadow-restricted-names",
"no-sloppy-imports",
"no-slow-types",
"no-sparse-arrays",
"no-sync-fn-in-async-fn",
"no-this-alias",
"no-this-before-super",
"no-throw-literal",
"no-top-level-await",
"no-undef",
"no-unreachable",
"no-unsafe-finally",
"no-unsafe-negation",
"no-unused-labels",
"no-unused-vars",
"no-useless-rename",
"no-var",
"no-window",
"no-window-prefix",
"no-with",
"prefer-as-const",
"prefer-ascii",
"prefer-const",
"prefer-namespace-keyword",
"prefer-primordials",
"react-no-danger",
"react-no-danger-with-children",
"react-rules-of-hooks",
"require-await",
"require-yield",
"single-var-declarator",
"triple-slash-reference",
"use-isnan",
"valid-typeof",
"verbatim-module-syntax"
"oneOf": [
{
"type": "string",
"pattern": "^[a-z0-9-]+\\/[a-z0-9-]+$"
},
{
"enum": [
"adjacent-overload-signatures",
"ban-ts-comment",
"ban-types",
"ban-unknown-rule-code",
"ban-untagged-ignore",
"ban-untagged-todo",
"ban-unused-ignore",
"camelcase",
"constructor-super",
"default-param-last",
"eqeqeq",
"explicit-function-return-type",
"explicit-module-boundary-types",
"for-direction",
"fresh-handler-export",
"fresh-server-event-handlers",
"getter-return",
"guard-for-in",
"jsx-boolean-value",
"jsx-button-has-type",
"jsx-curly-braces",
"jsx-key",
"jsx-no-children-prop",
"jsx-no-comment-text-nodes",
"jsx-no-duplicate-props",
"jsx-no-unescaped-entities",
"jsx-no-useless-fragment",
"jsx-props-no-spread-multi",
"jsx-void-dom-elements-no-children",
"no-array-constructor",
"no-async-promise-executor",
"no-await-in-loop",
"no-await-in-sync-fn",
"no-boolean-literal-for-arguments",
"no-case-declarations",
"no-class-assign",
"no-compare-neg-zero",
"no-cond-assign",
"no-console",
"no-const-assign",
"no-constant-condition",
"no-control-regex",
"no-debugger",
"no-delete-var",
"no-deprecated-deno-api",
"no-dupe-args",
"no-dupe-class-members",
"no-dupe-else-if",
"no-dupe-keys",
"no-duplicate-case",
"no-empty",
"no-empty-character-class",
"no-empty-enum",
"no-empty-interface",
"no-empty-pattern",
"no-eval",
"no-ex-assign",
"no-explicit-any",
"no-external-import",
"no-extra-boolean-cast",
"no-extra-non-null-assertion",
"no-fallthrough",
"no-func-assign",
"no-global-assign",
"no-implicit-declare-namespace-export",
"no-import-assertions",
"no-import-assign",
"no-inferrable-types",
"no-inner-declarations",
"no-invalid-regexp",
"no-invalid-triple-slash-reference",
"no-irregular-whitespace",
"no-misused-new",
"no-namespace",
"no-new-symbol",
"no-node-globals",
"no-non-null-asserted-optional-chain",
"no-non-null-assertion",
"no-obj-calls",
"no-octal",
"no-process-global",
"no-prototype-builtins",
"no-redeclare",
"no-regex-spaces",
"no-self-assign",
"no-self-compare",
"no-setter-return",
"no-shadow-restricted-names",
"no-sloppy-imports",
"no-slow-types",
"no-sparse-arrays",
"no-sync-fn-in-async-fn",
"no-this-alias",
"no-this-before-super",
"no-throw-literal",
"no-top-level-await",
"no-undef",
"no-unreachable",
"no-unsafe-finally",
"no-unsafe-negation",
"no-unused-labels",
"no-unused-vars",
"no-useless-rename",
"no-var",
"no-window",
"no-window-prefix",
"no-with",
"prefer-as-const",
"prefer-ascii",
"prefer-const",
"prefer-namespace-keyword",
"prefer-primordials",
"react-no-danger",
"react-no-danger-with-children",
"react-rules-of-hooks",
"require-await",
"require-yield",
"single-var-declarator",
"triple-slash-reference",
"use-isnan",
"valid-typeof",
"verbatim-module-syntax"
]
}
]
}

View file

@ -6,6 +6,8 @@ use deno_ast::swc::common::Span;
use deno_ast::swc::common::DUMMY_SP;
use indexmap::IndexMap;
use crate::util::text_encoding::Utf16Map;
/// Each property has this flag to mark what kind of value it holds.
/// Plain objects and arrays are not supported yet, but could be easily
/// added if needed.
@ -212,6 +214,15 @@ impl SerializeCtx {
self.root_idx = idx;
}
pub fn map_utf8_spans_to_utf16(&mut self, map: &Utf16Map) {
for value in &mut self.spans {
*value = map
.utf8_to_utf16_offset((*value).into())
.unwrap_or_else(|| panic!("Failed converting '{value}' to utf16."))
.into();
}
}
/// Allocate a node's header
fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags)
where
@ -274,7 +285,13 @@ impl SerializeCtx {
where
K: Into<u8> + Display + Clone,
{
self.append_inner(kind, span.lo.0, span.hi.0)
let (start, end) = if *span == DUMMY_SP {
(0, 0)
} else {
// -1 is because swc stores spans 1-indexed
(span.lo.0 - 1, span.hi.0 - 1)
};
self.append_inner(kind, start, end)
}
pub fn append_inner<K>(

View file

@ -3,11 +3,16 @@
use deno_ast::ParsedSource;
use swc::serialize_swc_to_buffer;
use crate::util::text_encoding::Utf16Map;
mod buffer;
mod swc;
mod ts_estree;
pub fn serialize_ast_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
pub fn serialize_ast_to_buffer(
parsed_source: &ParsedSource,
utf16_map: &Utf16Map,
) -> Vec<u8> {
// TODO: We could support multiple languages here
serialize_swc_to_buffer(parsed_source)
serialize_swc_to_buffer(parsed_source, utf16_map)
}

View file

@ -93,8 +93,12 @@ use super::buffer::NodeRef;
use super::ts_estree::AstNode;
use super::ts_estree::TsEsTreeBuilder;
use super::ts_estree::TsKeywordKind;
use crate::util::text_encoding::Utf16Map;
pub fn serialize_swc_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
pub fn serialize_swc_to_buffer(
parsed_source: &ParsedSource,
utf16_map: &Utf16Map,
) -> Vec<u8> {
let mut ctx = TsEsTreeBuilder::new();
let program = &parsed_source.program();
@ -125,6 +129,7 @@ pub fn serialize_swc_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
}
}
ctx.map_utf8_spans_to_utf16(utf16_map);
ctx.serialize()
}

View file

@ -10,6 +10,7 @@ use deno_ast::view::TruePlusMinus;
use super::buffer::AstBufSerializer;
use super::buffer::NodeRef;
use super::buffer::SerializeCtx;
use crate::util::text_encoding::Utf16Map;
#[derive(Debug, Clone, PartialEq)]
pub enum AstNode {
@ -488,6 +489,10 @@ impl TsEsTreeBuilder {
}
}
pub fn map_utf8_spans_to_utf16(&mut self, map: &Utf16Map) {
self.ctx.map_utf8_spans_to_utf16(map);
}
pub fn write_program(
&mut self,
span: &Span,

View file

@ -1,32 +1,45 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use ::tokio_util::sync::CancellationToken;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt as _;
use deno_core::parking_lot::Mutex;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::ExternalLinterCb;
use deno_lint::linter::ExternalLinterResult;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_lint::linter::LintFileOptions;
use deno_lint::linter::Linter as DenoLintLinter;
use deno_lint::linter::LinterOptions;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_runtime::tokio_util;
use super::plugins;
use super::plugins::PluginHostProxy;
use super::rules::FileOrPackageLintRule;
use super::rules::PackageLintRule;
use super::ConfiguredRules;
use crate::sys::CliSys;
use crate::util::fs::specifier_from_file_path;
use crate::util::text_encoding::Utf16Map;
pub struct CliLinterOptions {
pub configured_rules: ConfiguredRules,
pub fix: bool,
pub deno_lint_config: DenoLintConfig,
pub maybe_plugin_runner: Option<Arc<PluginHostProxy>>,
}
#[derive(Debug)]
@ -35,6 +48,7 @@ pub struct CliLinter {
package_rules: Vec<Box<dyn PackageLintRule>>,
linter: DenoLintLinter,
deno_lint_config: DenoLintConfig,
maybe_plugin_runner: Option<Arc<PluginHostProxy>>,
}
impl CliLinter {
@ -62,6 +76,7 @@ impl CliLinter {
custom_ignore_diagnostic_directive: None,
}),
deno_lint_config: options.deno_lint_config,
maybe_plugin_runner: options.maybe_plugin_runner,
}
}
@ -84,10 +99,22 @@ impl CliLinter {
pub fn lint_with_ast(
&self,
parsed_source: &ParsedSource,
) -> Vec<LintDiagnostic> {
self
.linter
.lint_with_ast(parsed_source, self.deno_lint_config.clone())
token: CancellationToken,
) -> Result<Vec<LintDiagnostic>, AnyError> {
let external_linter_container = ExternalLinterContainer::new(
self.maybe_plugin_runner.clone(),
Some(token),
);
let d = self.linter.lint_with_ast(
parsed_source,
self.deno_lint_config.clone(),
external_linter_container.get_callback(),
);
if let Some(err) = external_linter_container.take_error() {
return Err(err);
}
Ok(d)
}
pub fn lint_file(
@ -105,18 +132,34 @@ impl CliLinter {
MediaType::from_specifier(&specifier)
};
let external_linter_container =
ExternalLinterContainer::new(self.maybe_plugin_runner.clone(), None);
if self.fix {
self.lint_file_and_fix(&specifier, media_type, source_code, file_path)
self.lint_file_and_fix(
&specifier,
media_type,
source_code,
file_path,
external_linter_container,
)
} else {
self
let (source, diagnostics) = self
.linter
.lint_file(LintFileOptions {
specifier,
media_type,
source_code,
config: self.deno_lint_config.clone(),
external_linter: external_linter_container.get_callback(),
})
.map_err(AnyError::from)
.map_err(AnyError::from)?;
if let Some(err) = external_linter_container.take_error() {
return Err(err);
}
Ok((source, diagnostics))
}
}
@ -126,6 +169,7 @@ impl CliLinter {
media_type: MediaType,
source_code: String,
file_path: &Path,
external_linter_container: ExternalLinterContainer,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), deno_core::anyhow::Error> {
// initial lint
let (source, diagnostics) = self.linter.lint_file(LintFileOptions {
@ -133,8 +177,13 @@ impl CliLinter {
media_type,
source_code,
config: self.deno_lint_config.clone(),
external_linter: external_linter_container.get_callback(),
})?;
if let Some(err) = external_linter_container.take_error() {
return Err(err);
}
// Try applying fixes repeatedly until the file has none left or
// a maximum number of iterations is reached. This is necessary
// because lint fixes may overlap and so we can't always apply
@ -148,8 +197,9 @@ impl CliLinter {
media_type,
&self.linter,
self.deno_lint_config.clone(),
source.text_info_lazy(),
&source,
&diagnostics,
&external_linter_container,
)?;
match change {
Some(change) => {
@ -165,7 +215,7 @@ impl CliLinter {
log::warn!(
concat!(
"Reached maximum number of fix iterations for '{}'. There's ",
"probably a bug in Deno. Please fix this file manually.",
"probably a bug in the lint rule. Please fix this file manually.",
),
specifier,
);
@ -193,23 +243,81 @@ fn apply_lint_fixes_and_relint(
media_type: MediaType,
linter: &DenoLintLinter,
config: DenoLintConfig,
text_info: &SourceTextInfo,
original_source: &ParsedSource,
diagnostics: &[LintDiagnostic],
external_linter_container: &ExternalLinterContainer,
) -> Result<Option<(ParsedSource, Vec<LintDiagnostic>)>, AnyError> {
let text_info = original_source.text_info_lazy();
let Some(new_text) = apply_lint_fixes(text_info, diagnostics) else {
return Ok(None);
};
linter
.lint_file(LintFileOptions {
let lint_with_text = |new_text: String| {
let (source, diagnostics) = linter.lint_file(LintFileOptions {
specifier: specifier.clone(),
source_code: new_text,
media_type,
config,
})
.map(Some)
.context(
"An applied lint fix caused a syntax error. Please report this bug.",
)
config: config.clone(),
external_linter: external_linter_container.get_callback(),
})?;
let mut new_diagnostics = source.diagnostics().clone();
new_diagnostics.retain(|d| !original_source.diagnostics().contains(d));
if let Some(diagnostic) = new_diagnostics.pop() {
return Err(AnyError::from(diagnostic));
}
Ok((source, diagnostics))
};
let (source, diagnostics) = match lint_with_text(new_text) {
Ok(result) => result,
Err(err) => {
let utf16_map = Utf16Map::new(text_info.text_str());
// figure out which diagnostic caused a syntax error
let mut diagnostics = diagnostics.to_vec();
while let Some(last_diagnostic) = diagnostics.pop() {
let Some(lint_fix) = last_diagnostic.details.fixes.first() else {
continue;
};
let success = match apply_lint_fixes(text_info, &diagnostics) {
Some(new_text) => lint_with_text(new_text).is_ok(),
None => true,
};
if success {
let mut changes_text = String::new();
for change in &lint_fix.changes {
let utf8_start =
(change.range.start - text_info.range().start) as u32;
let utf8_end = (change.range.end - text_info.range().start) as u32;
let utf16_start = utf16_map
.utf8_to_utf16_offset(utf8_start.into())
.unwrap_or(utf8_start.into());
let utf16_end = utf16_map
.utf8_to_utf16_offset(utf8_end.into())
.unwrap_or(utf8_end.into());
changes_text.push_str(&format!(
"Range: [{}, {}]\n",
u32::from(utf16_start),
u32::from(utf16_end)
));
changes_text.push_str(&format!("Text: {:?}\n\n", &change.new_text));
}
return Err(err).context(format!(
"The '{}' rule caused a syntax error applying '{}'.\n\n{}",
last_diagnostic.details.code, lint_fix.description, changes_text
));
}
}
return Err(err).context(
"A lint fix caused a syntax error. This is a bug in a lint rule.",
);
}
};
if let Some(err) = external_linter_container.take_error() {
return Err(err);
}
Ok(Some((source, diagnostics)))
}
fn apply_lint_fixes(
@ -258,3 +366,94 @@ fn apply_lint_fixes(
deno_ast::apply_text_changes(text_info.text_str(), quick_fixes);
Some(new_text)
}
fn run_plugins(
plugin_runner: Arc<PluginHostProxy>,
parsed_source: ParsedSource,
file_path: PathBuf,
maybe_token: Option<CancellationToken>,
) -> Result<ExternalLinterResult, AnyError> {
let source_text_info = parsed_source.text_info_lazy().clone();
let plugin_info = plugin_runner
.get_plugin_rules()
.into_iter()
.map(Cow::from)
.collect();
let fut = async move {
let utf16_map = Utf16Map::new(parsed_source.text().as_ref());
let serialized_ast =
plugin_runner.serialize_ast(&parsed_source, &utf16_map)?;
plugins::run_rules_for_ast(
&plugin_runner,
&file_path,
serialized_ast,
source_text_info,
utf16_map,
maybe_token,
)
.await
}
.boxed_local();
let plugin_diagnostics = tokio_util::create_and_run_current_thread(fut)?;
Ok(ExternalLinterResult {
diagnostics: plugin_diagnostics,
rules: plugin_info,
})
}
struct ExternalLinterContainer {
cb: Option<ExternalLinterCb>,
error: Option<Arc<Mutex<Option<AnyError>>>>,
}
impl ExternalLinterContainer {
pub fn new(
maybe_plugin_runner: Option<Arc<PluginHostProxy>>,
maybe_token: Option<CancellationToken>,
) -> Self {
let mut s = Self {
cb: None,
error: None,
};
if let Some(plugin_runner) = maybe_plugin_runner {
s.error = Some(Arc::new(Mutex::new(None)));
let error_ = s.error.clone();
let cb = Arc::new(move |parsed_source: ParsedSource| {
let token_ = maybe_token.clone();
let file_path =
match deno_path_util::url_to_file_path(parsed_source.specifier()) {
Ok(path) => path,
Err(err) => {
*error_.as_ref().unwrap().lock() = Some(err.into());
return None;
}
};
let r =
run_plugins(plugin_runner.clone(), parsed_source, file_path, token_);
match r {
Ok(d) => Some(d),
Err(err) => {
*error_.as_ref().unwrap().lock() = Some(err);
None
}
}
});
s.cb = Some(cb);
}
s
}
pub fn get_callback(&self) -> Option<ExternalLinterCb> {
self.cb.clone()
}
pub fn take_error(&self) -> Option<AnyError> {
self.error.as_ref().and_then(|e| e.lock().take())
}
}

View file

@ -26,6 +26,7 @@ use deno_core::serde_json;
use deno_core::unsync::future::LocalFutureExt;
use deno_core::unsync::future::SharedLocal;
use deno_graph::ModuleGraph;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lint::diagnostic::LintDiagnostic;
use log::debug;
use reporters::create_reporter;
@ -55,6 +56,7 @@ use crate::util::sync::AtomicFlag;
mod ast_buffer;
mod linter;
mod plugins;
mod reporters;
mod rules;
@ -62,6 +64,8 @@ mod rules;
pub use ast_buffer::serialize_ast_to_buffer;
pub use linter::CliLinter;
pub use linter::CliLinterOptions;
pub use plugins::create_runner_and_load_plugins;
pub use plugins::PluginLogger;
pub use rules::collect_no_slow_type_diagnostics;
pub use rules::ConfiguredRules;
pub use rules::LintRuleProvider;
@ -282,18 +286,52 @@ impl WorkspaceLinter {
) -> Result<(), AnyError> {
self.file_count += paths.len();
let exclude = lint_options.rules.exclude.clone();
let plugin_specifiers = lint_options.plugins.clone();
let lint_rules = self.lint_rule_provider.resolve_lint_rules_err_empty(
lint_options.rules,
member_dir.maybe_deno_json().map(|c| c.as_ref()),
)?;
let maybe_incremental_cache =
lint_rules.incremental_cache_state().map(|state| {
Arc::new(IncrementalCache::new(
self.caches.lint_incremental_cache_db(),
CacheDBHash::from_hashable(&state),
&paths,
))
});
let mut maybe_incremental_cache = None;
// TODO(bartlomieju): how do we decide if plugins support incremental cache?
if lint_rules.supports_incremental_cache() {
let mut hasher = FastInsecureHasher::new_deno_versioned();
hasher.write_hashable(lint_rules.incremental_cache_state());
if !plugin_specifiers.is_empty() {
hasher.write_hashable(&plugin_specifiers);
}
let state_hash = hasher.finish();
maybe_incremental_cache = Some(Arc::new(IncrementalCache::new(
self.caches.lint_incremental_cache_db(),
CacheDBHash::new(state_hash),
&paths,
)));
}
#[allow(clippy::print_stdout)]
#[allow(clippy::print_stderr)]
fn logger_printer(msg: &str, is_err: bool) {
if is_err {
eprint!("{}", msg);
} else {
print!("{}", msg);
}
}
let mut plugin_runner = None;
if !plugin_specifiers.is_empty() {
let logger = plugins::PluginLogger::new(logger_printer);
let runner = plugins::create_runner_and_load_plugins(
plugin_specifiers,
logger,
exclude,
)
.await?;
plugin_runner = Some(Arc::new(runner));
}
let linter = Arc::new(CliLinter::new(CliLinterOptions {
configured_rules: lint_rules,
@ -301,6 +339,7 @@ impl WorkspaceLinter {
deno_lint_config: self
.tsconfig_resolver
.deno_lint_config(member_dir.dir_url())?,
maybe_plugin_runner: plugin_runner,
}));
let has_error = self.has_error.clone();
@ -543,7 +582,8 @@ fn lint_stdin(
.to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
let deno_lint_config =
tsconfig_resolver.deno_lint_config(start_dir.dir_url())?;
let lint_options = LintOptions::resolve(lint_config, &lint_flags);
let lint_options =
LintOptions::resolve(start_dir.dir_path(), lint_config, &lint_flags)?;
let configured_rules = lint_rule_provider.resolve_lint_rules_err_empty(
lint_options.rules,
start_dir.maybe_deno_json().map(|c| c.as_ref()),
@ -561,6 +601,7 @@ fn lint_stdin(
fix: false,
configured_rules,
deno_lint_config,
maybe_plugin_runner: None,
});
let r = linter
@ -624,13 +665,24 @@ mod tests {
use super::*;
#[derive(Serialize, Deserialize)]
struct RulesPattern {
r#type: String,
pattern: String,
}
#[derive(Serialize, Deserialize)]
struct RulesEnum {
r#enum: Vec<String>,
}
#[derive(Serialize, Deserialize)]
struct RulesSchema {
#[serde(rename = "$schema")]
schema: String,
#[serde(rename = "enum")]
rules: Vec<String>,
#[serde(rename = "oneOf")]
one_of: (RulesPattern, RulesEnum),
}
fn get_all_rules() -> Vec<String> {
@ -661,25 +713,25 @@ mod tests {
const UPDATE_ENV_VAR_NAME: &str = "UPDATE_EXPECTED";
let rules_list = schema.one_of.1.r#enum;
if std::env::var(UPDATE_ENV_VAR_NAME).ok().is_none() {
assert_eq!(
schema.rules, all_rules,
rules_list, all_rules,
"Lint rules schema file not up to date. Run again with {}=1 to update the expected output",
UPDATE_ENV_VAR_NAME
);
return;
}
let new_schema = RulesSchema {
schema: schema.schema,
one_of: (schema.one_of.0, RulesEnum { r#enum: all_rules }),
};
std::fs::write(
&rules_schema_path,
format!(
"{}\n",
serde_json::to_string_pretty(&RulesSchema {
schema: schema.schema,
rules: all_rules,
})
.unwrap(),
),
format!("{}\n", serde_json::to_string_pretty(&new_schema).unwrap(),),
)
.unwrap();
}

cli/tools/lint/plugins.rs (new file, 543 lines)
View file

@ -0,0 +1,543 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use ::tokio_util::sync::CancellationToken;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::error::JsError;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::resolve_url_or_path;
use deno_core::v8;
use deno_core::PollEventLoopOptions;
use deno_lint::diagnostic::LintDiagnostic;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::tokio_util;
use deno_runtime::worker::MainWorker;
use deno_runtime::WorkerExecutionMode;
use tokio::sync::mpsc::channel;
use tokio::sync::mpsc::Receiver;
use tokio::sync::mpsc::Sender;
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::LintFlags;
use crate::factory::CliFactory;
use crate::ops::lint::LintPluginContainer;
use crate::tools::lint::serialize_ast_to_buffer;
use crate::util::text_encoding::Utf16Map;
#[derive(Debug)]
pub enum PluginHostRequest {
LoadPlugins {
specifiers: Vec<ModuleSpecifier>,
exclude_rules: Option<Vec<String>>,
},
Run {
serialized_ast: Vec<u8>,
file_path: PathBuf,
source_text_info: SourceTextInfo,
utf16_map: Utf16Map,
maybe_token: Option<CancellationToken>,
},
}
pub enum PluginHostResponse {
// TODO: write to structs
LoadPlugin(Result<Vec<PluginInfo>, AnyError>),
Run(Result<Vec<LintDiagnostic>, AnyError>),
}
impl std::fmt::Debug for PluginHostResponse {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::LoadPlugin(_arg0) => f.debug_tuple("LoadPlugin").finish(),
Self::Run(_arg0) => f.debug_tuple("Run").finish(),
}
}
}
#[derive(Clone, Debug)]
pub struct PluginLogger {
print: fn(&str, bool),
}
impl PluginLogger {
pub fn new(print: fn(&str, bool)) -> Self {
Self { print }
}
pub fn log(&self, msg: &str) {
(self.print)(msg, false);
}
pub fn error(&self, msg: &str) {
(self.print)(msg, true);
}
}
macro_rules! v8_static_strings {
($($ident:ident = $str:literal),* $(,)?) => {
$(
pub static $ident: deno_core::FastStaticString = deno_core::ascii_str!($str);
)*
};
}
v8_static_strings! {
DEFAULT = "default",
INSTALL_PLUGINS = "installPlugins",
RUN_PLUGINS_FOR_FILE = "runPluginsForFile",
}
#[derive(Debug)]
pub struct PluginHostProxy {
tx: Sender<PluginHostRequest>,
rx: Arc<tokio::sync::Mutex<Receiver<PluginHostResponse>>>,
pub(crate) plugin_info: Arc<Mutex<Vec<PluginInfo>>>,
#[allow(unused)]
join_handle: std::thread::JoinHandle<Result<(), AnyError>>,
}
impl PluginHostProxy {
pub fn get_plugin_rules(&self) -> Vec<String> {
let infos = self.plugin_info.lock();
let mut all_names = vec![];
for info in infos.iter() {
all_names.extend_from_slice(&info.get_rules());
}
all_names
}
}
pub struct PluginHost {
worker: MainWorker,
install_plugins_fn: Rc<v8::Global<v8::Function>>,
run_plugins_for_file_fn: Rc<v8::Global<v8::Function>>,
tx: Sender<PluginHostResponse>,
rx: Receiver<PluginHostRequest>,
}
async fn create_plugin_runner_inner(
logger: PluginLogger,
rx_req: Receiver<PluginHostRequest>,
tx_res: Sender<PluginHostResponse>,
) -> Result<PluginHost, AnyError> {
let flags = Flags {
subcommand: DenoSubcommand::Lint(LintFlags::default()),
..Default::default()
};
let flags = Arc::new(flags);
let factory = CliFactory::from_flags(flags.clone());
let cli_options = factory.cli_options()?;
let main_module =
resolve_url_or_path("./$deno$lint.mts", cli_options.initial_cwd()).unwrap();
let perm_parser = factory.permission_desc_parser()?;
let permissions = Permissions::from_options(
perm_parser.as_ref(),
&cli_options.permissions_options(),
)?;
let permissions = PermissionsContainer::new(perm_parser.clone(), permissions);
// let npm_resolver = factory.npm_resolver().await?.clone();
// let resolver = factory.resolver().await?.clone();
let worker_factory = factory.create_cli_main_worker_factory().await?;
let worker = worker_factory
.create_custom_worker(
// TODO(bartlomieju): add "lint" execution mode
WorkerExecutionMode::Run,
main_module.clone(),
permissions,
vec![crate::ops::lint::deno_lint_ext::init_ops(logger.clone())],
Default::default(),
)
.await?;
let mut worker = worker.into_main_worker();
let runtime = &mut worker.js_runtime;
let obj = runtime.execute_script("lint.js", "Deno[Deno.internal]")?;
log::debug!("Lint plugins loaded, capturing default exports");
let (install_plugins_fn, run_plugins_for_file_fn) = {
let scope = &mut runtime.handle_scope();
let module_exports: v8::Local<v8::Object> =
v8::Local::new(scope, obj).try_into().unwrap();
let install_plugins_fn_name = INSTALL_PLUGINS.v8_string(scope).unwrap();
let install_plugins_fn_val = module_exports
.get(scope, install_plugins_fn_name.into())
.unwrap();
let install_plugins_fn: v8::Local<v8::Function> =
install_plugins_fn_val.try_into().unwrap();
let run_plugins_for_file_fn_name =
RUN_PLUGINS_FOR_FILE.v8_string(scope).unwrap();
let run_plugins_for_file_fn_val = module_exports
.get(scope, run_plugins_for_file_fn_name.into())
.unwrap();
let run_plugins_for_file_fn: v8::Local<v8::Function> =
run_plugins_for_file_fn_val.try_into().unwrap();
(
Rc::new(v8::Global::new(scope, install_plugins_fn)),
Rc::new(v8::Global::new(scope, run_plugins_for_file_fn)),
)
};
Ok(PluginHost {
worker,
install_plugins_fn,
run_plugins_for_file_fn,
tx: tx_res,
rx: rx_req,
})
}
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PluginInfo {
pub name: String,
pub rule_names: Vec<String>,
}
impl PluginInfo {
pub fn get_rules(&self) -> Vec<String> {
let mut rules = Vec::with_capacity(self.rule_names.len());
for rule_name in &self.rule_names {
rules.push(format!("{}/{}", self.name, rule_name));
}
rules
}
}
impl PluginHost {
fn create(logger: PluginLogger) -> Result<PluginHostProxy, AnyError> {
let (tx_req, rx_req) = channel(10);
let (tx_res, rx_res) = channel(10);
let logger_ = logger.clone();
let join_handle = std::thread::spawn(move || {
let logger = logger_;
log::debug!("Lint PluginHost thread spawned");
let start = std::time::Instant::now();
let fut = async move {
let runner =
create_plugin_runner_inner(logger.clone(), rx_req, tx_res).await?;
log::debug!("Lint PlugibnHost running loop");
runner.run_loop().await?;
log::debug!(
"Lint PluginHost thread finished, took {:?}",
std::time::Instant::now() - start
);
Ok(())
}
.boxed_local();
tokio_util::create_and_run_current_thread(fut)
});
let proxy = PluginHostProxy {
tx: tx_req,
rx: Arc::new(tokio::sync::Mutex::new(rx_res)),
plugin_info: Arc::new(Mutex::new(vec![])),
join_handle,
};
Ok(proxy)
}
async fn run_loop(mut self) -> Result<(), AnyError> {
log::debug!("Lint PluginHost is waiting for message");
while let Some(req) = self.rx.recv().await {
log::debug!("Lint PluginHost has received a message");
match req {
PluginHostRequest::LoadPlugins {
specifiers,
exclude_rules,
} => {
let r = self.load_plugins(specifiers, exclude_rules).await;
let _ = self.tx.send(PluginHostResponse::LoadPlugin(r)).await;
}
PluginHostRequest::Run {
serialized_ast,
file_path,
source_text_info,
utf16_map,
maybe_token,
} => {
let start = std::time::Instant::now();
let r = match self.run_plugins(
&file_path,
serialized_ast,
source_text_info,
utf16_map,
maybe_token,
) {
Ok(()) => Ok(self.take_diagnostics()),
Err(err) => Err(err),
};
log::debug!(
"Running plugins lint rules took {:?}",
std::time::Instant::now() - start
);
let _ = self.tx.send(PluginHostResponse::Run(r)).await;
}
}
}
log::debug!("Lint PluginHost run loop finished");
Ok(())
}
fn take_diagnostics(&mut self) -> Vec<LintDiagnostic> {
let op_state = self.worker.js_runtime.op_state();
let mut state = op_state.borrow_mut();
let container = state.borrow_mut::<LintPluginContainer>();
std::mem::take(&mut container.diagnostics)
}
fn run_plugins(
&mut self,
file_path: &Path,
serialized_ast: Vec<u8>,
source_text_info: SourceTextInfo,
utf16_map: Utf16Map,
maybe_token: Option<CancellationToken>,
) -> Result<(), AnyError> {
{
let state = self.worker.js_runtime.op_state();
let mut state = state.borrow_mut();
let container = state.borrow_mut::<LintPluginContainer>();
container.set_info_for_file(
ModuleSpecifier::from_file_path(file_path).unwrap(),
source_text_info,
utf16_map,
);
container.set_cancellation_token(maybe_token);
}
let scope = &mut self.worker.js_runtime.handle_scope();
let file_name_v8: v8::Local<v8::Value> =
v8::String::new(scope, &file_path.display().to_string())
.unwrap()
.into();
let store = v8::ArrayBuffer::new_backing_store_from_vec(serialized_ast);
let ast_buf =
v8::ArrayBuffer::with_backing_store(scope, &store.make_shared());
let ast_bin_v8: v8::Local<v8::Value> =
v8::Uint8Array::new(scope, ast_buf, 0, ast_buf.byte_length())
.unwrap()
.into();
let run_plugins_for_file =
v8::Local::new(scope, &*self.run_plugins_for_file_fn);
let undefined = v8::undefined(scope);
let mut tc_scope = v8::TryCatch::new(scope);
let _run_plugins_result = run_plugins_for_file.call(
&mut tc_scope,
undefined.into(),
&[file_name_v8, ast_bin_v8],
);
if let Some(exception) = tc_scope.exception() {
let error = JsError::from_v8_exception(&mut tc_scope, exception);
let core_err = CoreError::Js(error);
return Err(core_err.into());
}
drop(tc_scope);
Ok(())
}
async fn load_plugins(
&mut self,
plugin_specifiers: Vec<ModuleSpecifier>,
exclude: Option<Vec<String>>,
) -> Result<Vec<PluginInfo>, AnyError> {
let mut load_futures = Vec::with_capacity(plugin_specifiers.len());
for specifier in plugin_specifiers {
let mod_id = self
.worker
.js_runtime
.load_side_es_module(&specifier)
.await?;
let mod_future =
self.worker.js_runtime.mod_evaluate(mod_id).boxed_local();
load_futures.push((mod_future, mod_id));
}
self
.worker
.js_runtime
.run_event_loop(PollEventLoopOptions::default())
.await?;
let mut plugin_handles = Vec::with_capacity(load_futures.len());
for (fut, mod_id) in load_futures {
fut.await?;
let module = self.worker.js_runtime.get_module_namespace(mod_id).unwrap();
let scope = &mut self.worker.js_runtime.handle_scope();
let module_local = v8::Local::new(scope, module);
let default_export_str = DEFAULT.v8_string(scope).unwrap();
let default_export =
module_local.get(scope, default_export_str.into()).unwrap();
let default_export_global = v8::Global::new(scope, default_export);
plugin_handles.push(default_export_global);
}
let scope = &mut self.worker.js_runtime.handle_scope();
let install_plugins_local =
v8::Local::new(scope, &*self.install_plugins_fn.clone());
let exclude_v8: v8::Local<v8::Value> =
exclude.map_or(v8::null(scope).into(), |v| {
let elems = v
.iter()
.map(|item| v8::String::new(scope, item).unwrap().into())
.collect::<Vec<_>>();
v8::Array::new_with_elements(scope, elems.as_slice()).into()
});
let undefined = v8::undefined(scope);
let local_handles = {
let arr = v8::Array::new(scope, plugin_handles.len().try_into().unwrap());
for (idx, plugin_handle) in plugin_handles.into_iter().enumerate() {
let handle = v8::Local::new(scope, plugin_handle);
arr
.set_index(scope, idx.try_into().unwrap(), handle)
.unwrap();
}
arr
};
let args = &[local_handles.into(), exclude_v8];
log::debug!("Installing lint plugins...");
let mut tc_scope = v8::TryCatch::new(scope);
let plugins_info_result =
install_plugins_local.call(&mut tc_scope, undefined.into(), args);
if let Some(exception) = tc_scope.exception() {
let error = JsError::from_v8_exception(&mut tc_scope, exception);
return Err(error.into());
}
drop(tc_scope);
let plugins_info = plugins_info_result.unwrap();
let infos: Vec<PluginInfo> =
deno_core::serde_v8::from_v8(scope, plugins_info)?;
log::debug!("Plugins installed: {}", infos.len());
Ok(infos)
}
}
impl PluginHostProxy {
pub async fn load_plugins(
&self,
specifiers: Vec<ModuleSpecifier>,
exclude_rules: Option<Vec<String>>,
) -> Result<(), AnyError> {
self
.tx
.send(PluginHostRequest::LoadPlugins {
specifiers,
exclude_rules,
})
.await?;
let mut rx = self.rx.lock().await;
if let Some(val) = rx.recv().await {
let PluginHostResponse::LoadPlugin(result) = val else {
unreachable!()
};
let infos = result?;
*self.plugin_info.lock() = infos;
return Ok(());
}
bail!("Plugin host has closed")
}
pub async fn run_rules(
&self,
specifier: &Path,
serialized_ast: Vec<u8>,
source_text_info: SourceTextInfo,
utf16_map: Utf16Map,
maybe_token: Option<CancellationToken>,
) -> Result<Vec<LintDiagnostic>, AnyError> {
self
.tx
.send(PluginHostRequest::Run {
serialized_ast,
file_path: specifier.to_path_buf(),
source_text_info,
utf16_map,
maybe_token,
})
.await?;
let mut rx = self.rx.lock().await;
if let Some(PluginHostResponse::Run(diagnostics_result)) = rx.recv().await {
return diagnostics_result;
}
bail!("Plugin host has closed")
}
pub fn serialize_ast(
&self,
parsed_source: &ParsedSource,
utf16_map: &Utf16Map,
) -> Result<Vec<u8>, AnyError> {
let start = std::time::Instant::now();
let r = serialize_ast_to_buffer(parsed_source, utf16_map);
log::debug!(
"Serializing an AST took {:?}",
std::time::Instant::now() - start
);
Ok(r)
}
}
pub async fn create_runner_and_load_plugins(
plugin_specifiers: Vec<ModuleSpecifier>,
logger: PluginLogger,
exclude: Option<Vec<String>>,
) -> Result<PluginHostProxy, AnyError> {
let host_proxy = PluginHost::create(logger)?;
host_proxy.load_plugins(plugin_specifiers, exclude).await?;
Ok(host_proxy)
}
pub async fn run_rules_for_ast(
host_proxy: &PluginHostProxy,
specifier: &Path,
serialized_ast: Vec<u8>,
source_text_info: SourceTextInfo,
utf16_map: Utf16Map,
maybe_token: Option<CancellationToken>,
) -> Result<Vec<LintDiagnostic>, AnyError> {
let d = host_proxy
.run_rules(
specifier,
serialized_ast,
source_text_info,
utf16_map,
maybe_token,
)
.await?;
Ok(d)
}
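
The host above collects each plugin module's default export, passes the array to a JS-side install function together with the optional exclude list, and deserializes the returned plugin metadata. A minimal TypeScript sketch of what that exchange implies follows; the `installPlugins` name and the exact `PluginInfo` fields are assumptions for illustration, not the actual internal runtime API.

```
// Hypothetical sketch only: the function name and PluginInfo shape are assumed.
interface PluginInfo {
  name: string; // assumed: plugin name reported back to the Rust host
  ruleNames: string[]; // assumed: qualified rule codes, e.g. "my-plugin/my-rule"
}

function installPlugins(
  plugins: Deno.lint.Plugin[], // default exports collected by the host
  exclude: string[] | null, // rule codes to skip, or null when none are excluded
): PluginInfo[] {
  return plugins.map((plugin) => ({
    name: plugin.name,
    ruleNames: Object.keys(plugin.rules)
      .map((rule) => `${plugin.name}/${rule}`)
      .filter((code) => !exclude?.includes(code)),
  }));
}
```

Under these assumptions, excluded rules are simply dropped from the reported metadata; the real install logic lives in the JS lint runtime and may differ.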

View file

@ -2,9 +2,11 @@
use deno_ast::diagnostics::Diagnostic;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::serde_json;
use deno_lint::diagnostic::LintDiagnostic;
use deno_runtime::colors;
use deno_runtime::fmt_errors::format_js_error;
use log::info;
use serde::Serialize;
@ -53,7 +55,19 @@ impl LintReporter for PrettyLintReporter {
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
log::error!("Error linting: {file_path}");
log::error!(" {err}");
let text =
if let Some(CoreError::Js(js_error)) = err.downcast_ref::<CoreError>() {
format_js_error(js_error)
} else {
format!("{err:#}")
};
for line in text.split('\n') {
if line.is_empty() {
log::error!("");
} else {
log::error!(" {}", line);
}
}
}
fn close(&mut self, check_count: usize) {

View file

@ -122,16 +122,16 @@ impl CliLintRule {
#[derive(Debug)]
pub struct ConfiguredRules {
pub all_rule_codes: HashSet<&'static str>,
pub all_rule_codes: HashSet<Cow<'static, str>>,
pub rules: Vec<CliLintRule>,
}
impl ConfiguredRules {
pub fn incremental_cache_state(&self) -> Option<impl std::hash::Hash> {
if self.rules.iter().any(|r| !r.supports_incremental_cache()) {
return None;
}
pub fn supports_incremental_cache(&self) -> bool {
self.rules.iter().all(|r| r.supports_incremental_cache())
}
pub fn incremental_cache_state(&self) -> impl std::hash::Hash {
// use a hash of the rule names in order to bust the cache
let mut codes = self.rules.iter().map(|r| r.code()).collect::<Vec<_>>();
// ensure this is stable by sorting it
@ -195,7 +195,7 @@ impl LintRuleProvider {
let all_rules = self.all_rules();
let mut all_rule_names = HashSet::with_capacity(all_rules.len());
for rule in &all_rules {
all_rule_names.insert(rule.code());
all_rule_names.insert(rule.code().into());
}
let rules = filtered_rules(
all_rules.into_iter(),

View file

@ -626,7 +626,7 @@ async fn configure_main_worker(
permissions_container,
vec![
ops::testing::deno_test::init_ops(worker_sender.sender),
ops::lint::deno_lint::init_ops(),
ops::lint::deno_lint_ext_for_test::init_ops(),
],
Stdio {
stdin: StdioPipe::inherit(),

View file

@ -1344,6 +1344,139 @@ declare namespace Deno {
export {}; // only export exports
}
/**
* @category Linter
* @experimental
*/
export namespace lint {
/**
* @category Linter
* @experimental
*/
export type Range = [number, number];
/**
* @category Linter
* @experimental
*/
export interface Node {
type: string;
range: Range;
[key: string]: unknown;
}
/**
* @category Linter
* @experimental
*/
export interface FixData {
range: Range;
text?: string;
}
/**
* @category Linter
* @experimental
*/
export interface Fixer {
insertTextAfter(node: Node, text: string): FixData;
insertTextAfterRange(range: Range, text: string): FixData;
insertTextBefore(node: Node, text: string): FixData;
insertTextBeforeRange(range: Range, text: string): FixData;
remove(node: Node): FixData;
removeRange(range: Range): FixData;
replaceText(node: Node, text: string): FixData;
replaceTextRange(range: Range, text: string): FixData;
}
/**
* @category Linter
* @experimental
*/
export interface ReportData {
node?: Node;
range?: Range;
message: string;
hint?: string;
fix?(fixer: Fixer): FixData;
}
/**
* @category Linter
* @experimental
*/
export interface RuleContext {
id: string;
report(data: ReportData): void;
}
/**
* @category Linter
* @experimental
*/
export interface Rule {
create(ctx: RuleContext): Record<string, (node: unknown) => void>;
destroy?(ctx: RuleContext): void;
}
/**
* In your plugins file do something like
*
* ```ts
* export default {
* name: "my-plugin",
* rules: {
* "no-foo": {
* create(ctx) {
* return {
* VariableDeclaration(node) {}
* }
* }
* }
* }
* } satisfies Deno.lint.Plugin
* ```
* @category Linter
* @experimental
*/
export interface Plugin {
name: string;
rules: Record<string, Rule>;
}
/**
* @category Linter
* @experimental
*/
export interface Fix {
range: Range;
text?: string;
}
/**
* @category Linter
* @experimental
*/
export interface Diagnostic {
id: string;
message: string;
hint?: string;
range: Range;
fix?: Fix;
}
/**
* This API is a noop in `deno run`...
* @category Linter
* @experimental
*/
export function runPlugin(
plugin: Plugin,
fileName: string,
source: string,
): Diagnostic[];
}
export {}; // only export exports
}
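
A small usage sketch of the API declared above, run in a context where the lint ops are wired up (for example `deno test`, as in the unit tests later in this change) rather than plain `deno run`, where `Deno.lint.runPlugin` is a noop. The `no-debugger` rule is made up for illustration and assumes the ESTree-style `DebuggerStatement` node name.

```
// Report every `debugger` statement, then run the plugin against an in-memory source.
const plugin: Deno.lint.Plugin = {
  name: "example-plugin",
  rules: {
    "no-debugger": {
      create(ctx) {
        return {
          DebuggerStatement(node) {
            ctx.report({
              node: node as Deno.lint.Node,
              message: "`debugger` should not be committed",
            });
          },
        };
      },
    },
  },
};

const diagnostics = Deno.lint.runPlugin(
  plugin,
  "main.ts",
  "debugger;\nconsole.log(1);",
);
for (const d of diagnostics) {
  console.log(`${d.id} [${d.range[0]}, ${d.range[1]}]: ${d.message}`);
}
```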

View file

@ -230,6 +230,52 @@ impl Utf16Map {
column_index: col.into(),
}
}
/// Convert a UTF-16 code unit offset to a UTF-8 byte offset
pub fn utf16_to_utf8_offset(
&self,
utf16_offset: TextSize,
) -> Option<TextSize> {
if utf16_offset > self.text_content_length_utf16() {
return None;
}
let pos = self.position_utf16(utf16_offset);
let line_start_utf8 = self.utf8_offsets[pos.line_index];
let col_utf8 =
self.utf16_to_utf8_col(pos.line_index as u32, pos.column_index as u32);
Some(line_start_utf8 + col_utf8)
}
/// Convert a UTF-8 byte offset to a UTF-16 code unit offset
pub fn utf8_to_utf16_offset(
&self,
utf8_offset: TextSize,
) -> Option<TextSize> {
if utf8_offset > *self.utf8_offsets.last()? {
return None;
}
let line = partition_point(&self.utf8_offsets, |&it| it <= utf8_offset) - 1;
let line_start_utf8 = self.utf8_offsets[line];
let col_utf8 = utf8_offset - line_start_utf8;
let col_utf16 = self.utf8_to_utf16_col(line as u32, col_utf8);
Some(self.utf16_offsets[line] + TextSize::from(col_utf16))
}
fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> u32 {
let mut utf16_col = u32::from(col);
if let Some(utf16_chars) = self.utf16_lines.get(&line) {
for c in utf16_chars {
if col > c.start {
utf16_col -= u32::from(c.len()) - c.len_utf16() as u32;
} else {
break;
}
}
}
utf16_col
}
}
fn partition_point<T, P>(slice: &[T], mut predicate: P) -> usize
@ -490,4 +536,47 @@ const C: char = \"メ メ\";
assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
}
#[test]
fn test_offset_out_of_range() {
let text = "hello";
let map = Utf16Map::new(text);
assert_eq!(map.utf8_to_utf16_offset(TextSize::from(10)), None);
assert_eq!(map.utf16_to_utf8_offset(TextSize::from(10)), None);
}
#[test]
fn test_offset_basic_ascii() {
let text = "hello\nworld";
let map = Utf16Map::new(text);
let utf8_offset = TextSize::from(7);
let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap();
assert_eq!(utf16_offset, TextSize::from(7));
let result = map.utf16_to_utf8_offset(utf16_offset).unwrap();
assert_eq!(result, utf8_offset);
}
#[test]
fn test_offset_emoji() {
let text = "hi 👋\nbye";
let map = Utf16Map::new(text);
let utf8_offset = TextSize::from(3);
let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap();
assert_eq!(utf16_offset, TextSize::from(3));
let utf8_offset_after = TextSize::from(7);
let utf16_offset_after =
map.utf8_to_utf16_offset(utf8_offset_after).unwrap();
assert_eq!(utf16_offset_after, TextSize::from(5));
for (utf8_offset, _) in text.char_indices() {
let utf8_offset = TextSize::from(utf8_offset as u32);
let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap();
let reverse_utf8_offset = map.utf16_to_utf8_offset(utf16_offset).unwrap();
assert_eq!(reverse_utf8_offset, utf8_offset);
}
}
}
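
For intuition about why `utf8_to_utf16_offset` and `utf16_to_utf8_offset` are needed: JavaScript strings (and, it appears, the plugin-facing ranges) are indexed in UTF-16 code units, while the Rust source text uses UTF-8 byte offsets, and characters outside the Basic Multilingual Plane have different widths in each. A short TypeScript illustration of the same values exercised by `test_offset_emoji`, using only the standard `TextEncoder`:

```
// Mirrors test_offset_emoji above: the emoji starts at offset 3 in both encodings,
// but the position right after it is UTF-8 offset 7 vs UTF-16 offset 5.
const source = "hi 👋";
const utf16Len = source.length; // 5 UTF-16 code units
const utf8Len = new TextEncoder().encode(source).length; // 7 UTF-8 bytes
console.log(utf16Len, utf8Len); // 5 7
```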

View file

@ -94,6 +94,7 @@ import { bootstrap as bootstrapOtel } from "ext:deno_telemetry/telemetry.ts";
if (Symbol.metadata) {
throw "V8 supports Symbol.metadata now, no need to shim it";
}
ObjectDefineProperties(Symbol, {
dispose: {
__proto__: null,
@ -533,7 +534,10 @@ const NOT_IMPORTED_OPS = [
"op_base64_encode",
// Used in the lint API
"op_lint_report",
"op_lint_get_source",
"op_lint_create_serialized_ast",
"op_is_cancelled",
// Related to `Deno.test()` API
"op_test_event_step_result_failed",
@ -575,11 +579,14 @@ const finalDenoNs = {
internal: internalSymbol,
[internalSymbol]: internals,
...denoNs,
// Deno.test and Deno.bench are noops here, but kept for compatibility; so
// that they don't cause errors when used outside of `deno test`/`deno bench`
// Deno.test, Deno.bench, Deno.lint are noops here, but kept for compatibility; so
// that they don't cause errors when used outside of `deno test`/`deno bench`/`deno lint`
// contexts.
test: () => {},
bench: () => {},
lint: {
runPlugin: () => {},
},
};
ObjectDefineProperties(finalDenoNs, {

View file

@ -165,10 +165,14 @@ fn js_unit_test(test: String) {
let mut deno = deno
.arg("-A")
.arg(util::tests_path().join("unit").join(format!("{test}.ts")))
.piped_output()
.spawn()
.expect("failed to spawn script");
.arg(util::tests_path().join("unit").join(format!("{test}.ts")));
// update the snapshots when `UPDATE=1` is set
if std::env::var_os("UPDATE") == Some("1".into()) {
deno = deno.arg("--").arg("--update");
}
let mut deno = deno.piped_output().spawn().expect("failed to spawn script");
let now = Instant::now();
let stdout = deno.stdout.take().unwrap();

View file

@ -0,0 +1,17 @@
{
"steps": [
{
"args": "lint a.ts",
"output": "lint.out",
"exitCode": 1
},
{
"args": "lint -c deno_exclude.json a.ts",
"output": "lint_exclude.out"
},
{
"args": "lint --fix a.ts",
"output": "lint_fixed.out"
}
]
}

View file

@ -0,0 +1 @@
const _a = "foo";

View file

@ -0,0 +1,5 @@
{
"lint": {
"plugins": ["./plugin.ts"]
}
}

View file

@ -0,0 +1,10 @@
{
"lint": {
"plugins": ["./plugin.ts"],
"rules": {
"exclude": [
"test-plugin/my-rule"
]
}
}
}

View file

@ -0,0 +1,2 @@
[WILDCARD]Found 1 problem (1 fixable via --fix)
Checked 1 file

View file

@ -0,0 +1 @@
Checked 1 file

View file

@ -0,0 +1 @@
Checked 1 file

View file

@ -0,0 +1,22 @@
export default {
name: "test-plugin",
rules: {
"my-rule": {
create(context) {
return {
Identifier(node) {
if (node.name === "_a") {
context.report({
node,
message: "should be _b",
fix(fixer) {
return fixer.replaceText(node, "_b");
},
});
}
},
};
},
},
},
};

View file

@ -0,0 +1,6 @@
{
"tempDir": true,
"args": "lint --fix",
"output": "fix.out",
"exitCode": 1
}

View file

@ -0,0 +1,5 @@
{
"lint": {
"plugins": ["./plugin.ts"]
}
}

View file

@ -0,0 +1,11 @@
Error linting: [WILDLINE]main.ts
The 'test-plugin/my-rule' rule caused a syntax error applying 'Fix this test-plugin/my-rule problem'.
Range: [14, 18]
Text: "garbage test test"
: Expected a semicolon at file:///[WILDLINE]/main.ts:1:23
const value = garbage test test;
~~~~
Checked 2 files

View file

@ -0,0 +1,2 @@
const value = "𝄞";
console.log(value);

View file

@ -0,0 +1,20 @@
export default {
name: "test-plugin",
rules: {
"my-rule": {
create(context) {
return {
VariableDeclarator(node) {
context.report({
node: node.init,
message: 'should be equal to string "1"',
fix(fixer) {
return fixer.replaceText(node.init, "garbage test test");
},
});
},
};
},
},
},
};

View file

@ -0,0 +1,6 @@
{
"tempDir": true,
"args": "lint --fix",
"output": "fix.out",
"exitCode": 1
}

View file

@ -0,0 +1,5 @@
{
"lint": {
"plugins": ["./plugin.ts"]
}
}

View file

@ -0,0 +1,12 @@
Reached maximum number of fix iterations for 'file:///[WILDLINE]/main.ts'. There's probably a bug in the lint rule. Please fix this file manually.
error[test-plugin/my-rule]: should be equal to string "1"
--> [WILDLINE]main.ts:1:15
|
1 | const value = [WILDLINE];
| [WILDLINE]
docs: https://docs.deno.com/lint/rules/test-plugin/my-rule
Found 1 problem (1 fixable via --fix)
Checked 2 files

View file

@ -0,0 +1,2 @@
const value = "𝄞";
console.log(value);

View file

@ -0,0 +1,20 @@
export default {
name: "test-plugin",
rules: {
"my-rule": {
create(context) {
return {
VariableDeclarator(node) {
context.report({
node: node.init,
message: 'should be equal to string "1"',
fix(fixer) {
return fixer.replaceText(node.init, Date.now().toString());
},
});
},
};
},
},
},
};

View file

@ -0,0 +1,22 @@
{
"tests": {
"lint": {
"args": "lint",
"output": "lint.out",
"exitCode": 1
},
"fix": {
"tempDir": true,
"steps": [{
"args": "lint --fix",
"output": "fix.out"
}, {
"args": [
"eval",
"console.log(Deno.readTextFileSync('main.ts').trim())"
],
"output": "fixed.out"
}]
}
}
}

View file

@ -0,0 +1,5 @@
{
"lint": {
"plugins": ["./plugin.ts"]
}
}

View file

@ -0,0 +1 @@
Checked 2 files

View file

@ -0,0 +1,2 @@
const value = "1";
console.log(value);

View file

@ -0,0 +1,11 @@
error[test-plugin/my-rule]: should be equal to string "1"
--> [WILDLINE]main.ts:1:15
|
1 | const value = "𝄞";
| ^^^
docs: https://docs.deno.com/lint/rules/test-plugin/my-rule
Found 1 problem (1 fixable via --fix)
Checked 2 files

View file

@ -0,0 +1,2 @@
const value = "𝄞";
console.log(value);

View file

@ -0,0 +1,22 @@
export default {
name: "test-plugin",
rules: {
"my-rule": {
create(context) {
return {
VariableDeclarator(node) {
if (node.init.type !== "Literal" || node.init.value !== "1") {
context.report({
node: node.init,
message: 'should be equal to string "1"',
fix(fixer) {
return fixer.replaceText(node.init, '"1"');
},
});
}
},
};
},
},
},
};

File diff suppressed because it is too large

View file

@ -3,45 +3,14 @@
import { assertEquals } from "./test_util.ts";
import { assertSnapshot } from "@std/testing/snapshot";
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintReportData {
// deno-lint-ignore no-explicit-any
node: any;
message: string;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
interface LintContext {
id: string;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
// deno-lint-ignore no-explicit-any
type LintVisitor = Record<string, (node: any) => void>;
// TODO(@marvinhagemeister) Remove once we land "official" types
interface LintRule {
create(ctx: LintContext): LintVisitor;
destroy?(): void;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
interface LintPlugin {
name: string;
rules: Record<string, LintRule>;
}
function runLintPlugin(plugin: LintPlugin, fileName: string, source: string) {
// deno-lint-ignore no-explicit-any
return (Deno as any)[(Deno as any).internal].runLintPlugin(
plugin,
fileName,
source,
);
}
function testPlugin(
source: string,
rule: LintRule,
) {
rule: Deno.lint.Rule,
): Deno.lint.Diagnostic[] {
const plugin = {
name: "test-plugin",
rules: {
@ -49,7 +18,11 @@ function testPlugin(
},
};
return runLintPlugin(plugin, "source.tsx", source);
return Deno.lint.runPlugin(
plugin,
"source.tsx",
source,
);
}
interface VisitResult {

View file

@ -1,6 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license.
const EXPECTED_OP_COUNT = 13;
const EXPECTED_OP_COUNT = 14;
Deno.test(function checkExposedOps() {
// @ts-ignore TS doesn't allow to index with symbol