mirror of
https://github.com/denoland/deno.git
synced 2025-03-03 17:34:47 -05:00
chore: update to rust 1.85 (#28236)
Updates to use Rust 1.85. Doesn't move to the 2024 edition, as that's a fair bit more involved. A nice side benefit is that the new rustc version seems to lead to a slight reduction in binary size (at least on mac):

```
     FILE SIZE
 --------------
   +4.3%  +102Ki   __DATA_CONST,__const
   [NEW] +69.3Ki   __TEXT,__literals
   [NEW] +68.5Ki   Rebase Info
   +5.0% +39.9Ki   __TEXT,__unwind_info
    +57% +8.85Ki   [__TEXT]
   [NEW] +8.59Ki   Lazy Binding Info
   [NEW] +5.16Ki   __TEXT,__stub_helper
   [NEW] +3.58Ki   Export Info
   [NEW] +3.42Ki   __DATA,__la_symbol_ptr
   -0.1%    -726   [12 Others]
  -21.4% -3.10Ki   [__DATA_CONST]
  -95.8% -3.39Ki   __DATA_CONST,__got
  -20.9% -3.43Ki   [__DATA]
   -0.5% -4.52Ki   Code Signature
 -100.0% -11.6Ki   [__LINKEDIT]
   -1.0% -43.5Ki   Symbol Table
   -1.6% -44.0Ki   __TEXT,__gcc_except_tab
   -0.2% -48.1Ki   __TEXT,__const
   -3.3% -78.6Ki   __TEXT,__eh_frame
   -0.7%  -320Ki   __TEXT,__text
   -1.5%  -334Ki   String Table
   -0.5%  -586Ki   TOTAL
```
This commit is contained in:
parent e66ef32a8f
commit ee4c14a550
66 changed files with 158 additions and 176 deletions
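Most of the churn across the 66 files below follows a handful of mechanical patterns the newer toolchain pushes toward: `Option::is_none_or` and `Result::is_ok_and`/`Option::is_some_and` in place of `map_or(false, ...)`, eliding impl lifetimes that are never named (`impl Foo<'_>`), C-string literals (`c"..."`), and `std::ptr::fn_addr_eq` for comparing function pointers. The following is a minimal standalone sketch of those idioms, not code from this commit; the `Guard` type and the environment variable names here are made up for illustration:

```rust
use std::ptr;

// A hypothetical wrapper type: lifetimes that are never named can be elided
// as `'_` in impl headers, which newer clippy versions suggest.
struct Guard<'a, T>(&'a T);

impl<T: std::fmt::Debug> Guard<'_, T> {
  fn describe(&self) -> String {
    format!("{:?}", self.0)
  }
}

fn main() {
  // `Option::is_none_or` (stable since 1.82) replaces `!opt.is_some_and(..)`.
  let flag: Option<String> = std::env::var("EXAMPLE_FLAG").ok();
  let enabled = flag.is_none_or(|v| v != "false");

  // `Result::is_ok_and` replaces `result.map_or(false, ..)`.
  let skip = std::env::var("EXAMPLE_SKIP").is_ok_and(|v| v == "1");

  // `std::ptr::fn_addr_eq` (stable since 1.85) compares function pointers;
  // a plain `==` on fn pointers now triggers the
  // `unpredictable_function_pointer_comparisons` warning.
  let hook: fn() = main;
  let same = ptr::fn_addr_eq(hook, main as fn());

  let guard = Guard(&enabled);
  println!("{enabled} {skip} {same} {}", guard.describe());
}
```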
Cargo.lock (2 changes, generated)

```diff
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4

 [[package]]
 name = "Inflector"
```

```diff
@@ -1428,7 +1428,7 @@ fn enable_unstable(command: Command) -> Command {
 fn enable_full(command: Command) -> Command {
   command.mut_args(|arg| {
     let long_help = arg.get_long_help();
-    if !long_help.is_some_and(|s| s.to_string() == "false") {
+    if long_help.is_none_or(|s| s.to_string() != "false") {
       arg.hide(false)
     } else {
       arg
```

```diff
@@ -46,7 +46,7 @@ pub struct Guard<'a, T> {
   guard: MutexGuard<'a, T>,
 }

-impl<'a, T> std::ops::Deref for Guard<'a, T> {
+impl<T> std::ops::Deref for Guard<'_, T> {
   type Target = T;

   fn deref(&self) -> &Self::Target {
@@ -54,7 +54,7 @@ impl<'a, T> std::ops::Deref for Guard<'a, T> {
   }
 }

-impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
+impl<T> std::ops::DerefMut for Guard<'_, T> {
   fn deref_mut(&mut self) -> &mut Self::Target {
     &mut self.guard
   }
```

```diff
@@ -188,7 +188,7 @@ fn main() {
   let target = env::var("TARGET").unwrap();
   let host = env::var("HOST").unwrap();
   let skip_cross_check =
-    env::var("DENO_SKIP_CROSS_BUILD_CHECK").map_or(false, |v| v == "1");
+    env::var("DENO_SKIP_CROSS_BUILD_CHECK").is_ok_and(|v| v == "1");
   if !skip_cross_check && target != host {
     panic!("Cross compiling with snapshot is not supported.");
   }
```
cli/cache/module_info.rs (4 changes, vendored)

```diff
@@ -141,7 +141,7 @@ pub struct ModuleInfoCacheModuleAnalyzer<'a> {
   parsed_source_cache: &'a Arc<ParsedSourceCache>,
 }

-impl<'a> ModuleInfoCacheModuleAnalyzer<'a> {
+impl ModuleInfoCacheModuleAnalyzer<'_> {
   fn load_cached_module_info(
     &self,
     specifier: &ModuleSpecifier,
@@ -220,7 +220,7 @@ impl<'a> ModuleInfoCacheModuleAnalyzer<'a> {
 }

 #[async_trait::async_trait(?Send)]
-impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
+impl deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'_> {
   async fn analyze(
     &self,
     specifier: &ModuleSpecifier,
```

```diff
@@ -332,7 +332,7 @@ pub enum EmitParsedSourceHelperError {
 /// Helper to share code between async and sync emit_parsed_source methods.
 struct EmitParsedSourceHelper<'a>(&'a Emitter);

-impl<'a> EmitParsedSourceHelper<'a> {
+impl EmitParsedSourceHelper<'_> {
   pub fn pre_emit_parsed_source(
     &self,
     specifier: &ModuleSpecifier,
```
```diff
@@ -159,7 +159,7 @@ pub struct MainModuleGraphUpdatePermit<'a> {
   graph: ModuleGraph,
 }

-impl<'a> ModuleGraphUpdatePermit for MainModuleGraphUpdatePermit<'a> {
+impl ModuleGraphUpdatePermit for MainModuleGraphUpdatePermit<'_> {
   fn graph_mut(&mut self) -> &mut ModuleGraph {
     &mut self.graph
   }
```

```diff
@@ -652,17 +652,17 @@ impl ModuleGraphBuilder {
     }
   }

-  pub async fn build_graph_with_npm_resolution<'a>(
+  pub async fn build_graph_with_npm_resolution(
     &self,
     graph: &mut ModuleGraph,
-    options: CreateGraphOptions<'a>,
+    options: CreateGraphOptions<'_>,
   ) -> Result<(), BuildGraphWithNpmResolutionError> {
     enum MutLoaderRef<'a> {
       Borrowed(&'a mut dyn Loader),
       Owned(cache::FetchCacher),
     }

-    impl<'a> MutLoaderRef<'a> {
+    impl MutLoaderRef<'_> {
       pub fn as_mut_loader(&mut self) -> &mut dyn Loader {
         match self {
           Self::Borrowed(loader) => *loader,
@@ -673,7 +673,7 @@ impl ModuleGraphBuilder {

 struct LockfileLocker<'a>(&'a CliLockfile);

-impl<'a> deno_graph::source::Locker for LockfileLocker<'a> {
+impl deno_graph::source::Locker for LockfileLocker<'_> {
   fn get_remote_checksum(
     &self,
     specifier: &deno_ast::ModuleSpecifier,
@@ -1357,7 +1357,7 @@ struct CliGraphResolver<'a> {
     BTreeMap<Arc<ModuleSpecifier>, Option<JsxImportSourceConfig>>,
 }

-impl<'a> CliGraphResolver<'a> {
+impl CliGraphResolver<'_> {
   fn resolve_jsx_import_source_config(
     &self,
     referrer: &ModuleSpecifier,
@@ -1371,7 +1371,7 @@ impl<'a> CliGraphResolver<'a> {
   }
 }

-impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> {
+impl deno_graph::source::Resolver for CliGraphResolver<'_> {
   fn default_jsx_import_source(
     &self,
     referrer: &ModuleSpecifier,
```
```diff
@@ -1699,7 +1699,7 @@ pub struct OpenDocumentsGraphLoader<'a> {
   pub open_docs: &'a HashMap<ModuleSpecifier, Arc<Document>>,
 }

-impl<'a> OpenDocumentsGraphLoader<'a> {
+impl OpenDocumentsGraphLoader<'_> {
   fn load_from_docs(
     &self,
     specifier: &ModuleSpecifier,
@@ -1720,7 +1720,7 @@ impl<'a> OpenDocumentsGraphLoader<'a> {
   }
 }

-impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> {
+impl deno_graph::source::Loader for OpenDocumentsGraphLoader<'_> {
   fn load(
     &self,
     specifier: &ModuleSpecifier,
```

```diff
@@ -1457,6 +1457,7 @@ impl Inner {
       .options
       .clone();
     let config_data = self.config.tree.data_for_specifier(&specifier);
+    #[allow(clippy::nonminimal_bool)] // clippy's suggestion is more confusing
     if !config_data.is_some_and(|d| d.maybe_deno_json().is_some()) {
       fmt_options.use_tabs = Some(!params.options.insert_spaces);
       fmt_options.indent_width = Some(params.options.tab_size as u8);
```

```diff
@@ -969,7 +969,7 @@ pub struct SingleReferrerGraphResolver<'a> {
   pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>,
 }

-impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> {
+impl deno_graph::source::Resolver for SingleReferrerGraphResolver<'_> {
   fn default_jsx_import_source(
     &self,
     _referrer: &ModuleSpecifier,
```
```diff
@@ -4365,10 +4365,7 @@ impl TscSpecifierMap {
     let specifier_str = original
       .replace(".d.ts.d.ts", ".d.ts")
       .replace("$node_modules", "node_modules");
-    let specifier = match ModuleSpecifier::parse(&specifier_str) {
-      Ok(s) => s,
-      Err(err) => return Err(err),
-    };
+    let specifier = ModuleSpecifier::parse(&specifier_str)?;
     if specifier.as_str() != original {
       self
         .denormalized_specifiers
```

```diff
@@ -421,7 +421,7 @@ pub enum EntrySetupOutcome<'a> {
   Success,
 }

-impl<'a> EntrySetupOutcome<'a> {
+impl EntrySetupOutcome<'_> {
   pub fn warn_if_failed(&self) {
     match self {
       EntrySetupOutcome::MissingEntrypoint {
```

```diff
@@ -137,8 +137,8 @@ impl<'a> GlobalLifecycleScripts<'a> {
   }
 }

-impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
-  for GlobalLifecycleScripts<'a>
+impl super::common::lifecycle_scripts::LifecycleScriptsStrategy
+  for GlobalLifecycleScripts<'_>
 {
   fn can_run_scripts(&self) -> bool {
     false
```
```diff
@@ -697,7 +697,7 @@ struct LocalLifecycleScripts<'a> {
   deno_local_registry_dir: &'a Path,
 }

-impl<'a> LocalLifecycleScripts<'a> {
+impl LocalLifecycleScripts<'_> {
   /// `node_modules/.deno/<package>/.scripts-run`
   fn ran_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf {
     local_node_modules_package_folder(self.deno_local_registry_dir, package)
@@ -711,8 +711,8 @@ impl<'a> LocalLifecycleScripts<'a> {
   }
 }

-impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
-  for LocalLifecycleScripts<'a>
+impl super::common::lifecycle_scripts::LifecycleScriptsStrategy
+  for LocalLifecycleScripts<'_>
 {
   fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf {
     local_node_modules_package_contents_path(
@@ -784,7 +784,7 @@ struct SetupCacheDep<'a> {
   current: &'a mut BTreeMap<String, String>,
 }

-impl<'a> SetupCacheDep<'a> {
+impl SetupCacheDep<'_> {
   pub fn insert(&mut self, name: &str, target_folder_name: &str) -> bool {
     self
       .current
```

```diff
@@ -137,10 +137,10 @@ impl NpmInstaller {
       .dependencies_result
   }

-  pub async fn add_package_reqs_raw<'a>(
+  pub async fn add_package_reqs_raw(
     &self,
     packages: &[PackageReq],
-    caching: Option<PackageCaching<'a>>,
+    caching: Option<PackageCaching<'_>>,
   ) -> AddPkgReqsResult {
     if packages.is_empty() {
       return AddPkgReqsResult {
```
```diff
@@ -506,6 +506,7 @@ fn deserialize_npm_snapshot(
   }

+  #[allow(clippy::needless_lifetimes)] // clippy bug
   #[allow(clippy::type_complexity)]
   fn parse_root_package<'a>(
     id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
   ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
@@ -519,6 +520,7 @@ fn deserialize_npm_snapshot(
   }

+  #[allow(clippy::needless_lifetimes)] // clippy bug
   #[allow(clippy::type_complexity)]
   fn parse_package_dep<'a>(
     id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
   ) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
```

```diff
@@ -79,7 +79,7 @@ pub enum StandaloneRelativeFileBaseUrl<'a> {
   Path(&'a Url),
 }

-impl<'a> StandaloneRelativeFileBaseUrl<'a> {
+impl StandaloneRelativeFileBaseUrl<'_> {
   /// Gets the module map key of the provided specifier.
   ///
   /// * Descendant file specifiers will be made relative to the base.
```
```diff
@@ -69,7 +69,7 @@ fn vfs_as_display_tree(
     Symlink(&'a VirtualSymlinkParts),
   }

-  impl<'a> EntryOutput<'a> {
+  impl EntryOutput<'_> {
     pub fn size(&self) -> Size {
       match self {
         EntryOutput::All(size) => *size,
@@ -85,7 +85,7 @@ fn vfs_as_display_tree(
     }
   }

-  impl<'a> EntryOutput<'a> {
+  impl EntryOutput<'_> {
     pub fn as_display_tree(&self, name: String) -> DisplayTreeNode {
       fn format_size(size: Size) -> String {
         if size.unique == size.total {
@@ -136,7 +136,7 @@ fn vfs_as_display_tree(
     output: EntryOutput<'a>,
   }

-  impl<'a> DirEntryOutput<'a> {
+  impl DirEntryOutput<'_> {
     /// Collapses leaf nodes so they don't take up so much space when being
     /// displayed.
     ///
```
```diff
@@ -110,7 +110,7 @@ pub mod cpu {

     sysctl.arg("-n");
     sysctl.arg("machdep.cpu.brand_string");
-    return std::str::from_utf8(
+    std::str::from_utf8(
       &sysctl
         .output()
         .map(|x| x.stdout)
@@ -118,7 +118,7 @@ pub mod cpu {
     )
     .unwrap()
     .trim()
-    .to_string();
+    .to_string()
   }

   pub fn windows() -> String {
@@ -128,14 +128,14 @@ pub mod cpu {
     wmi.arg("get");
     wmi.arg("name");

-    return match wmi.output() {
+    match wmi.output() {
       Err(_) => String::from("unknown"),

       Ok(x) => {
         let x = String::from_utf8_lossy(&x.stdout);
-        return x.lines().nth(1).unwrap_or("unknown").trim().to_string();
+        x.lines().nth(1).unwrap_or("unknown").trim().to_string()
       }
-    };
+    }
   }

   pub fn linux() -> String {
```
```diff
@@ -157,7 +157,9 @@ struct StartEvent<'a> {
   trees: Vec<(usize, &'a mut RangeTree<'a>)>,
 }

-fn into_start_events<'a>(trees: Vec<&'a mut RangeTree<'a>>) -> Vec<StartEvent> {
+fn into_start_events<'a>(
+  trees: Vec<&'a mut RangeTree<'a>>,
+) -> Vec<StartEvent<'a>> {
   let mut result: BTreeMap<usize, Vec<(usize, &'a mut RangeTree<'a>)>> =
     BTreeMap::new();
   for (parent_index, tree) in trees.into_iter().enumerate() {
```

```diff
@@ -583,9 +583,8 @@ pub fn cover_files(
       cli_options.initial_cwd(),
     )?;

-    let maybe_file_result = file_fetcher
-      .get_cached_source_or_local(&module_specifier)
-      .map_err(AnyError::from);
+    let maybe_file_result =
+      file_fetcher.get_cached_source_or_local(&module_specifier);
     let file = match maybe_file_result {
       Ok(Some(file)) => TextDecodedFile::decode(file)?,
       Ok(None) => return Err(anyhow!("{}", get_message(&module_specifier))),
```
```diff
@@ -35,7 +35,7 @@ pub struct RangeTree<'a> {
   pub children: Vec<&'a mut RangeTree<'a>>,
 }

-impl<'rt> RangeTree<'rt> {
+impl RangeTree<'_> {
   pub fn new<'a>(
     start: usize,
     end: usize,
```

```diff
@@ -51,7 +51,7 @@ pub trait CoverageReporter {
   fn collect_summary<'a>(
     &'a self,
     file_reports: &'a [(CoverageReport, String)],
-  ) -> CoverageSummary {
+  ) -> CoverageSummary<'a> {
     let urls = file_reports.iter().map(|rep| &rep.0.url).collect();
     let root = match util::find_root(urls)
       .and_then(|root_path| root_path.to_file_path().ok())
```

```diff
@@ -609,9 +609,7 @@ fn lint_stdin(
     maybe_plugin_runner: None,
   });

-  let r = linter
-    .lint_file(&file_path, deno_ast::strip_bom(source_code), None)
-    .map_err(AnyError::from);
+  let r = linter.lint_file(&file_path, deno_ast::strip_bom(source_code), None);

   let success =
     handle_lint_result(&file_path.to_string_lossy(), r, reporter_lock.clone());
```
```diff
@@ -174,7 +174,7 @@ struct SloppyImportCaptureResolver<'a> {
   >,
 }

-impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
+impl deno_graph::source::Resolver for SloppyImportCaptureResolver<'_> {
   fn resolve(
     &self,
     specifier_text: &str,
```

```diff
@@ -85,14 +85,14 @@ impl DepLocation {

 struct DebugAdapter<T>(T);

-impl<'a> std::fmt::Debug for DebugAdapter<&'a ConfigFileRc> {
+impl std::fmt::Debug for DebugAdapter<&ConfigFileRc> {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     f.debug_struct("ConfigFile")
       .field("specifier", &self.0.specifier)
       .finish()
   }
 }
-impl<'a> std::fmt::Debug for DebugAdapter<&'a PackageJsonRc> {
+impl std::fmt::Debug for DebugAdapter<&PackageJsonRc> {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     f.debug_struct("PackageJson")
       .field("path", &self.0.path)
```

```diff
@@ -593,7 +593,7 @@ trait PackageInfoProvider {
   /// The help to return if a package is found by this provider
   const HELP: NotFoundHelp;
   async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv>;
-  async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version>;
+  async fn latest_version(&self, req: &PackageReq) -> Option<Version>;
 }

 impl PackageInfoProvider for Arc<JsrFetchResolver> {
@@ -603,7 +603,7 @@ impl PackageInfoProvider for Arc<JsrFetchResolver> {
     (**self).req_to_nv(req).await
   }

-  async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
+  async fn latest_version(&self, req: &PackageReq) -> Option<Version> {
     let info = self.package_info(&req.name).await?;
     best_version(
       info
@@ -623,7 +623,7 @@ impl PackageInfoProvider for Arc<NpmFetchResolver> {
     (**self).req_to_nv(req).await
   }

-  async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
+  async fn latest_version(&self, req: &PackageReq) -> Option<Version> {
     let info = self.package_info(&req.name).await?;
     best_version(info.versions.keys()).cloned()
   }
```
```diff
@@ -1168,7 +1168,7 @@ async fn check_if_git_repo_dirty(cwd: &Path) -> Option<String> {
     .stdout(Stdio::null())
     .status()
     .await
-    .map_or(false, |status| status.success());
+    .is_ok_and(|status| status.success());

   if !git_exists {
     return None; // Git is not installed
```

```diff
@@ -112,7 +112,7 @@ impl crate::worker::HmrRunner for HmrRunner {
         continue;
       };

-      let filtered_paths: Vec<PathBuf> = changed_paths.into_iter().filter(|p| p.extension().map_or(false, |ext| {
+      let filtered_paths: Vec<PathBuf> = changed_paths.into_iter().filter(|p| p.extension().is_some_and(|ext| {
        let ext_str = ext.to_str().unwrap();
        matches!(ext_str, "js" | "ts" | "jsx" | "tsx")
      })).collect();
```

```diff
@@ -368,7 +368,7 @@ pub struct DiagnosticsByFolderIterator<'a>(
   DiagnosticsByFolderIteratorInner<'a>,
 );

-impl<'a> DiagnosticsByFolderIterator<'a> {
+impl DiagnosticsByFolderIterator<'_> {
   pub fn into_graph(self) -> Arc<ModuleGraph> {
     match self.0 {
       DiagnosticsByFolderIteratorInner::Empty(module_graph) => module_graph,
@@ -377,7 +377,7 @@ impl<'a> DiagnosticsByFolderIterator<'a> {
   }
 }

-impl<'a> Iterator for DiagnosticsByFolderIterator<'a> {
+impl Iterator for DiagnosticsByFolderIterator<'_> {
   type Item = Result<Diagnostics, CheckError>;

   fn next(&mut self) -> Option<Self::Item> {
@@ -409,7 +409,7 @@ struct DiagnosticsByFolderRealIterator<'a> {
   code_cache: Option<Arc<crate::cache::CodeCache>>,
 }

-impl<'a> Iterator for DiagnosticsByFolderRealIterator<'a> {
+impl Iterator for DiagnosticsByFolderRealIterator<'_> {
   type Item = Result<Diagnostics, CheckError>;

   fn next(&mut self) -> Option<Self::Item> {
```
```diff
@@ -610,7 +610,7 @@ struct Transform<'a> {
   wrap_kind: WrapKind,
 }

-impl<'a> VisitMut for Transform<'a> {
+impl VisitMut for Transform<'_> {
   fn visit_mut_program(&mut self, node: &mut ast::Program) {
     let new_module_items = match node {
       ast::Program::Module(module) => {
```

```diff
@@ -348,11 +348,10 @@ struct LaxSingleProcessFsFlagInner {

 impl Drop for LaxSingleProcessFsFlagInner {
   fn drop(&mut self) {
-    use fs3::FileExt;
     // kill the poll thread
     self.finished_token.cancel();
     // release the file lock
-    if let Err(err) = self.fs_file.unlock() {
+    if let Err(err) = fs3::FileExt::unlock(&self.fs_file) {
       log::debug!(
         "Failed releasing lock for {}. {:#}",
         self.file_path.display(),
```

```diff
@@ -45,10 +45,10 @@ pub fn is_importable_ext(path: &Path) -> bool {

 /// Get the extension of a file in lowercase.
 pub fn get_extension(file_path: &Path) -> Option<String> {
-  return file_path
+  file_path
     .extension()
     .and_then(|e| e.to_str())
-    .map(|e| e.to_lowercase());
+    .map(|e| e.to_lowercase())
 }

 /// TypeScript figures out the type of file based on the extension, but we take
```

```diff
@@ -83,7 +83,7 @@ impl TaskQueue {
 /// A permit that when dropped will allow another task to proceed.
 pub struct TaskQueuePermit<'a>(&'a TaskQueue);

-impl<'a> Drop for TaskQueuePermit<'a> {
+impl Drop for TaskQueuePermit<'_> {
   fn drop(&mut self) {
     self.0.raise_next();
   }
@@ -116,7 +116,7 @@ impl<'a> TaskQueuePermitAcquireFuture<'a> {
   }
 }

-impl<'a> Drop for TaskQueuePermitAcquireFuture<'a> {
+impl Drop for TaskQueuePermitAcquireFuture<'_> {
   fn drop(&mut self) {
     if let Some(task_queue) = self.task_queue.take() {
       if self.item.is_ready.is_raised() {
```
```diff
@@ -101,7 +101,7 @@ impl CliMainWorker {
     let result;
     select! {
       hmr_result = hmr_future => {
-        result = hmr_result.map_err(Into::into);
+        result = hmr_result;
       },
       event_loop_result = event_loop_future => {
         result = event_loop_result;
```

```diff
@@ -468,7 +468,7 @@ where
     let connecting = connector.call(proxy_dst);
     let tls = TlsConnector::from(self.tls.clone());
     Box::pin(async move {
-      let mut io = connecting.await.map_err(Into::<BoxError>::into)?;
+      let mut io = connecting.await?;

       if is_https {
         tunnel(&mut io, &orig_dst, user_agent, auth).await?;
```

```diff
@@ -243,6 +243,7 @@ where
   let cstr =
     // SAFETY: Pointer and offset are user provided.
     unsafe { CStr::from_ptr(ptr.offset(offset) as *const c_char) }.to_bytes();
+  #[allow(clippy::unnecessary_lazy_evaluations)]
   let value = v8::String::new_from_utf8(scope, cstr, v8::NewStringType::Normal)
     .ok_or_else(|| ReprError::CStringTooLong)?;
   Ok(value)
```
```diff
@@ -161,17 +161,17 @@ impl FileSystem for RealFs {
   }

   fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
-    stat(path).map(Into::into)
+    stat(path)
   }
   async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
-    spawn_blocking(move || stat(&path)).await?.map(Into::into)
+    spawn_blocking(move || stat(&path)).await?
   }

   fn lstat_sync(&self, path: &Path) -> FsResult<FsStat> {
-    lstat(path).map(Into::into)
+    lstat(path)
   }
   async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
-    spawn_blocking(move || lstat(&path)).await?.map(Into::into)
+    spawn_blocking(move || lstat(&path)).await?
   }

   fn exists_sync(&self, path: &Path) -> bool {
@@ -404,7 +404,6 @@ impl FileSystem for RealFs {
       Ok::<_, FsError>(Cow::Owned(buf))
     })
     .await?
-    .map_err(Into::into)
   }
 }

```

```diff
@@ -515,7 +515,7 @@ impl HttpRecord {
   fn response_ready(&self) -> impl Future<Output = ()> + '_ {
     struct HttpRecordReady<'a>(&'a HttpRecord);

-    impl<'a> Future for HttpRecordReady<'a> {
+    impl Future for HttpRecordReady<'_> {
       type Output = ();

       fn poll(
@@ -540,7 +540,7 @@ impl HttpRecord {
   pub fn response_body_finished(&self) -> impl Future<Output = bool> + '_ {
     struct HttpRecordFinished<'a>(&'a HttpRecord);

-    impl<'a> Future for HttpRecordFinished<'a> {
+    impl Future for HttpRecordFinished<'_> {
       type Output = bool;

       fn poll(
```
```diff
@@ -238,7 +238,7 @@ mod tests {
     size: usize,
     expected: Result<ExpectedResponseAndHead, WebSocketUpgradeError>,
   ) {
-    let chunk_info = Some((s.as_bytes().len(), size));
+    let chunk_info = Some((s.len(), size));
     let mut upgrade = WebSocketUpgrade::default();
     let mut result = Ok(None);
     for chunk in s.as_bytes().chunks(size) {
```

```diff
@@ -884,7 +884,7 @@ impl crate::fs::File for StdFileResourceInner {
       if exclusive {
         file.lock_exclusive()?;
       } else {
-        file.lock_shared()?;
+        fs3::FileExt::lock_shared(file)?;
       }
       Ok(())
     })
@@ -895,7 +895,7 @@ impl crate::fs::File for StdFileResourceInner {
       if exclusive {
         file.lock_exclusive()?;
       } else {
-        file.lock_shared()?;
+        fs3::FileExt::lock_shared(file)?;
       }
       Ok(())
     })
@@ -903,11 +903,11 @@ impl crate::fs::File for StdFileResourceInner {
   }

   fn unlock_sync(self: Rc<Self>) -> FsResult<()> {
-    self.with_sync(|file| Ok(file.unlock()?))
+    self.with_sync(|file| Ok(fs3::FileExt::unlock(file)?))
   }
   async fn unlock_async(self: Rc<Self>) -> FsResult<()> {
     self
-      .with_inner_blocking_task(|file| Ok(file.unlock()?))
+      .with_inner_blocking_task(|file| Ok(fs3::FileExt::unlock(file)?))
       .await
   }

```
```diff
@@ -316,7 +316,7 @@ impl Drop for NapiState {
         .env_cleanup_hooks
         .borrow()
         .iter()
-        .any(|pair| pair.0 == hook.0 && pair.1 == hook.1)
+        .any(|pair| std::ptr::fn_addr_eq(pair.0, hook.0) && pair.1 == hook.1)
       {
         continue;
       }
@@ -326,10 +326,9 @@ impl Drop for NapiState {
       }

       {
-        self
-          .env_cleanup_hooks
-          .borrow_mut()
-          .retain(|pair| !(pair.0 == hook.0 && pair.1 == hook.1));
+        self.env_cleanup_hooks.borrow_mut().retain(|pair| {
+          !(std::ptr::fn_addr_eq(pair.0, hook.0) && pair.1 == hook.1)
+        });
       }
     }
   }
@@ -473,7 +472,10 @@ impl Env {
     data: *mut c_void,
   ) {
     let mut hooks = self.cleanup_hooks.borrow_mut();
-    if hooks.iter().any(|pair| pair.0 == hook && pair.1 == data) {
+    if hooks
+      .iter()
+      .any(|pair| std::ptr::fn_addr_eq(pair.0, hook) && pair.1 == data)
+    {
       panic!("Cannot register cleanup hook with same data twice");
     }
     hooks.push((hook, data));
@@ -487,7 +489,7 @@ impl Env {
     let mut hooks = self.cleanup_hooks.borrow_mut();
     match hooks
       .iter()
-      .rposition(|&pair| pair.0 == hook && pair.1 == data)
+      .rposition(|&pair| std::ptr::fn_addr_eq(pair.0, hook) && pair.1 == data)
     {
       Some(index) => {
         hooks.remove(index);
```
```diff
@@ -210,7 +210,7 @@ impl<T> Nullable for Option<T> {
   }
 }

-impl<'s> Nullable for napi_value<'s> {
+impl Nullable for napi_value<'_> {
   fn is_null(&self) -> bool {
     self.is_none()
   }
```

```diff
@@ -851,8 +851,8 @@ pub type NodeResolverRc<TInNpmPackageChecker, TNpmPackageFolderResolver, TSys> =
   deno_fs::sync::MaybeArc<
     NodeResolver<TInNpmPackageChecker, TNpmPackageFolderResolver, TSys>,
   >;
-#[allow(clippy::disallowed_types)]

+#[allow(clippy::disallowed_types)]
 pub fn create_host_defined_options<'s>(
   scope: &mut v8::HandleScope<'s>,
 ) -> v8::Local<'s, v8::Data> {
```

```diff
@@ -340,7 +340,7 @@ impl<'a> TryFrom<rsa::pkcs8::der::asn1::AnyRef<'a>> for RsaPssParameters<'a> {

   fn try_from(
     any: rsa::pkcs8::der::asn1::AnyRef<'a>,
-  ) -> rsa::pkcs8::der::Result<RsaPssParameters> {
+  ) -> rsa::pkcs8::der::Result<RsaPssParameters<'a>> {
     any.sequence(|decoder| {
       let hash_algorithm = decoder
         .context_specific::<rsa::pkcs8::AlgorithmIdentifierRef>(
```
```diff
@@ -276,7 +276,7 @@ pub fn op_node_cipheriv_final(
   let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
   let context = Rc::try_unwrap(context)
     .map_err(|_| cipher::CipherContextError::ContextInUse)?;
-  context.r#final(auto_pad, input, output).map_err(Into::into)
+  context.r#final(auto_pad, input, output)
 }

 #[op2]
@@ -344,9 +344,7 @@ pub fn op_node_decipheriv_final(
   let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
   let context = Rc::try_unwrap(context)
     .map_err(|_| cipher::DecipherContextError::ContextInUse)?;
-  context
-    .r#final(auto_pad, input, output, auth_tag)
-    .map_err(Into::into)
+  context.r#final(auto_pad, input, output, auth_tag)
 }

 #[op2]
```
```diff
@@ -33,7 +33,7 @@ mod impl_ {
     v8::Local<'a, v8::Value>,
   );

-  impl<'a, 'b> Serialize for SerializeWrapper<'a, 'b> {
+  impl Serialize for SerializeWrapper<'_, '_> {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
       S: Serializer,
```

```diff
@@ -36,14 +36,14 @@ pub fn cpu_info() -> Option<Vec<CpuInfo>> {
     let ticks = libc::sysconf(libc::_SC_CLK_TCK);
     let multiplier = 1000u64 / ticks as u64;
     if libc::sysctlbyname(
-      "machdep.cpu.brand_string\0".as_ptr() as *const libc::c_char,
+      c"machdep.cpu.brand_string".as_ptr() as *const libc::c_char,
       model.as_mut_ptr() as _,
       &mut size,
       std::ptr::null_mut(),
       0,
     ) != 0
       && libc::sysctlbyname(
-        "hw.model\0".as_ptr() as *const libc::c_char,
+        c"hw.model".as_ptr() as *const libc::c_char,
         model.as_mut_ptr() as _,
         &mut size,
         std::ptr::null_mut(),
@@ -57,7 +57,7 @@ pub fn cpu_info() -> Option<Vec<CpuInfo>> {
     let mut cpu_speed_size = std::mem::size_of_val(&cpu_speed);

     libc::sysctlbyname(
-      "hw.cpufrequency\0".as_ptr() as *const libc::c_char,
+      c"hw.cpufrequency".as_ptr() as *const libc::c_char,
       &mut cpu_speed as *mut _ as *mut libc::c_void,
       &mut cpu_speed_size,
       std::ptr::null_mut(),
```
```diff
@@ -48,7 +48,7 @@ pub struct SerializerDelegate {
   obj: v8::Global<v8::Object>,
 }

-impl<'a> v8::cppgc::GarbageCollected for Serializer<'a> {
+impl v8::cppgc::GarbageCollected for Serializer<'_> {
   fn trace(&self, _visitor: &v8::cppgc::Visitor) {}
 }

@@ -226,7 +226,7 @@ pub struct Deserializer<'a> {
   inner: v8::ValueDeserializer<'a>,
 }

-impl<'a> deno_core::GarbageCollected for Deserializer<'a> {}
+impl deno_core::GarbageCollected for Deserializer<'_> {}

 pub struct DeserializerDelegate {
   obj: v8::Global<v8::Object>,
```

```diff
@@ -101,7 +101,7 @@ pub fn os_release() -> String {
     }

     // without the NUL terminator
-    return String::from_utf8_lossy(&s[..len - 1]).to_string();
+    String::from_utf8_lossy(&s[..len - 1]).to_string()
   }
   #[cfg(target_family = "windows")]
   {
```
```diff
@@ -1,6 +1,7 @@
 // Copyright 2018-2025 the Deno authors. MIT license.

 #![allow(clippy::too_many_arguments)]
+#![expect(unexpected_cfgs)]

 use std::borrow::Cow;
 use std::cell::RefCell;
@@ -789,7 +790,7 @@ pub fn handle_log(record: &log::Record) {

   struct Visitor<'s>(&'s mut LogRecord);

-  impl<'s, 'kvs> log::kv::VisitSource<'kvs> for Visitor<'s> {
+  impl<'kvs> log::kv::VisitSource<'kvs> for Visitor<'_> {
     fn visit_pair(
       &mut self,
       key: log::kv::Key<'kvs>,
```

```diff
@@ -136,7 +136,7 @@ impl Blob {
 #[async_trait]
 pub trait BlobPart: Debug {
   // TODO(lucacsonato): this should be a stream!
-  async fn read(&self) -> &[u8];
+  async fn read<'a>(&'a self) -> &'a [u8];
   fn size(&self) -> usize;
 }

@@ -151,7 +151,7 @@ impl From<Vec<u8>> for InMemoryBlobPart {

 #[async_trait]
 impl BlobPart for InMemoryBlobPart {
-  async fn read(&self) -> &[u8] {
+  async fn read<'a>(&'a self) -> &'a [u8] {
     &self.0
   }

@@ -169,7 +169,7 @@ pub struct SlicedBlobPart {

 #[async_trait]
 impl BlobPart for SlicedBlobPart {
-  async fn read(&self) -> &[u8] {
+  async fn read<'a>(&'a self) -> &'a [u8] {
     let original = self.part.read().await;
     &original[self.start..self.start + self.len]
   }
```
```diff
@@ -547,7 +547,7 @@ impl<
 pub fn find_definitely_typed_package<'a>(
   nv: &'a PackageNv,
   packages: impl IntoIterator<Item = (&'a PackageReq, &'a PackageNv)>,
-) -> Option<(&PackageReq, &PackageNv)> {
+) -> Option<(&'a PackageReq, &'a PackageNv)> {
   let types_name = types_package_name(&nv.name);
   let mut best_patch = 0;
   let mut highest: Option<(&PackageReq, &PackageNv)> = None;
```

```diff
@@ -226,6 +226,7 @@ impl<
     Ok(Cow::Owned(translated_source))
   }

+  #[allow(clippy::needless_lifetimes)]
   async fn analyze_reexports<'a>(
     &'a self,
     entry_specifier: &url::Url,
```

```diff
@@ -3388,7 +3388,7 @@ impl<'de> Deserialize<'de> for ChildUnitPermissionArg {
     D: Deserializer<'de>,
   {
     struct ChildUnitPermissionArgVisitor;
-    impl<'de> de::Visitor<'de> for ChildUnitPermissionArgVisitor {
+    impl de::Visitor<'_> for ChildUnitPermissionArgVisitor {
       type Value = ChildUnitPermissionArg;

       fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
```
```diff
@@ -1,3 +1,3 @@
 [toolchain]
-channel = "1.82.0"
+channel = "1.85.0"
 components = ["rustfmt", "clippy"]
```
```diff
@@ -3,8 +3,10 @@
 #![allow(clippy::print_stdout)]
 #![allow(clippy::print_stderr)]
 #![allow(clippy::undocumented_unsafe_blocks)]
+#![allow(non_upper_case_globals)]

 use std::os::raw::c_void;
+use std::sync::Mutex;
 use std::thread::sleep;
 use std::time::Duration;

@@ -194,12 +196,13 @@ pub extern "C" fn call_fn_ptr_return_buffer(
   println!("buf: {buf:?}");
 }

-static mut STORED_FUNCTION: Option<extern "C" fn()> = None;
-static mut STORED_FUNCTION_2: Option<extern "C" fn(u8) -> u8> = None;
+static STORED_FUNCTION: Mutex<Option<extern "C" fn()>> = Mutex::new(None);
+static STORED_FUNCTION_2: Mutex<Option<extern "C" fn(u8) -> u8>> =
+  Mutex::new(None);

 #[no_mangle]
 pub extern "C" fn store_function(func: Option<extern "C" fn()>) {
-  unsafe { STORED_FUNCTION = func };
+  *STORED_FUNCTION.lock().unwrap() = func;
   if func.is_none() {
     println!("STORED_FUNCTION cleared");
   }
@@ -207,7 +210,7 @@ pub extern "C" fn store_function(func: Option<extern "C" fn()>) {

 #[no_mangle]
 pub extern "C" fn store_function_2(func: Option<extern "C" fn(u8) -> u8>) {
-  unsafe { STORED_FUNCTION_2 = func };
+  *STORED_FUNCTION_2.lock().unwrap() = func;
   if func.is_none() {
     println!("STORED_FUNCTION_2 cleared");
   }
@@ -215,21 +218,17 @@ pub extern "C" fn store_function_2(func: Option<extern "C" fn(u8) -> u8>) {

 #[no_mangle]
 pub extern "C" fn call_stored_function() {
-  unsafe {
-    if STORED_FUNCTION.is_none() {
-      return;
-    }
-    STORED_FUNCTION.unwrap()();
+  let f = *STORED_FUNCTION.lock().unwrap();
+  if let Some(f) = f {
+    f();
   }
 }

 #[no_mangle]
 pub extern "C" fn call_stored_function_2(arg: u8) {
-  unsafe {
-    if STORED_FUNCTION_2.is_none() {
-      return;
-    }
-    println!("{}", STORED_FUNCTION_2.unwrap()(arg));
+  let f = *STORED_FUNCTION_2.lock().unwrap();
+  if let Some(f) = f {
+    println!("{}", f(arg));
   }
 }

@@ -237,11 +236,9 @@ pub extern "C" fn call_stored_function_2(arg: u8) {
 pub extern "C" fn call_stored_function_thread_safe() {
   std::thread::spawn(move || {
     std::thread::sleep(std::time::Duration::from_millis(1500));
-    unsafe {
-      if STORED_FUNCTION.is_none() {
-        return;
-      }
-      STORED_FUNCTION.unwrap()();
+    let f = *STORED_FUNCTION.lock().unwrap();
+    if let Some(f) = f {
+      f();
     }
   });
 }
@@ -250,11 +247,9 @@ pub extern "C" fn call_stored_function_thread_safe() {
 pub extern "C" fn call_stored_function_thread_safe_and_log() {
   std::thread::spawn(move || {
     std::thread::sleep(std::time::Duration::from_millis(1500));
-    unsafe {
-      if STORED_FUNCTION.is_none() {
-        return;
-      }
-      STORED_FUNCTION.unwrap()();
+    let f = *STORED_FUNCTION.lock().unwrap();
+    if let Some(f) = f {
+      f();
       println!("STORED_FUNCTION called");
     }
   });
@@ -264,12 +259,10 @@ pub extern "C" fn call_stored_function_thread_safe_and_log() {
 pub extern "C" fn call_stored_function_2_thread_safe(arg: u8) {
   std::thread::spawn(move || {
     std::thread::sleep(std::time::Duration::from_millis(1500));
-    unsafe {
-      if STORED_FUNCTION_2.is_none() {
-        return;
-      }
+    let f = *STORED_FUNCTION_2.lock().unwrap();
+    if let Some(f) = f {
       println!("Calling");
-      STORED_FUNCTION_2.unwrap()(arg);
+      f(arg);
     }
   });
 }
```
```diff
@@ -4,5 +4,5 @@
   "envs": {
     "RUST_BACKTRACE": "0"
   },
-  "output": "\n============================================================\nDeno has panicked. This is a bug in Deno. Please report this\nat https://github.com/denoland/deno/issues/new.\nIf you can reliably reproduce this panic, include the\nreproduction steps and re-run with the RUST_BACKTRACE=1 env\nvar set and include the backtrace in your report.\n\nPlatform: [WILDCARD]\nVersion: [WILDCARD]\nArgs: [[WILDCARD], \"test\", \"main.js\"]\n\nthread 'tokio-runtime-worker' panicked at [WILDCARD]testing.rs:[WILDCARD]:\npledge test permissions called before restoring previous pledge\nnote: run with `RUST_BACKTRACE=1` environment variable to display a backtrace\n"
+  "output": "\n============================================================\nDeno has panicked. This is a bug in Deno. Please report this\nat https://github.com/denoland/deno/issues/new.\nIf you can reliably reproduce this panic, include the\nreproduction steps and re-run with the RUST_BACKTRACE=1 env\nvar set and include the backtrace in your report.\n\nPlatform: [WILDCARD]\nVersion: [WILDCARD]\nArgs: [[WILDCARD], \"test\", \"main.js\"]\n\n\nthread 'tokio-runtime-worker' panicked at [WILDCARD]testing.rs:[WILDCARD]:\npledge test permissions called before restoring previous pledge\nnote: run with `RUST_BACKTRACE=1` environment variable to display a backtrace\n"
 }
```
```diff
@@ -249,26 +249,19 @@ Deno.test(
       () => {
         Deno.renameSync(olddir, fulldir);
       },
-      Deno.errors.PermissionDenied,
-      "Access is denied",
+      Error,
+      "The directory is not empty",
     );
     assertThrows(
       () => {
-        Deno.renameSync(olddir, emptydir);
-      },
-      Deno.errors.PermissionDenied,
-      "Access is denied",
-    );
-    assertThrows(
-      () => {
-        Deno.renameSync(olddir, emptydir);
+        Deno.renameSync(olddir, file);
       },
       Error,
-      `rename '${olddir}' -> '${emptydir}'`,
+      "The directory name is invalid",
     );

     // should succeed on Windows
-    Deno.renameSync(olddir, file);
-    assertDirectory(file);
+    Deno.renameSync(olddir, emptydir);
+    assertDirectory(emptydir);
   },
 );
```
```diff
@@ -636,7 +636,7 @@ pub struct CheckOutputIntegrationTest<'a> {
   pub cwd: Option<&'a str>,
 }

-impl<'a> CheckOutputIntegrationTest<'a> {
+impl CheckOutputIntegrationTest<'_> {
   pub fn output(&self) -> TestCommandOutput {
     let mut context_builder = TestContextBuilder::default();
     if self.temp_cwd {
```

```diff
@@ -1094,11 +1094,8 @@ impl LspClient {

   fn write(&mut self, value: Value) {
     let value_str = value.to_string();
-    let msg = format!(
-      "Content-Length: {}\r\n\r\n{}",
-      value_str.as_bytes().len(),
-      value_str
-    );
+    let msg =
+      format!("Content-Length: {}\r\n\r\n{}", value_str.len(), value_str);
     self.writer.write_all(msg.as_bytes()).unwrap();
     self.writer.flush().unwrap();
   }
```

```diff
@@ -353,6 +353,7 @@ fn create_pty(

   // SAFETY: Posix APIs
   unsafe {
+    #[allow(clippy::zombie_processes)]
     let cmd = std::process::Command::new(program)
       .current_dir(cwd)
       .args(args)
```
```diff
@@ -69,7 +69,7 @@ where
   }
 }

-pub async fn run_server_with_acceptor<'a, A, F, S>(
+pub async fn run_server_with_acceptor<A, F, S>(
   mut acceptor: Pin<Box<A>>,
   handler: F,
   error_msg: &'static str,
```

```diff
@@ -450,7 +450,7 @@ async fn absolute_redirect(
 async fn main_server(
   req: Request<hyper::body::Incoming>,
 ) -> Result<Response<UnsyncBoxBody<Bytes, Infallible>>, anyhow::Error> {
-  return match (req.method(), req.uri().path()) {
+  match (req.method(), req.uri().path()) {
     (_, "/echo_server") => {
       let (parts, body) = req.into_parts();
       let mut response = Response::new(UnsyncBoxBody::new(Full::new(
@@ -1098,30 +1098,30 @@ console.log("imported", import.meta.url);
     }
     (&Method::GET, "/upgrade/sleep/release-latest.txt") => {
       tokio::time::sleep(Duration::from_secs(95)).await;
-      return Ok(
+      Ok(
         Response::builder()
           .status(StatusCode::OK)
           .body(string_body("99999.99.99"))
           .unwrap(),
-      );
+      )
     }
     (&Method::GET, "/upgrade/sleep/canary-latest.txt") => {
       tokio::time::sleep(Duration::from_secs(95)).await;
-      return Ok(
+      Ok(
         Response::builder()
           .status(StatusCode::OK)
           .body(string_body("bda3850f84f24b71e02512c1ba2d6bf2e3daa2fd"))
           .unwrap(),
-      );
+      )
     }
     (&Method::GET, "/release-latest.txt") => {
-      return Ok(
+      Ok(
         Response::builder()
           .status(StatusCode::OK)
           // use a deno version that will never happen
           .body(string_body("99999.99.99"))
           .unwrap(),
-      );
+      )
     }
     (
       &Method::GET,
@@ -1133,14 +1133,12 @@ console.log("imported", import.meta.url);
       | "/canary-x86_64-unknown-linux-musl-latest.txt"
       | "/canary-aarch64-unknown-linux-musl-latest.txt"
       | "/canary-x86_64-pc-windows-msvc-latest.txt",
-    ) => {
-      return Ok(
+    ) => Ok(
       Response::builder()
         .status(StatusCode::OK)
        .body(string_body("bda3850f84f24b71e02512c1ba2d6bf2e3daa2fd"))
        .unwrap(),
-      );
-    }
+    ),
     _ => {
       let uri_path = req.uri().path();
       let mut file_path = testdata_path().to_path_buf();
@@ -1171,7 +1169,7 @@ console.log("imported", import.meta.url);
         .body(empty_body())
         .map_err(|e| e.into())
     }
-  };
+  }
 }

 async fn wrap_redirect_server(port: u16) {
```

```diff
@@ -69,7 +69,7 @@ fn run_npm_server<F, S>(
   port: u16,
   error_msg: &'static str,
   handler: F,
-) -> Vec<LocalBoxFuture<()>>
+) -> Vec<LocalBoxFuture<'static, ()>>
 where
   F: Fn(Request<hyper::body::Incoming>) -> S + Copy + 'static,
   S: Future<Output = HandlerOutput> + 'static,
```