Mirror of https://github.com/denoland/deno.git, synced 2025-02-01 20:25:12 -05:00
BREAKING: Include limited metadata in 'DirEntry' objects (#4941)
This change prevents needing a separate stat syscall for each file when using readdir. For consistency, this PR also modifies std's `WalkEntry` interface to extend `DirEntry` with an additional `path` field.
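For illustration only (not part of the commit below): a minimal sketch of what the change looks like to a caller, assuming the post-change `Deno.DirEntry` shape introduced here; the directory and file paths are placeholders.

```ts
// After this change, each directory entry already carries `name` plus the
// isFile / isDirectory / isSymlink flags, so listing a directory and
// branching on entry type needs no per-entry stat syscall.
for (const entry of Deno.readdirSync("./some/dir")) {
  if (entry.isDirectory) {
    console.log(`dir:  ${entry.name}`);
  } else if (entry.isFile) {
    console.log(`file: ${entry.name}`);
  }
}

// The trade-off of the breaking change: size, mtime, mode, etc. are no
// longer on the entry, so callers that need full metadata must stat
// the path explicitly.
const info = Deno.statSync("./some/dir/some_file.ts");
console.log(info.size);
```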
This commit is contained in:
parent 721a4ad59d
commit 3e6ea62841
14 changed files with 193 additions and 206 deletions
cli/js/lib.deno.ns.d.ts (vendored): 5 changes
@@ -1386,8 +1386,11 @@ declare namespace Deno {
    * Requires `allow-read` permission. */
   export function realpath(path: string): Promise<string>;
 
-  export interface DirEntry extends FileInfo {
+  export interface DirEntry {
     name: string;
+    isFile: boolean;
+    isDirectory: boolean;
+    isSymlink: boolean;
   }
 
   /** Synchronously reads the directory given by `path` and returns an iterable
@@ -1,21 +1,19 @@
 // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
 import { sendSync, sendAsync } from "../dispatch_json.ts";
-import { FileInfo, StatResponse, parseFileInfo } from "./stat.ts";
 
-export interface DirEntry extends FileInfo {
+export interface DirEntry {
   name: string;
+  isFile: boolean;
+  isDirectory: boolean;
+  isSymlink: boolean;
 }
 
 interface ReadDirResponse {
-  entries: StatResponse[];
+  entries: DirEntry[];
 }
 
 function res(response: ReadDirResponse): DirEntry[] {
-  return response.entries.map(
-    (statRes: StatResponse): DirEntry => {
-      return { ...parseFileInfo(statRes), name: statRes.name! };
-    }
-  );
+  return response.entries;
 }
 
 export function readdirSync(path: string): Iterable<DirEntry> {
@@ -29,8 +29,6 @@ export interface StatResponse {
   mtime: number | null;
   atime: number | null;
   birthtime: number | null;
-  // Null for stat(), but exists for readdir().
-  name: string | null;
   // Unix only members
   dev: number;
   ino: number;
@@ -4,19 +4,14 @@ import { unitTest, assert, assertEquals } from "./test_util.ts";
 function assertSameContent(files: Deno.DirEntry[]): void {
   let counter = 0;
 
-  for (const file of files) {
-    if (file.name === "subdir") {
-      assert(file.isDirectory);
-      counter++;
-    }
-
-    if (file.name === "002_hello.ts") {
-      assertEquals(file.mode!, Deno.statSync(`cli/tests/${file.name}`).mode!);
+  for (const entry of files) {
+    if (entry.name === "subdir") {
+      assert(entry.isDirectory);
       counter++;
     }
   }
 
-  assertEquals(counter, 2);
+  assertEquals(counter, 1);
 }
 
 unitTest({ perms: { read: true } }, function readdirSyncSuccess(): void {
@@ -460,10 +460,7 @@ fn to_msec(maybe_time: Result<SystemTime, io::Error>) -> serde_json::Value {
 }
 
 #[inline(always)]
-fn get_stat_json(
-  metadata: std::fs::Metadata,
-  maybe_name: Option<String>,
-) -> JsonResult {
+fn get_stat_json(metadata: std::fs::Metadata) -> JsonResult {
   // Unix stat member (number types only). 0 if not on unix.
   macro_rules! usm {
     ($member: ident) => {{
@@ -480,7 +477,7 @@
 
   #[cfg(unix)]
   use std::os::unix::fs::MetadataExt;
-  let mut json_val = json!({
+  let json_val = json!({
     "isFile": metadata.is_file(),
     "isDirectory": metadata.is_dir(),
     "isSymlink": metadata.file_type().is_symlink(),
@@ -502,14 +499,6 @@
     "blksize": usm!(blksize),
     "blocks": usm!(blocks),
   });
-
-  // "name" is an optional field by our design.
-  if let Some(name) = maybe_name {
-    if let serde_json::Value::Object(ref mut m) = json_val {
-      m.insert("name".to_owned(), json!(name));
-    }
-  }
-
   Ok(json_val)
 }
 
@@ -540,7 +529,7 @@ fn op_stat(
     } else {
       std::fs::metadata(&path)?
     };
-    get_stat_json(metadata, None)
+    get_stat_json(metadata)
   })
 }
 
@@ -599,10 +588,15 @@ fn op_read_dir(
     let entries: Vec<_> = std::fs::read_dir(path)?
       .filter_map(|entry| {
         let entry = entry.unwrap();
-        let metadata = entry.metadata().unwrap();
+        let file_type = entry.file_type().unwrap();
         // Not all filenames can be encoded as UTF-8. Skip those for now.
-        if let Ok(filename) = into_string(entry.file_name()) {
-          Some(get_stat_json(metadata, Some(filename)).unwrap())
+        if let Ok(name) = into_string(entry.file_name()) {
+          Some(json!({
+            "name": name,
+            "isFile": file_type.is_file(),
+            "isDirectory": file_type.is_dir(),
+            "isSymlink": file_type.is_symlink()
+          }))
         } else {
           None
         }
@@ -156,8 +156,8 @@ for (const fileInfo of walkSync(".")) {
 
 // Async
 async function printFilesNames() {
-  for await (const fileInfo of walk()) {
-    console.log(fileInfo.filename);
+  for await (const entry of walk()) {
+    console.log(entry.path);
   }
 }
 
@@ -157,15 +157,15 @@ async function copyDir(
     await Deno.utime(dest, srcStatInfo.atime, srcStatInfo.mtime);
   }
 
-  for await (const file of Deno.readdir(src)) {
-    const srcPath = path.join(src, file.name);
+  for await (const entry of Deno.readdir(src)) {
+    const srcPath = path.join(src, entry.name);
     const destPath = path.join(dest, path.basename(srcPath as string));
-    if (file.isDirectory) {
-      await copyDir(srcPath, destPath, options);
-    } else if (file.isFile) {
-      await copyFile(srcPath, destPath, options);
-    } else if (file.isSymlink) {
+    if (entry.isSymlink) {
       await copySymLink(srcPath, destPath, options);
+    } else if (entry.isDirectory) {
+      await copyDir(srcPath, destPath, options);
+    } else if (entry.isFile) {
+      await copyFile(srcPath, destPath, options);
     }
   }
 }
@@ -185,16 +185,16 @@ function copyDirSync(src: string, dest: string, options: CopyOptions): void {
     Deno.utimeSync(dest, srcStatInfo.atime, srcStatInfo.mtime);
   }
 
-  for (const file of Deno.readdirSync(src)) {
-    assert(file.name != null, "file.name must be set");
-    const srcPath = path.join(src, file.name);
+  for (const entry of Deno.readdirSync(src)) {
+    assert(entry.name != null, "file.name must be set");
+    const srcPath = path.join(src, entry.name);
     const destPath = path.join(dest, path.basename(srcPath as string));
-    if (file.isDirectory) {
-      copyDirSync(srcPath, destPath, options);
-    } else if (file.isFile) {
-      copyFileSync(srcPath, destPath, options);
-    } else if (file.isSymlink) {
+    if (entry.isSymlink) {
       copySymlinkSync(srcPath, destPath, options);
+    } else if (entry.isDirectory) {
+      copyDirSync(srcPath, destPath, options);
+    } else if (entry.isFile) {
+      copyFileSync(srcPath, destPath, options);
     }
   }
 }
@@ -229,12 +229,12 @@ export async function copy(
     );
   }
 
-  if (srcStat.isDirectory) {
+  if (srcStat.isSymlink) {
+    await copySymLink(src, dest, options);
+  } else if (srcStat.isDirectory) {
     await copyDir(src, dest, options);
   } else if (srcStat.isFile) {
     await copyFile(src, dest, options);
-  } else if (srcStat.isSymlink) {
-    await copySymLink(src, dest, options);
   }
 }
 
@@ -268,11 +268,11 @@ export function copySync(
     );
   }
 
-  if (srcStat.isDirectory) {
+  if (srcStat.isSymlink) {
+    copySymlinkSync(src, dest, options);
+  } else if (srcStat.isDirectory) {
     copyDirSync(src, dest, options);
   } else if (srcStat.isFile) {
     copyFileSync(src, dest, options);
-  } else if (srcStat.isSymlink) {
-    copySymlinkSync(src, dest, options);
   }
 }
@@ -8,9 +8,15 @@ import {
   joinGlobs,
   normalize,
 } from "../path/mod.ts";
-import { WalkEntry, walk, walkSync } from "./walk.ts";
+import {
+  WalkEntry,
+  createWalkEntry,
+  createWalkEntrySync,
+  walk,
+  walkSync,
+} from "./walk.ts";
 import { assert } from "../testing/asserts.ts";
-const { cwd, stat, statSync } = Deno;
+const { cwd } = Deno;
 type FileInfo = Deno.FileInfo;
 
 export interface ExpandGlobOptions extends GlobOptions {
@@ -48,6 +54,12 @@ function throwUnlessNotFound(error: Error): void {
   }
 }
 
+function comparePath(a: WalkEntry, b: WalkEntry): number {
+  if (a.path < b.path) return -1;
+  if (a.path > b.path) return 1;
+  return 0;
+}
+
 /**
  * Expand the glob string from the specified `root` directory and yield each
  * result as a `WalkEntry` object.
@@ -73,8 +85,8 @@ export async function* expandGlob(
   const excludePatterns = exclude
     .map(resolveFromRoot)
     .map((s: string): RegExp => globToRegExp(s, globOptions));
-  const shouldInclude = (filename: string): boolean =>
-    !excludePatterns.some((p: RegExp): boolean => !!filename.match(p));
+  const shouldInclude = (path: string): boolean =>
+    !excludePatterns.some((p: RegExp): boolean => !!path.match(p));
   const { segments, hasTrailingSep, winRoot } = split(resolveFromRoot(glob));
 
   let fixedRoot = winRoot != undefined ? winRoot : "/";
@@ -86,7 +98,7 @@
 
   let fixedRootInfo: WalkEntry;
   try {
-    fixedRootInfo = { filename: fixedRoot, info: await stat(fixedRoot) };
+    fixedRootInfo = await createWalkEntry(fixedRoot);
   } catch (error) {
     return throwUnlessNotFound(error);
   }
@@ -95,29 +107,29 @@
     walkInfo: WalkEntry,
     globSegment: string
   ): AsyncIterableIterator<WalkEntry> {
-    if (!walkInfo.info.isDirectory) {
+    if (!walkInfo.isDirectory) {
       return;
     } else if (globSegment == "..") {
-      const parentPath = joinGlobs([walkInfo.filename, ".."], globOptions);
+      const parentPath = joinGlobs([walkInfo.path, ".."], globOptions);
       try {
         if (shouldInclude(parentPath)) {
-          return yield { filename: parentPath, info: await stat(parentPath) };
+          return yield await createWalkEntry(parentPath);
         }
       } catch (error) {
         throwUnlessNotFound(error);
       }
       return;
     } else if (globSegment == "**") {
-      return yield* walk(walkInfo.filename, {
+      return yield* walk(walkInfo.path, {
         includeFiles: false,
         skip: excludePatterns,
       });
     }
-    yield* walk(walkInfo.filename, {
+    yield* walk(walkInfo.path, {
       maxDepth: 1,
       match: [
         globToRegExp(
-          joinGlobs([walkInfo.filename, globSegment], globOptions),
+          joinGlobs([walkInfo.path, globSegment], globOptions),
           globOptions
         ),
       ],
@@ -129,27 +141,22 @@
   for (const segment of segments) {
     // Advancing the list of current matches may introduce duplicates, so we
     // pass everything through this Map.
-    const nextMatchMap: Map<string, FileInfo> = new Map();
+    const nextMatchMap: Map<string, WalkEntry> = new Map();
     for (const currentMatch of currentMatches) {
       for await (const nextMatch of advanceMatch(currentMatch, segment)) {
-        nextMatchMap.set(nextMatch.filename, nextMatch.info);
+        nextMatchMap.set(nextMatch.path, nextMatch);
       }
     }
-    currentMatches = [...nextMatchMap].sort().map(
-      ([filename, info]): WalkEntry => ({
-        filename,
-        info,
-      })
-    );
+    currentMatches = [...nextMatchMap.values()].sort(comparePath);
   }
   if (hasTrailingSep) {
     currentMatches = currentMatches.filter(
-      ({ info }): boolean => info.isDirectory
+      (entry: WalkEntry): boolean => entry.isDirectory
     );
   }
   if (!includeDirs) {
     currentMatches = currentMatches.filter(
-      ({ info }): boolean => !info.isDirectory
+      (entry: WalkEntry): boolean => !entry.isDirectory
     );
   }
   yield* currentMatches;
@@ -177,8 +184,8 @@ export function* expandGlobSync(
   const excludePatterns = exclude
     .map(resolveFromRoot)
     .map((s: string): RegExp => globToRegExp(s, globOptions));
-  const shouldInclude = (filename: string): boolean =>
-    !excludePatterns.some((p: RegExp): boolean => !!filename.match(p));
+  const shouldInclude = (path: string): boolean =>
+    !excludePatterns.some((p: RegExp): boolean => !!path.match(p));
   const { segments, hasTrailingSep, winRoot } = split(resolveFromRoot(glob));
 
   let fixedRoot = winRoot != undefined ? winRoot : "/";
@@ -190,7 +197,7 @@
 
   let fixedRootInfo: WalkEntry;
   try {
-    fixedRootInfo = { filename: fixedRoot, info: statSync(fixedRoot) };
+    fixedRootInfo = createWalkEntrySync(fixedRoot);
   } catch (error) {
     return throwUnlessNotFound(error);
   }
@@ -199,29 +206,29 @@
     walkInfo: WalkEntry,
     globSegment: string
   ): IterableIterator<WalkEntry> {
-    if (!walkInfo.info.isDirectory) {
+    if (!walkInfo.isDirectory) {
       return;
     } else if (globSegment == "..") {
-      const parentPath = joinGlobs([walkInfo.filename, ".."], globOptions);
+      const parentPath = joinGlobs([walkInfo.path, ".."], globOptions);
       try {
         if (shouldInclude(parentPath)) {
-          return yield { filename: parentPath, info: statSync(parentPath) };
+          return yield createWalkEntrySync(parentPath);
         }
       } catch (error) {
         throwUnlessNotFound(error);
       }
       return;
     } else if (globSegment == "**") {
-      return yield* walkSync(walkInfo.filename, {
+      return yield* walkSync(walkInfo.path, {
         includeFiles: false,
         skip: excludePatterns,
       });
     }
-    yield* walkSync(walkInfo.filename, {
+    yield* walkSync(walkInfo.path, {
       maxDepth: 1,
       match: [
         globToRegExp(
-          joinGlobs([walkInfo.filename, globSegment], globOptions),
+          joinGlobs([walkInfo.path, globSegment], globOptions),
           globOptions
         ),
       ],
@@ -233,27 +240,22 @@
   for (const segment of segments) {
     // Advancing the list of current matches may introduce duplicates, so we
     // pass everything through this Map.
-    const nextMatchMap: Map<string, FileInfo> = new Map();
+    const nextMatchMap: Map<string, WalkEntry> = new Map();
    for (const currentMatch of currentMatches) {
       for (const nextMatch of advanceMatch(currentMatch, segment)) {
-        nextMatchMap.set(nextMatch.filename, nextMatch.info);
+        nextMatchMap.set(nextMatch.path, nextMatch);
       }
     }
-    currentMatches = [...nextMatchMap].sort().map(
-      ([filename, info]): WalkEntry => ({
-        filename,
-        info,
-      })
-    );
+    currentMatches = [...nextMatchMap.values()].sort(comparePath);
   }
   if (hasTrailingSep) {
     currentMatches = currentMatches.filter(
-      ({ info }): boolean => info.isDirectory
+      (entry: WalkEntry): boolean => entry.isDirectory
     );
   }
   if (!includeDirs) {
     currentMatches = currentMatches.filter(
-      ({ info }): boolean => !info.isDirectory
+      (entry: WalkEntry): boolean => !entry.isDirectory
     );
   }
   yield* currentMatches;
@@ -19,12 +19,12 @@ async function expandGlobArray(
   options: ExpandGlobOptions
 ): Promise<string[]> {
   const paths: string[] = [];
-  for await (const { filename } of expandGlob(globString, options)) {
-    paths.push(filename);
+  for await (const { path } of expandGlob(globString, options)) {
+    paths.push(path);
   }
   paths.sort();
   const pathsSync = [...expandGlobSync(globString, options)].map(
-    ({ filename }): string => filename
+    ({ path }): string => path
   );
   pathsSync.sort();
   assertEquals(paths, pathsSync);
@@ -2,9 +2,35 @@
 // https://golang.org/pkg/path/filepath/#Walk
 // Copyright 2009 The Go Authors. All rights reserved. BSD license.
 import { unimplemented, assert } from "../testing/asserts.ts";
-import { join } from "../path/mod.ts";
+import { basename, join, normalize } from "../path/mod.ts";
 const { readdir, readdirSync, stat, statSync } = Deno;
 
+export function createWalkEntrySync(path: string): WalkEntry {
+  path = normalize(path);
+  const name = basename(path);
+  const info = statSync(path);
+  return {
+    path,
+    name,
+    isFile: info.isFile,
+    isDirectory: info.isDirectory,
+    isSymlink: info.isSymlink,
+  };
+}
+
+export async function createWalkEntry(path: string): Promise<WalkEntry> {
+  path = normalize(path);
+  const name = basename(path);
+  const info = await stat(path);
+  return {
+    path,
+    name,
+    isFile: info.isFile,
+    isDirectory: info.isDirectory,
+    isSymlink: info.isSymlink,
+  };
+}
+
 export interface WalkOptions {
   maxDepth?: number;
   includeFiles?: boolean;
@@ -16,26 +42,25 @@ export interface WalkOptions {
 }
 
 function include(
-  filename: string,
+  path: string,
   exts?: string[],
   match?: RegExp[],
   skip?: RegExp[]
 ): boolean {
-  if (exts && !exts.some((ext): boolean => filename.endsWith(ext))) {
+  if (exts && !exts.some((ext): boolean => path.endsWith(ext))) {
     return false;
   }
-  if (match && !match.some((pattern): boolean => !!filename.match(pattern))) {
+  if (match && !match.some((pattern): boolean => !!path.match(pattern))) {
     return false;
   }
-  if (skip && skip.some((pattern): boolean => !!filename.match(pattern))) {
+  if (skip && skip.some((pattern): boolean => !!path.match(pattern))) {
     return false;
   }
   return true;
 }
 
-export interface WalkEntry {
-  filename: string;
-  info: Deno.FileInfo;
+export interface WalkEntry extends Deno.DirEntry {
+  path: string;
 }
 
 /** Walks the file tree rooted at root, yielding each file or directory in the
@@ -52,8 +77,8 @@ export interface WalkEntry {
  * - match?: RegExp[];
  * - skip?: RegExp[];
  *
- * for await (const { filename, info } of walk(".")) {
- *   console.log(filename);
+ * for await (const { name, info } of walk(".")) {
+ *   console.log(name);
  *   assert(info.isFile);
  * };
  */
@@ -73,13 +98,13 @@ export async function* walk(
     return;
   }
   if (includeDirs && include(root, exts, match, skip)) {
-    yield { filename: root, info: await stat(root) };
+    yield await createWalkEntry(root);
   }
   if (maxDepth < 1 || !include(root, undefined, undefined, skip)) {
     return;
   }
-  for await (const dirEntry of readdir(root)) {
-    if (dirEntry.isSymlink) {
+  for await (const entry of readdir(root)) {
+    if (entry.isSymlink) {
       if (followSymlinks) {
         // TODO(ry) Re-enable followSymlinks.
         unimplemented();
@@ -88,14 +113,15 @@
       }
     }
 
-    const filename = join(root, dirEntry.name);
+    assert(entry.name != null);
+    const path = join(root, entry.name);
 
-    if (dirEntry.isFile) {
-      if (includeFiles && include(filename, exts, match, skip)) {
-        yield { filename, info: dirEntry };
+    if (entry.isFile) {
+      if (includeFiles && include(path, exts, match, skip)) {
+        yield { path, ...entry };
       }
     } else {
-      yield* walk(filename, {
+      yield* walk(path, {
         maxDepth: maxDepth - 1,
         includeFiles,
         includeDirs,
@@ -125,13 +151,13 @@ export function* walkSync(
     return;
   }
   if (includeDirs && include(root, exts, match, skip)) {
-    yield { filename: root, info: statSync(root) };
+    yield createWalkEntrySync(root);
   }
   if (maxDepth < 1 || !include(root, undefined, undefined, skip)) {
     return;
   }
-  for (const dirEntry of readdirSync(root)) {
-    if (dirEntry.isSymlink) {
+  for (const entry of readdirSync(root)) {
+    if (entry.isSymlink) {
       if (followSymlinks) {
         unimplemented();
       } else {
@@ -139,15 +165,15 @@
       }
     }
 
-    assert(dirEntry.name != null);
-    const filename = join(root, dirEntry.name);
+    assert(entry.name != null);
+    const path = join(root, entry.name);
 
-    if (dirEntry.isFile) {
-      if (includeFiles && include(filename, exts, match, skip)) {
-        yield { filename, info: dirEntry };
+    if (entry.isFile) {
+      if (includeFiles && include(path, exts, match, skip)) {
+        yield { path, ...entry };
       }
     } else {
-      yield* walkSync(filename, {
+      yield* walkSync(path, {
         maxDepth: maxDepth - 1,
         includeFiles,
         includeDirs,
@@ -24,8 +24,8 @@ export function testWalk(
   Deno.test({ ignore, name: `[walk] ${name}`, fn });
 }
 
-function normalize({ filename }: WalkEntry): string {
-  return filename.replace(/\\/g, "/");
+function normalize({ path }: WalkEntry): string {
+  return path.replace(/\\/g, "/");
 }
 
 export async function walkArray(
@@ -140,22 +140,22 @@ async function serveDir(
 ): Promise<Response> {
   const dirUrl = `/${posix.relative(target, dirPath)}`;
   const listEntry: EntryInfo[] = [];
-  for await (const dirEntry of readdir(dirPath)) {
-    const filePath = posix.join(dirPath, dirEntry.name);
-    const fileUrl = posix.join(dirUrl, dirEntry.name);
-    if (dirEntry.name === "index.html" && dirEntry.isFile) {
+  for await (const entry of readdir(dirPath)) {
+    const filePath = posix.join(dirPath, entry.name);
+    const fileUrl = posix.join(dirUrl, entry.name);
+    if (entry.name === "index.html" && entry.isFile) {
       // in case index.html as dir...
       return serveFile(req, filePath);
     }
     // Yuck!
-    let mode = null;
+    let fileInfo = null;
     try {
-      mode = (await stat(filePath)).mode;
+      fileInfo = await stat(filePath);
     } catch (e) {}
     listEntry.push({
-      mode: modeToString(dirEntry.isDirectory, mode),
-      size: dirEntry.isFile ? fileLenToString(dirEntry.size) : "",
-      name: dirEntry.name,
+      mode: modeToString(entry.isDirectory, fileInfo?.mode ?? null),
+      size: entry.isFile ? fileLenToString(fileInfo?.size ?? 0) : "",
+      name: entry.name,
       url: fileUrl,
     });
   }
@@ -331,8 +331,8 @@ function main(): void {
 
     let response: Response | undefined;
     try {
-      const info = await stat(fsPath);
-      if (info.isDirectory) {
+      const fileInfo = await stat(fsPath);
+      if (fileInfo.isDirectory) {
         response = await serveDir(req, fsPath);
       } else {
         response = await serveFile(req, fsPath);
@@ -4,11 +4,15 @@ export default class Dirent {
   constructor(private entry: Deno.DirEntry) {}
 
   isBlockDevice(): boolean {
-    return this.entry.blocks != null;
+    notImplemented("Deno does not yet support identification of block devices");
+    return false;
   }
 
   isCharacterDevice(): boolean {
-    return this.entry.blocks == null;
+    notImplemented(
+      "Deno does not yet support identification of character devices"
+    );
+    return false;
   }
 
   isDirectory(): boolean {
@@ -3,107 +3,74 @@ import { assert, assertEquals, assertThrows } from "../../testing/asserts.ts";
 import Dirent from "./_fs_dirent.ts";
 
 class DirEntryMock implements Deno.DirEntry {
+  name = "";
   isFile = false;
   isDirectory = false;
   isSymlink = false;
-  size = -1;
-  mtime = new Date(-1);
-  atime = new Date(-1);
-  birthtime = new Date(-1);
-  name = "";
-  dev = -1;
-  ino = -1;
-  mode = -1;
-  nlink = -1;
-  uid = -1;
-  gid = -1;
-  rdev = -1;
-  blksize = -1;
-  blocks: number | null = null;
 }
 
-test({
-  name: "Block devices are correctly identified",
-  fn() {
-    const fileInfo: DirEntryMock = new DirEntryMock();
-    fileInfo.blocks = 5;
-    assert(new Dirent(fileInfo).isBlockDevice());
-    assert(!new Dirent(fileInfo).isCharacterDevice());
-  },
-});
-
-test({
-  name: "Character devices are correctly identified",
-  fn() {
-    const fileInfo: DirEntryMock = new DirEntryMock();
-    fileInfo.blocks = null;
-    assert(new Dirent(fileInfo).isCharacterDevice());
-    assert(!new Dirent(fileInfo).isBlockDevice());
-  },
-});
-
 test({
   name: "Directories are correctly identified",
   fn() {
-    const fileInfo: DirEntryMock = new DirEntryMock();
-    fileInfo.isDirectory = true;
-    fileInfo.isFile = false;
-    fileInfo.isSymlink = false;
-    assert(new Dirent(fileInfo).isDirectory());
-    assert(!new Dirent(fileInfo).isFile());
-    assert(!new Dirent(fileInfo).isSymbolicLink());
+    const entry: DirEntryMock = new DirEntryMock();
+    entry.isDirectory = true;
+    entry.isFile = false;
+    entry.isSymlink = false;
+    assert(new Dirent(entry).isDirectory());
+    assert(!new Dirent(entry).isFile());
+    assert(!new Dirent(entry).isSymbolicLink());
   },
 });
 
 test({
   name: "Files are correctly identified",
   fn() {
-    const fileInfo: DirEntryMock = new DirEntryMock();
-    fileInfo.isDirectory = false;
-    fileInfo.isFile = true;
-    fileInfo.isSymlink = false;
-    assert(!new Dirent(fileInfo).isDirectory());
-    assert(new Dirent(fileInfo).isFile());
-    assert(!new Dirent(fileInfo).isSymbolicLink());
+    const entry: DirEntryMock = new DirEntryMock();
+    entry.isDirectory = false;
+    entry.isFile = true;
+    entry.isSymlink = false;
+    assert(!new Dirent(entry).isDirectory());
+    assert(new Dirent(entry).isFile());
+    assert(!new Dirent(entry).isSymbolicLink());
   },
 });
 
 test({
   name: "Symlinks are correctly identified",
   fn() {
-    const fileInfo: DirEntryMock = new DirEntryMock();
-    fileInfo.isDirectory = false;
-    fileInfo.isFile = false;
-    fileInfo.isSymlink = true;
-    assert(!new Dirent(fileInfo).isDirectory());
-    assert(!new Dirent(fileInfo).isFile());
-    assert(new Dirent(fileInfo).isSymbolicLink());
+    const entry: DirEntryMock = new DirEntryMock();
+    entry.isDirectory = false;
+    entry.isFile = false;
+    entry.isSymlink = true;
+    assert(!new Dirent(entry).isDirectory());
+    assert(!new Dirent(entry).isFile());
+    assert(new Dirent(entry).isSymbolicLink());
   },
 });
 
 test({
   name: "File name is correct",
   fn() {
-    const fileInfo: DirEntryMock = new DirEntryMock();
-    fileInfo.name = "my_file";
-    assertEquals(new Dirent(fileInfo).name, "my_file");
+    const entry: DirEntryMock = new DirEntryMock();
+    entry.name = "my_file";
+    assertEquals(new Dirent(entry).name, "my_file");
  },
 });
 
 test({
   name: "Socket and FIFO pipes aren't yet available",
   fn() {
-    const fileInfo: DirEntryMock = new DirEntryMock();
+    const entry: DirEntryMock = new DirEntryMock();
     assertThrows(
       () => {
-        new Dirent(fileInfo).isFIFO();
+        new Dirent(entry).isFIFO();
       },
       Error,
       "does not yet support"
     );
     assertThrows(
       () => {
-        new Dirent(fileInfo).isSocket();
+        new Dirent(entry).isSocket();
       },
       Error,
       "does not yet support"