
refactor(op_crate/fetch): align streams to spec (#9103)

Fixes #8814
Kitson Kelly 2021-01-15 08:57:19 +11:00 committed by GitHub
parent 2d1208556a
commit b8303c7812
13 changed files with 3225 additions and 3678 deletions


@@ -1047,9 +1047,13 @@ unitTest(
const buf = bufferServer(addr);
const stream = new TransformStream();
const writer = stream.writable.getWriter();
await writer.write(new TextEncoder().encode("hello "));
await writer.write(new TextEncoder().encode("world"));
await writer.close();
// transformer writes don't resolve until they are read, so awaiting these
// would suspend the transformer and cause it to hang; it is also illogical
// to await them, as the whole point of streams is to have a "queue" which
// gets drained...
writer.write(new TextEncoder().encode("hello "));
writer.write(new TextEncoder().encode("world"));
writer.close();
const response = await fetch(`http://${addr}/blah`, {
method: "POST",
headers: [

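For context on the comment above, a minimal standalone sketch (not part of the diff, assuming Deno with top-level await): a default TransformStream starts with backpressure because its readable side has a highWaterMark of 0, so write() promises only settle once the readable side is being read. Awaiting them before anything reads would deadlock, which is why the test now queues the writes and closes without awaiting; the read loop below stands in for what fetch() does with the request body.

const stream = new TransformStream<Uint8Array, Uint8Array>();
const writer = stream.writable.getWriter();
writer.write(new TextEncoder().encode("hello "));
writer.write(new TextEncoder().encode("world"));
writer.close();

// Draining the readable side lets the queued writes (and the close) resolve.
const reader = stream.readable.getReader();
const decoder = new TextDecoder();
let body = "";
let res = await reader.read();
while (!res.done) {
  body += decoder.decode(res.value, { stream: true });
  res = await reader.read();
}
console.log(body); // "hello world"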

@@ -1,72 +0,0 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
import { assertThrows, unitTest } from "./test_util.ts";
unitTest(function streamReadableHwmError() {
// deno-lint-ignore no-explicit-any
const invalidHwm: any[] = [NaN, Number("NaN"), {}, -1, "two"];
for (const highWaterMark of invalidHwm) {
assertThrows(
() => {
new ReadableStream<number>(undefined, { highWaterMark });
},
RangeError,
"highWaterMark must be a positive number or Infinity. Received:",
);
}
assertThrows(() => {
new ReadableStream<number>(
undefined,
// deno-lint-ignore no-explicit-any
{ highWaterMark: Symbol("hwk") as any },
);
}, TypeError);
});
unitTest(function streamWriteableHwmError() {
// deno-lint-ignore no-explicit-any
const invalidHwm: any[] = [NaN, Number("NaN"), {}, -1, "two"];
for (const highWaterMark of invalidHwm) {
assertThrows(
() => {
new WritableStream(
undefined,
new CountQueuingStrategy({ highWaterMark }),
);
},
RangeError,
"highWaterMark must be a positive number or Infinity. Received:",
);
}
assertThrows(() => {
new WritableStream(
undefined,
// deno-lint-ignore no-explicit-any
new CountQueuingStrategy({ highWaterMark: Symbol("hwmk") as any }),
);
}, TypeError);
});
unitTest(function streamTransformHwmError() {
// deno-lint-ignore no-explicit-any
const invalidHwm: any[] = [NaN, Number("NaN"), {}, -1, "two"];
for (const highWaterMark of invalidHwm) {
assertThrows(
() => {
new TransformStream(undefined, undefined, { highWaterMark });
},
RangeError,
"highWaterMark must be a positive number or Infinity. Received:",
);
}
assertThrows(() => {
new TransformStream(
undefined,
undefined,
// deno-lint-ignore no-explicit-any
{ highWaterMark: Symbol("hwmk") as any },
);
}, TypeError);
});


@@ -1,131 +0,0 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
import { assert, assertEquals, unitTest } from "./test_util.ts";
import { assertThrowsAsync } from "../../../std/testing/asserts.ts";
unitTest(function streamPipeLocks() {
const rs = new ReadableStream();
const ws = new WritableStream();
assertEquals(rs.locked, false);
assertEquals(ws.locked, false);
rs.pipeTo(ws);
assert(rs.locked);
assert(ws.locked);
});
unitTest(async function streamPipeFinishUnlocks() {
const rs = new ReadableStream({
start(controller: ReadableStreamDefaultController): void {
controller.close();
},
});
const ws = new WritableStream();
await rs.pipeTo(ws);
assertEquals(rs.locked, false);
assertEquals(ws.locked, false);
});
unitTest(async function streamPipeReadableStreamLocked() {
const rs = new ReadableStream();
const ws = new WritableStream();
rs.getReader();
await assertThrowsAsync(async () => {
await rs.pipeTo(ws);
}, TypeError);
});
unitTest(async function streamPipeReadableStreamLocked() {
const rs = new ReadableStream();
const ws = new WritableStream();
ws.getWriter();
await assertThrowsAsync(async () => {
await rs.pipeTo(ws);
}, TypeError);
});
unitTest(async function streamPipeLotsOfChunks() {
const CHUNKS = 10;
const rs = new ReadableStream<number>({
start(c: ReadableStreamDefaultController): void {
for (let i = 0; i < CHUNKS; ++i) {
c.enqueue(i);
}
c.close();
},
});
const written: Array<string | number> = [];
const ws = new WritableStream(
{
write(chunk: number): void {
written.push(chunk);
},
close(): void {
written.push("closed");
},
},
new CountQueuingStrategy({ highWaterMark: CHUNKS }),
);
await rs.pipeTo(ws);
const targetValues = [];
for (let i = 0; i < CHUNKS; ++i) {
targetValues.push(i);
}
targetValues.push("closed");
assertEquals(written, targetValues, "the correct values must be written");
// Ensure both readable and writable are closed by the time the pipe finishes.
await Promise.all([rs.getReader().closed, ws.getWriter().closed]);
});
for (const preventAbort of [true, false]) {
unitTest(function undefinedRejectionFromPull() {
const rs = new ReadableStream({
pull(): Promise<void> {
return Promise.reject(undefined);
},
});
return rs.pipeTo(new WritableStream(), { preventAbort }).then(
() => {
throw new Error("pipeTo promise should be rejected");
},
(value) =>
assertEquals(value, undefined, "rejection value should be undefined"),
);
});
}
for (const preventCancel of [true, false]) {
unitTest(function undefinedRejectionWithPreventCancel() {
const rs = new ReadableStream({
pull(controller: ReadableStreamDefaultController<number>): void {
controller.enqueue(0);
},
});
const ws = new WritableStream({
write(): Promise<void> {
return Promise.reject(undefined);
},
});
return rs.pipeTo(ws, { preventCancel }).then(
() => {
throw new Error("pipeTo promise should be rejected");
},
(value) =>
assertEquals(value, undefined, "rejection value should be undefined"),
);
});
}


@@ -1,562 +0,0 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
import {
assert,
assertEquals,
assertNotEquals,
assertThrows,
unitTest,
} from "./test_util.ts";
function delay(seconds: number): Promise<void> {
return new Promise<void>((resolve) => {
setTimeout(() => {
resolve();
}, seconds);
});
}
function readableStreamToArray<R>(
readable: { getReader(): ReadableStreamDefaultReader<R> },
reader?: ReadableStreamDefaultReader<R>,
): Promise<R[]> {
if (reader === undefined) {
reader = readable.getReader();
}
const chunks: R[] = [];
return pump();
function pump(): Promise<R[]> {
return reader!.read().then((result) => {
if (result.done) {
return chunks;
}
chunks.push(result.value);
return pump();
});
}
}
unitTest(function transformStreamConstructedWithTransformFunction() {
new TransformStream({ transform(): void {} });
});
unitTest(function transformStreamConstructedNoTransform() {
new TransformStream();
new TransformStream({});
});
unitTest(function transformStreamIntstancesHaveProperProperties() {
const ts = new TransformStream({ transform(): void {} });
const proto = Object.getPrototypeOf(ts);
const writableStream = Object.getOwnPropertyDescriptor(proto, "writable");
assert(writableStream !== undefined, "it has a writable property");
assert(!writableStream.enumerable, "writable should be non-enumerable");
assertEquals(
typeof writableStream.get,
"function",
"writable should have a getter",
);
assertEquals(
writableStream.set,
undefined,
"writable should not have a setter",
);
assert(writableStream.configurable, "writable should be configurable");
assert(
ts.writable instanceof WritableStream,
"writable is an instance of WritableStream",
);
assert(
WritableStream.prototype.getWriter.call(ts.writable),
"writable should pass WritableStream brand check",
);
const readableStream = Object.getOwnPropertyDescriptor(proto, "readable");
assert(readableStream !== undefined, "it has a readable property");
assert(!readableStream.enumerable, "readable should be non-enumerable");
assertEquals(
typeof readableStream.get,
"function",
"readable should have a getter",
);
assertEquals(
readableStream.set,
undefined,
"readable should not have a setter",
);
assert(readableStream.configurable, "readable should be configurable");
assert(
ts.readable instanceof ReadableStream,
"readable is an instance of ReadableStream",
);
assertNotEquals(
ReadableStream.prototype.getReader.call(ts.readable),
undefined,
"readable should pass ReadableStream brand check",
);
});
unitTest(function transformStreamWritableStartsAsWritable() {
const ts = new TransformStream({ transform(): void {} });
const writer = ts.writable.getWriter();
assertEquals(writer.desiredSize, 1, "writer.desiredSize should be 1");
});
unitTest(async function transformStreamReadableCanReadOutOfWritable() {
const ts = new TransformStream();
const writer = ts.writable.getWriter();
writer.write("a");
assertEquals(
writer.desiredSize,
0,
"writer.desiredSize should be 0 after write()",
);
const result = await ts.readable.getReader().read();
assertEquals(
result.value,
"a",
"result from reading the readable is the same as was written to writable",
);
assert(!result.done, "stream should not be done");
await delay(0);
assert(writer.desiredSize === 1, "desiredSize should be 1 again");
});
unitTest(async function transformStreamCanReadWhatIsWritten() {
let c: TransformStreamDefaultController;
const ts = new TransformStream({
start(controller: TransformStreamDefaultController): void {
c = controller;
},
transform(chunk: string): void {
c.enqueue(chunk.toUpperCase());
},
});
const writer = ts.writable.getWriter();
writer.write("a");
const result = await ts.readable.getReader().read();
assertEquals(
result.value,
"A",
"result from reading the readable is the transformation of what was written to writable",
);
assert(!result.done, "stream should not be done");
});
unitTest(async function transformStreamCanReadBothChunks() {
let c: TransformStreamDefaultController;
const ts = new TransformStream({
start(controller: TransformStreamDefaultController): void {
c = controller;
},
transform(chunk: string): void {
c.enqueue(chunk.toUpperCase());
c.enqueue(chunk.toUpperCase());
},
});
const writer = ts.writable.getWriter();
writer.write("a");
const reader = ts.readable.getReader();
const result1 = await reader.read();
assertEquals(
result1.value,
"A",
"the first chunk read is the transformation of the single chunk written",
);
assert(!result1.done, "stream should not be done");
const result2 = await reader.read();
assertEquals(
result2.value,
"A",
"the second chunk read is also the transformation of the single chunk written",
);
assert(!result2.done, "stream should not be done");
});
unitTest(async function transformStreamCanReadWhatIsWritten() {
let c: TransformStreamDefaultController;
const ts = new TransformStream({
start(controller: TransformStreamDefaultController): void {
c = controller;
},
transform(chunk: string): Promise<void> {
return delay(0).then(() => c.enqueue(chunk.toUpperCase()));
},
});
const writer = ts.writable.getWriter();
writer.write("a");
const result = await ts.readable.getReader().read();
assertEquals(
result.value,
"A",
"result from reading the readable is the transformation of what was written to writable",
);
assert(!result.done, "stream should not be done");
});
unitTest(async function transformStreamAsyncReadMultipleChunks() {
let doSecondEnqueue: () => void;
let returnFromTransform: () => void;
const ts = new TransformStream({
transform(
chunk: string,
controller: TransformStreamDefaultController,
): Promise<void> {
delay(0).then(() => controller.enqueue(chunk.toUpperCase()));
doSecondEnqueue = (): void => controller.enqueue(chunk.toUpperCase());
return new Promise((resolve) => {
returnFromTransform = resolve;
});
},
});
const reader = ts.readable.getReader();
const writer = ts.writable.getWriter();
writer.write("a");
const result1 = await reader.read();
assertEquals(
result1.value,
"A",
"the first chunk read is the transformation of the single chunk written",
);
assert(!result1.done, "stream should not be done");
doSecondEnqueue!();
const result2 = await reader.read();
assertEquals(
result2.value,
"A",
"the second chunk read is also the transformation of the single chunk written",
);
assert(!result2.done, "stream should not be done");
returnFromTransform!();
});
unitTest(function transformStreamClosingWriteClosesRead() {
const ts = new TransformStream({ transform(): void {} });
const writer = ts.writable.getWriter();
writer.close();
return Promise.all([writer.closed, ts.readable.getReader().closed]).then(
undefined,
);
});
unitTest(async function transformStreamCloseWaitAwaitsTransforms() {
let transformResolve: () => void;
const transformPromise = new Promise<void>((resolve) => {
transformResolve = resolve;
});
const ts = new TransformStream(
{
transform(): Promise<void> {
return transformPromise;
},
},
undefined,
{ highWaterMark: 1 },
);
const writer = ts.writable.getWriter();
writer.write("a");
writer.close();
let rsClosed = false;
ts.readable.getReader().closed.then(() => {
rsClosed = true;
});
await delay(0);
assertEquals(rsClosed, false, "readable is not closed after a tick");
transformResolve!();
await writer.closed;
// TODO: Is this expectation correct?
assertEquals(rsClosed, true, "readable is closed at that point");
});
unitTest(async function transformStreamCloseWriteAfterSyncEnqueues() {
let c: TransformStreamDefaultController<string>;
const ts = new TransformStream<string, string>({
start(controller: TransformStreamDefaultController): void {
c = controller;
},
transform(): Promise<void> {
c.enqueue("x");
c.enqueue("y");
return delay(0);
},
});
const writer = ts.writable.getWriter();
writer.write("a");
writer.close();
const readableChunks = readableStreamToArray(ts.readable);
await writer.closed;
const chunks = await readableChunks;
assertEquals(
chunks,
["x", "y"],
"both enqueued chunks can be read from the readable",
);
});
unitTest(async function transformStreamWritableCloseAsyncAfterAsyncEnqueues() {
let c: TransformStreamDefaultController<string>;
const ts = new TransformStream<string, string>({
start(controller: TransformStreamDefaultController<string>): void {
c = controller;
},
transform(): Promise<void> {
return delay(0)
.then(() => c.enqueue("x"))
.then(() => c.enqueue("y"))
.then(() => delay(0));
},
});
const writer = ts.writable.getWriter();
writer.write("a");
writer.close();
const readableChunks = readableStreamToArray(ts.readable);
await writer.closed;
const chunks = await readableChunks;
assertEquals(
chunks,
["x", "y"],
"both enqueued chunks can be read from the readable",
);
});
unitTest(async function transformStreamTransformerMethodsCalledAsMethods() {
let c: TransformStreamDefaultController<string>;
const transformer = {
suffix: "-suffix",
start(controller: TransformStreamDefaultController<string>): void {
c = controller;
c.enqueue("start" + this.suffix);
},
transform(chunk: string): void {
c.enqueue(chunk + this.suffix);
},
flush(): void {
c.enqueue("flushed" + this.suffix);
},
};
const ts = new TransformStream(transformer);
const writer = ts.writable.getWriter();
writer.write("a");
writer.close();
const readableChunks = readableStreamToArray(ts.readable);
await writer.closed;
const chunks = await readableChunks;
assertEquals(
chunks,
["start-suffix", "a-suffix", "flushed-suffix"],
"all enqueued chunks have suffixes",
);
});
unitTest(async function transformStreamMethodsShouldNotBeAppliedOrCalled() {
function functionWithOverloads(): void {}
functionWithOverloads.apply = (): void => {
throw new Error("apply() should not be called");
};
functionWithOverloads.call = (): void => {
throw new Error("call() should not be called");
};
const ts = new TransformStream({
start: functionWithOverloads,
transform: functionWithOverloads,
flush: functionWithOverloads,
});
const writer = ts.writable.getWriter();
writer.write("a");
writer.close();
await readableStreamToArray(ts.readable);
});
unitTest(async function transformStreamCallTransformSync() {
let transformCalled = false;
const ts = new TransformStream(
{
transform(): void {
transformCalled = true;
},
},
undefined,
{ highWaterMark: Infinity },
);
// transform() is only called synchronously when there is no backpressure and
// all microtasks have run.
await delay(0);
const writePromise = ts.writable.getWriter().write(undefined);
assert(transformCalled, "transform() should have been called");
await writePromise;
});
unitTest(function transformStreamCloseWriteCloesesReadWithNoChunks() {
const ts = new TransformStream({}, undefined, { highWaterMark: 0 });
const writer = ts.writable.getWriter();
writer.close();
return Promise.all([writer.closed, ts.readable.getReader().closed]).then(
undefined,
);
});
unitTest(function transformStreamEnqueueThrowsAfterTerminate() {
new TransformStream({
start(controller: TransformStreamDefaultController): void {
controller.terminate();
assertThrows(() => {
controller.enqueue(undefined);
}, TypeError);
},
});
});
unitTest(function transformStreamEnqueueThrowsAfterReadableCancel() {
let controller: TransformStreamDefaultController;
const ts = new TransformStream({
start(c: TransformStreamDefaultController): void {
controller = c;
},
});
const cancelPromise = ts.readable.cancel();
assertThrows(
() => controller.enqueue(undefined),
TypeError,
undefined,
"enqueue should throw",
);
return cancelPromise;
});
unitTest(function transformStreamSecondTerminateNoOp() {
new TransformStream({
start(controller: TransformStreamDefaultController): void {
controller.terminate();
controller.terminate();
},
});
});
unitTest(async function transformStreamTerminateAfterReadableCancelIsNoop() {
let controller: TransformStreamDefaultController;
const ts = new TransformStream({
start(c: TransformStreamDefaultController): void {
controller = c;
},
});
const cancelReason = { name: "cancelReason" };
const cancelPromise = ts.readable.cancel(cancelReason);
controller!.terminate();
await cancelPromise;
try {
await ts.writable.getWriter().closed;
} catch (e) {
assert(e === cancelReason);
return;
}
throw new Error("closed should have rejected");
});
unitTest(async function transformStreamStartCalledOnce() {
let calls = 0;
new TransformStream({
start(): void {
++calls;
},
});
await delay(0);
assertEquals(calls, 1, "start() should have been called exactly once");
});
unitTest(function transformStreamReadableTypeThrows() {
assertThrows(
// deno-lint-ignore no-explicit-any
() => new TransformStream({ readableType: "bytes" as any }),
RangeError,
undefined,
"constructor should throw",
);
});
unitTest(function transformStreamWirtableTypeThrows() {
assertThrows(
// deno-lint-ignore no-explicit-any
() => new TransformStream({ writableType: "bytes" as any }),
RangeError,
undefined,
"constructor should throw",
);
});
unitTest(function transformStreamSubclassable() {
class Subclass extends TransformStream {
extraFunction(): boolean {
return true;
}
}
assert(
Object.getPrototypeOf(Subclass.prototype) === TransformStream.prototype,
"Subclass.prototype's prototype should be TransformStream.prototype",
);
assert(
Object.getPrototypeOf(Subclass) === TransformStream,
"Subclass's prototype should be TransformStream",
);
const sub = new Subclass();
assert(
sub instanceof TransformStream,
"Subclass object should be an instance of TransformStream",
);
assert(
sub instanceof Subclass,
"Subclass object should be an instance of Subclass",
);
const readableGetter = Object.getOwnPropertyDescriptor(
TransformStream.prototype,
"readable",
)!.get;
assert(
readableGetter!.call(sub) === sub.readable,
"Subclass object should pass brand check",
);
assert(
sub.extraFunction(),
"extraFunction() should be present on Subclass object",
);
});


@@ -1,253 +0,0 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
import { assert, assertEquals, assertThrows, unitTest } from "./test_util.ts";
unitTest(function writableStreamDesiredSizeOnReleasedWriter() {
const ws = new WritableStream();
const writer = ws.getWriter();
writer.releaseLock();
assertThrows(() => {
writer.desiredSize;
}, TypeError);
});
unitTest(function writableStreamDesiredSizeInitialValue() {
const ws = new WritableStream();
const writer = ws.getWriter();
assertEquals(writer.desiredSize, 1);
});
unitTest(async function writableStreamDesiredSizeClosed() {
const ws = new WritableStream();
const writer = ws.getWriter();
await writer.close();
assertEquals(writer.desiredSize, 0);
});
unitTest(function writableStreamStartThrowsDesiredSizeNull() {
const ws = new WritableStream({
start(c): void {
c.error();
},
});
const writer = ws.getWriter();
assertEquals(writer.desiredSize, null, "desiredSize should be null");
});
unitTest(function getWriterOnClosingStream() {
const ws = new WritableStream({});
const writer = ws.getWriter();
writer.close();
writer.releaseLock();
ws.getWriter();
});
unitTest(async function getWriterOnClosedStream() {
const ws = new WritableStream({});
const writer = ws.getWriter();
await writer.close();
writer.releaseLock();
ws.getWriter();
});
unitTest(function getWriterOnAbortedStream() {
const ws = new WritableStream({});
const writer = ws.getWriter();
writer.abort();
writer.releaseLock();
ws.getWriter();
});
unitTest(function getWriterOnErroredStream() {
const ws = new WritableStream({
start(c): void {
c.error();
},
});
const writer = ws.getWriter();
return writer.closed.then(
(v) => {
throw new Error(`writer.closed fulfilled unexpectedly with: ${v}`);
},
() => {
writer.releaseLock();
ws.getWriter();
},
);
});
unitTest(function closedAndReadyOnReleasedWriter() {
const ws = new WritableStream({});
const writer = ws.getWriter();
writer.releaseLock();
return writer.closed.then(
(v) => {
throw new Error("writer.closed fulfilled unexpectedly with: " + v);
},
(closedRejection) => {
assertEquals(
closedRejection.name,
"TypeError",
"closed promise should reject with a TypeError",
);
return writer.ready.then(
(v) => {
throw new Error("writer.ready fulfilled unexpectedly with: " + v);
},
(readyRejection) =>
assertEquals(
readyRejection,
closedRejection,
"ready promise should reject with the same error",
),
);
},
);
});
unitTest(function sinkMethodsCalledAsMethods() {
let thisObject: Sink | null = null;
// Calls to Sink methods after the first are implicitly ignored. Only the
// first value that is passed to the resolver is used.
class Sink {
start(): void {
assertEquals(this, thisObject, "start should be called as a method");
}
write(): void {
assertEquals(this, thisObject, "write should be called as a method");
}
close(): void {
assertEquals(this, thisObject, "close should be called as a method");
}
abort(): void {
assertEquals(this, thisObject, "abort should be called as a method");
}
}
const theSink = new Sink();
thisObject = theSink;
const ws = new WritableStream(theSink);
const writer = ws.getWriter();
writer.write("a");
const closePromise = writer.close();
const ws2 = new WritableStream(theSink);
const writer2 = ws2.getWriter();
const abortPromise = writer2.abort();
return Promise.all([closePromise, abortPromise]).then(undefined);
});
unitTest(function sizeShouldNotBeCalledAsMethod() {
const strategy = {
size(): number {
if (this !== undefined) {
throw new Error("size called as a method");
}
return 1;
},
};
const ws = new WritableStream({}, strategy);
const writer = ws.getWriter();
return writer.write("a");
});
unitTest(function redundantReleaseLockIsNoOp() {
const ws = new WritableStream();
const writer1 = ws.getWriter();
assertEquals(
undefined,
writer1.releaseLock(),
"releaseLock() should return undefined",
);
const writer2 = ws.getWriter();
assertEquals(
undefined,
writer1.releaseLock(),
"no-op releaseLock() should return undefined",
);
// Calling releaseLock() on writer1 should not interfere with writer2. If it did, then the ready promise would be
// rejected.
return writer2.ready;
});
unitTest(function readyPromiseShouldFireBeforeReleaseLock() {
const events: string[] = [];
const ws = new WritableStream();
const writer = ws.getWriter();
return writer.ready.then(() => {
// Force the ready promise back to a pending state.
const writerPromise = writer.write("dummy");
const readyPromise = writer.ready.catch(() => events.push("ready"));
const closedPromise = writer.closed.catch(() => events.push("closed"));
writer.releaseLock();
return Promise.all([readyPromise, closedPromise]).then(() => {
assertEquals(
events,
["ready", "closed"],
"ready promise should fire before closed promise",
);
// Stop the writer promise hanging around after the test has finished.
return Promise.all([writerPromise, ws.abort()]).then(undefined);
});
});
});
unitTest(function subclassingWritableStream() {
class Subclass extends WritableStream {
extraFunction(): boolean {
return true;
}
}
assert(
Object.getPrototypeOf(Subclass.prototype) === WritableStream.prototype,
"Subclass.prototype's prototype should be WritableStream.prototype",
);
assert(
Object.getPrototypeOf(Subclass) === WritableStream,
"Subclass's prototype should be WritableStream",
);
const sub = new Subclass();
assert(
sub instanceof WritableStream,
"Subclass object should be an instance of WritableStream",
);
assert(
sub instanceof Subclass,
"Subclass object should be an instance of Subclass",
);
const lockedGetter = Object.getOwnPropertyDescriptor(
WritableStream.prototype,
"locked",
)!.get!;
assert(
lockedGetter.call(sub) === sub.locked,
"Subclass object should pass brand check",
);
assert(
sub.extraFunction(),
"extraFunction() should be present on Subclass object",
);
});
unitTest(function lockedGetterShouldReturnTrue() {
const ws = new WritableStream();
assert(!ws.locked, "stream should not be locked");
ws.getWriter();
assert(ws.locked, "stream should be locked");
});


@@ -57,10 +57,6 @@ import "./response_test.ts";
import "./signal_test.ts";
import "./stat_test.ts";
import "./stdio_test.ts";
import "./streams_internal_test.ts";
import "./streams_piping_test.ts";
import "./streams_transform_test.ts";
import "./streams_writable_test.ts";
import "./symlink_test.ts";
import "./sync_test.ts";
import "./text_encoding_test.ts";


@@ -1,13 +1,62 @@
{
"streams": [
// "piping/abort",
// "piping/close-propagation-backward",
// "piping/close-propagation-forward",
// "piping/error-propagation-backward",
// "piping/error-propagation-forward",
"piping/flow-control",
// "piping/general",
"piping/multiple-propagation",
// "piping/pipe-through",
"piping/then-interception",
// "piping/throwing-options",
// "piping/transform-streams",
"queuing-strategies.any",
// "readable-byte-streams",
// "readable-streams/async-iterator",
// "readable-streams/bad-strategies",
// "readable-streams/bad-underlying-source",
// "readable-streams/cancel",
// "readable-streams/constructor",
"readable-streams/count-queuing-strategy-integration",
"readable-streams/default-reader",
"readable-streams/floating-point-total-queue-size",
"readable-streams/garbage-collection",
"readable-streams/general",
{
"name": "readable-streams/general",
"name": "readable-streams/patched-global",
"expectFail": [
"ReadableStream can't be constructed with an invalid type",
"default ReadableStream getReader() should only accept mode:undefined"
"ReadableStream async iterator should use the original values of getReader() and ReadableStreamDefaultReader methods"
]
},
"writable-streams/general"
"readable-streams/reentrant-strategies",
"readable-streams/tee",
// "readable-streams/templated",
"transform-streams/backpressure",
"transform-streams/errors",
"transform-streams/flush",
"transform-streams/general",
"transform-streams/lipfuzz",
// "transform-streams/patched-global",
"transform-streams/properties",
"transform-streams/reentrant-strategies",
"transform-streams/strategies",
// "transform-streams/terminate",
// "writable-streams/aborting",
// "writable-streams/bad-strategies",
"writable-streams/bad-underlying-sinks",
"writable-streams/byte-length-queuing-strategy",
// "writable-streams/close",
// "writable-streams/constructor",
"writable-streams/count-queuing-strategy",
"writable-streams/error",
"writable-streams/floating-point-total-queue-size",
"writable-streams/general",
"writable-streams/properties",
"writable-streams/reentrant-strategy",
"writable-streams/start",
"writable-streams/write"
],
"encoding": [
{

File diff suppressed because it is too large.

op_crates/fetch/11_streams_types.d.ts (vendored, new file, 49 lines)

@@ -0,0 +1,49 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
// ** Internal Interfaces **
interface PendingAbortRequest {
deferred: Deferred<void>;
// deno-lint-ignore no-explicit-any
reason: any;
wasAlreadyErroring: boolean;
}
// deno-lint-ignore no-explicit-any
interface ReadRequest<R = any> {
chunkSteps: (chunk: R) => void;
closeSteps: () => void;
// deno-lint-ignore no-explicit-any
errorSteps: (error: any) => void;
}
interface ReadableByteStreamQueueEntry {
buffer: ArrayBufferLike;
byteOffset: number;
byteLength: number;
}
interface ReadableStreamGetReaderOptions {
mode?: "byob";
}
interface ReadableStreamIteratorOptions {
preventCancel?: boolean;
}
interface ValueWithSize<T> {
value: T;
size: number;
}
interface VoidFunction {
(): void;
}
// ** Ambient Definitions and Interfaces not provided by fetch **
declare function queueMicrotask(callback: VoidFunction): void;
declare namespace Deno {
function inspect(value: unknown, options?: Record<string, unknown>): string;
}
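The PendingAbortRequest interface above refers to a Deferred<void> type that is not declared in this excerpt. As an illustration only (the real helper in the streams implementation may be shaped differently), a deferred is conventionally a promise with its resolve and reject functions exposed:

// Hypothetical sketch of a Deferred helper matching the shape used above.
interface Deferred<T> {
  promise: Promise<T>;
  resolve: (value: T | PromiseLike<T>) => void;
  // deno-lint-ignore no-explicit-any
  reject: (reason?: any) => void;
}

function createDeferred<T>(): Deferred<T> {
  let resolve!: (value: T | PromiseLike<T>) => void;
  // deno-lint-ignore no-explicit-any
  let reject!: (reason?: any) => void;
  const promise = new Promise<T>((res, rej) => {
    resolve = res;
    reject = rej;
  });
  return { promise, resolve, reject };
}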


@@ -37,12 +37,22 @@ interface ReadableStreamDefaultReader<R = any> {
releaseLock(): void;
}
declare var ReadableStreamDefaultReader: {
prototype: ReadableStreamDefaultReader;
new <R>(stream: ReadableStream<R>): ReadableStreamDefaultReader<R>;
};
interface ReadableStreamReader<R = any> {
cancel(): Promise<void>;
read(): Promise<ReadableStreamReadResult<R>>;
releaseLock(): void;
}
declare var ReadableStreamReader: {
prototype: ReadableStreamReader;
new (): ReadableStreamReader;
};
interface ReadableByteStreamControllerCallback {
(controller: ReadableByteStreamController): void | PromiseLike<void>;
}
@@ -55,6 +65,14 @@ interface UnderlyingByteSource {
type: "bytes";
}
interface UnderlyingSink<W = any> {
abort?: WritableStreamErrorCallback;
close?: WritableStreamDefaultControllerCloseCallback;
start?: WritableStreamDefaultControllerStartCallback;
type?: undefined;
write?: WritableStreamDefaultControllerWriteCallback<W>;
}
interface UnderlyingSource<R = any> {
cancel?: ReadableStreamErrorCallback;
pull?: ReadableStreamDefaultControllerCallback<R>;
@@ -77,6 +95,11 @@ interface ReadableStreamDefaultController<R = any> {
error(error?: any): void;
}
declare var ReadableStreamDefaultController: {
prototype: ReadableStreamDefaultController;
new (): ReadableStreamDefaultController;
};
interface ReadableByteStreamController {
readonly byobRequest: undefined;
readonly desiredSize: number | null;
@@ -85,6 +108,11 @@ interface ReadableByteStreamController {
error(error?: any): void;
}
declare var ReadableByteStreamController: {
prototype: ReadableByteStreamController;
new (): ReadableByteStreamController;
};
interface PipeOptions {
preventAbort?: boolean;
preventCancel?: boolean;
@@ -122,14 +150,9 @@ declare class ByteLengthQueuingStrategy
interface ReadableStream<R = any> {
readonly locked: boolean;
cancel(reason?: any): Promise<void>;
getIterator(options?: { preventCancel?: boolean }): AsyncIterableIterator<R>;
// getReader(options: { mode: "byob" }): ReadableStreamBYOBReader;
getReader(): ReadableStreamDefaultReader<R>;
pipeThrough<T>(
{
writable,
readable,
}: {
{ writable, readable }: {
writable: WritableStream<R>;
readable: ReadableStream<T>;
},
@@ -174,28 +197,23 @@ interface WritableStreamErrorCallback {
(reason: any): void | PromiseLike<void>;
}
interface UnderlyingSink<W = any> {
abort?: WritableStreamErrorCallback;
close?: WritableStreamDefaultControllerCloseCallback;
start?: WritableStreamDefaultControllerStartCallback;
type?: undefined;
write?: WritableStreamDefaultControllerWriteCallback<W>;
}
/** This Streams API interface provides a standard abstraction for writing
* streaming data to a destination, known as a sink. This object comes with
* built-in backpressure and queuing. */
declare class WritableStream<W = any> {
constructor(
underlyingSink?: UnderlyingSink<W>,
strategy?: QueuingStrategy<W>,
);
interface WritableStream<W = any> {
readonly locked: boolean;
abort(reason?: any): Promise<void>;
close(): Promise<void>;
getWriter(): WritableStreamDefaultWriter<W>;
}
declare var WritableStream: {
prototype: WritableStream;
new <W = any>(
underlyingSink?: UnderlyingSink<W>,
strategy?: QueuingStrategy<W>,
): WritableStream<W>;
};
/** This Streams API interface represents a controller allowing control of a
* WritableStream's state. When constructing a WritableStream, the underlying
* sink is given a corresponding WritableStreamDefaultController instance to
@@ -218,16 +236,25 @@ interface WritableStreamDefaultWriter<W = any> {
write(chunk: W): Promise<void>;
}
declare class TransformStream<I = any, O = any> {
constructor(
transformer?: Transformer<I, O>,
writableStrategy?: QueuingStrategy<I>,
readableStrategy?: QueuingStrategy<O>,
);
declare var WritableStreamDefaultWriter: {
prototype: WritableStreamDefaultWriter;
new (): WritableStreamDefaultWriter;
};
interface TransformStream<I = any, O = any> {
readonly readable: ReadableStream<O>;
readonly writable: WritableStream<I>;
}
declare var TransformStream: {
prototype: TransformStream;
new <I = any, O = any>(
transformer?: Transformer<I, O>,
writableStrategy?: QueuingStrategy<I>,
readableStrategy?: QueuingStrategy<O>,
): TransformStream<I, O>;
};
interface TransformStreamDefaultController<O = any> {
readonly desiredSize: number | null;
enqueue(chunk: O): void;

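A note on the typing pattern running through the hunks above: replacing declare class declarations with an instance interface plus a declare var constructor object mirrors how lib.dom.d.ts declares platform globals. The instance shape and the constructor value are declared separately, so the runtime's own implementation (or a polyfill assigned to the global) only has to be structurally compatible. A minimal sketch with a made-up Gadget name, purely for illustration:

// "Gadget" is hypothetical; it only demonstrates the interface/var split.
interface Gadget {
  readonly locked: boolean;
  release(): void;
}

declare var Gadget: {
  prototype: Gadget;
  new (highWaterMark?: number): Gadget;
};

// The runtime can now install any conforming implementation, e.g.:
// globalThis.Gadget = class { get locked() { return false; } release() {} };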

@@ -223,6 +223,9 @@ delete Object.prototype.__proto__;
PerformanceMeasure: util.nonEnumerable(performance.PerformanceMeasure),
ProgressEvent: util.nonEnumerable(ProgressEvent),
ReadableStream: util.nonEnumerable(streams.ReadableStream),
ReadableStreamDefaultReader: util.nonEnumerable(
streams.ReadableStreamDefaultReader,
),
Request: util.nonEnumerable(fetch.Request),
Response: util.nonEnumerable(fetch.Response),
TextDecoder: util.nonEnumerable(TextDecoder),
@@ -233,6 +236,9 @@ delete Object.prototype.__proto__;
WebSocket: util.nonEnumerable(webSocket.WebSocket),
Worker: util.nonEnumerable(worker.Worker),
WritableStream: util.nonEnumerable(streams.WritableStream),
WritableStreamDefaultWriter: util.nonEnumerable(
streams.WritableStreamDefaultWriter,
),
atob: util.writable(atob),
btoa: util.writable(btoa),
clearInterval: util.writable(timers.clearInterval),

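With the two globals added above, the default reader and writer constructors become reachable from user code, for example in instanceof checks. A small sketch, assuming the runtime installs them as shown:

// Assumes ReadableStreamDefaultReader / WritableStreamDefaultWriter are global.
const { readable, writable } = new TransformStream<string, string>();
console.log(readable.getReader() instanceof ReadableStreamDefaultReader); // true
console.log(writable.getWriter() instanceof WritableStreamDefaultWriter); // true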

@@ -42,5 +42,5 @@ export function pooledMap<T, R>(
await Promise.all(executing);
writer.close();
})();
return res.readable.getIterator();
return res.readable[Symbol.asyncIterator]();
}


@@ -181,7 +181,7 @@ Deno.test("toReadableCheck", async function (): Promise<void> {
const writableStream = readableStreamFromAsyncIterator(iter);
const decoder = new TextDecoder();
for await (const chunk of writableStream.getIterator()) {
for await (const chunk of writableStream) {
readChunks.push(decoder.decode(chunk));
}
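Both of these call sites reflect the removal of the non-standard getIterator(): a spec-aligned ReadableStream is consumed either through readable[Symbol.asyncIterator]() or directly with for await. A small sketch of the replacement pattern, assuming the runtime's ReadableStream exposes Symbol.asyncIterator as the updated code relies on:

// Collects a byte stream into a string using async iteration.
async function collect(stream: ReadableStream<Uint8Array>): Promise<string> {
  const decoder = new TextDecoder();
  let out = "";
  for await (const chunk of stream) {
    out += decoder.decode(chunk, { stream: true });
  }
  return out + decoder.decode();
}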