Mirror of https://github.com/denoland/std.git, synced 2024-11-21 20:50:22 +00:00
chore(streams): format test names (#4378)
* initial commit
* Update buffer_test.ts
  Co-authored-by: Asher Gomez <ashersaupingomez@gmail.com>
* remove brackets for classes

---------

Co-authored-by: Asher Gomez <ashersaupingomez@gmail.com>
This commit is contained in:
parent f7076beaaa
commit b1b048d759
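The pattern applied in every hunk below is the same: the "[streams]" prefix is dropped, class-named tests lose the brackets after the class name, function-named tests keep a "()" suffix, and the rest of the name reads as a plain sentence. A minimal sketch of the resulting style, loosely modelled on the Buffer tests touched in this commit (the body is illustrative rather than copied verbatim from the diff):

```ts
import { assertEquals } from "../assert/mod.ts";
import { Buffer } from "./buffer.ts";

// Old style: Deno.test("[streams] Buffer Write & get bytes", ...)
// New style: subject first, then a sentence-like description.
Deno.test("Buffer handles write and get bytes", async function () {
  const buf = new Buffer();
  const writer = buf.writable.getWriter();
  const data = new Uint8Array([4, 21, 45, 19]);
  await writer.write(data);
  // Buffer exposes the accumulated bytes synchronously.
  assertEquals(buf.bytes(), data);
});
```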
@@ -3,7 +3,7 @@
 import { assert, assertEquals } from "../assert/mod.ts";
 import { Buffer } from "./buffer.ts";

-Deno.test("[streams] Buffer Write & Read", async function () {
+Deno.test("Buffer handles write and read", async function () {
   const buf = new Buffer();
   const writer = buf.writable.getWriter();
   const reader = buf.readable.getReader({ mode: "byob" });
@@ -13,7 +13,7 @@ Deno.test("[streams] Buffer Write & Read", async function () {
   assertEquals(read.value, data);
 });

-Deno.test("[streams] Buffer Read empty", async function () {
+Deno.test("Buffer handles read empty", async function () {
   const buf = new Buffer();
   const reader = buf.readable.getReader({ mode: "byob" });
   const read = await reader.read(new Uint8Array(5));
@@ -21,7 +21,7 @@ Deno.test("[streams] Buffer Read empty", async function () {
   assertEquals(read.value!.byteLength, 0);
 });

-Deno.test("[streams] Buffer Write & get bytes", async function () {
+Deno.test("Buffer handles write and get bytes", async function () {
   const buf = new Buffer();
   const writer = buf.writable.getWriter();
   const data = new Uint8Array([4, 21, 45, 19]);
@@ -29,7 +29,7 @@ Deno.test("[streams] Buffer Write & get bytes", async function () {
   assertEquals(buf.bytes(), data);
 });

-Deno.test("[streams] Buffer truncate", async function () {
+Deno.test("Buffer handles truncate", async function () {
   const buf = new Buffer();
   const writer = buf.writable.getWriter();
   await writer.write(new Uint8Array([4, 21, 45, 19]));

@@ -3,7 +3,7 @@
 import { assertEquals, assertThrows } from "../assert/mod.ts";
 import { ByteSliceStream } from "./byte_slice_stream.ts";

-Deno.test("[streams] ByteSliceStream", async function () {
+Deno.test("ByteSliceStream", async function () {
   function createStream(start = 0, end = Infinity) {
     return ReadableStream.from([
       new Uint8Array([0, 1]),

@@ -19,7 +19,7 @@ const DELIMITER_STREAM_INPUTS = [
   "CRLFasd", // chunk starts with delimiter
 ].map((s) => new TextEncoder().encode(s));

-Deno.test("[streams] DelimiterStream, discard", async () => {
+Deno.test("DelimiterStream discard", async () => {
   const crlf = new TextEncoder().encode("CRLF");
   const delimStream = new DelimiterStream(crlf, { disposition: "discard" });
   const outputs = [
@@ -39,7 +39,7 @@ Deno.test("[streams] DelimiterStream, discard", async () => {
   await testTransformStream(delimStream, DELIMITER_STREAM_INPUTS, outputs);
 });

-Deno.test("[streams] DelimiterStream, suffix", async () => {
+Deno.test("DelimiterStream suffix", async () => {
   const crlf = new TextEncoder().encode("CRLF");
   const delimStream = new DelimiterStream(crlf, { disposition: "suffix" });
   const outputs = [
@@ -59,7 +59,7 @@ Deno.test("[streams] DelimiterStream, suffix", async () => {
   await testTransformStream(delimStream, DELIMITER_STREAM_INPUTS, outputs);
 });

-Deno.test("[streams] DelimiterStream, prefix", async () => {
+Deno.test("DelimiterStream prefix", async () => {
   const crlf = new TextEncoder().encode("CRLF");
   const delimStream = new DelimiterStream(crlf, { disposition: "prefix" });
   const outputs = [
@@ -95,7 +95,7 @@ const CHAR_DELIMITER_STREAM_INPUTS = [
   "_asd", // chunk starts with delimiter
 ].map((s) => new TextEncoder().encode(s));

-Deno.test("[streams] DelimiterStream, char delimiter, discard", async () => {
+Deno.test("DelimiterStream char delimiter, discard", async () => {
   const delim = new TextEncoder().encode("_");
   const delimStream = new DelimiterStream(delim, { disposition: "discard" });
   const outputs = [
@@ -113,7 +113,7 @@ Deno.test("[streams] DelimiterStream, char delimiter, discard", async () => {
   await testTransformStream(delimStream, CHAR_DELIMITER_STREAM_INPUTS, outputs);
 });

-Deno.test("[streams] DelimiterStream, char delimiter, suffix", async () => {
+Deno.test("DelimiterStream char delimiter, suffix", async () => {
   const delim = new TextEncoder().encode("_");
   const delimStream = new DelimiterStream(delim, { disposition: "suffix" });
   const outputs = [
@@ -131,7 +131,7 @@ Deno.test("[streams] DelimiterStream, char delimiter, suffix", async () => {
   await testTransformStream(delimStream, CHAR_DELIMITER_STREAM_INPUTS, outputs);
 });

-Deno.test("[streams] DelimiterStream, char delimiter, prefix", async () => {
+Deno.test("DelimiterStream char delimiter, prefix", async () => {
   const delim = new TextEncoder().encode("_");
   const delimStream = new DelimiterStream(delim, { disposition: "prefix" });
   const outputs = [
@@ -149,7 +149,7 @@ Deno.test("[streams] DelimiterStream, char delimiter, prefix", async () => {
   await testTransformStream(delimStream, CHAR_DELIMITER_STREAM_INPUTS, outputs);
 });

-Deno.test("[streams] DelimiterStream, regression 3609", async () => {
+Deno.test("DelimiterStream regression 3609", async () => {
   const delimStream = new DelimiterStream(new TextEncoder().encode(";"));
   const inputs = [
     ";ab;fg;hn;j",

@@ -3,7 +3,7 @@
 import { earlyZipReadableStreams } from "./early_zip_readable_streams.ts";
 import { assertEquals } from "../assert/mod.ts";

-Deno.test("[streams] earlyZipReadableStreams short first", async () => {
+Deno.test("earlyZipReadableStreams() handles short first", async () => {
   const textStream = ReadableStream.from(["1", "2", "3"]);
   const textStream2 = ReadableStream.from(["a", "b", "c", "d", "e"]);

@@ -21,7 +21,7 @@ Deno.test("[streams] earlyZipReadableStreams short first", async () => {
   ]);
 });

-Deno.test("[streams] earlyZipReadableStreams long first", async () => {
+Deno.test("earlyZipReadableStreams() handles long first", async () => {
   const textStream = ReadableStream.from(["a", "b", "c", "d", "e"]);
   const textStream2 = ReadableStream.from(["1", "2", "3"]);


@@ -6,7 +6,7 @@ import { readerFromIterable } from "./reader_from_iterable.ts";
 import { delay } from "../async/delay.ts";
 import type { Reader, ReaderSync } from "../io/types.ts";

-Deno.test("iterateReader", async () => {
+Deno.test("iterateReader()", async () => {
   // ref: https://github.com/denoland/deno/issues/2330
   const encoder = new TextEncoder();

@@ -42,7 +42,7 @@ Deno.test("iterateReader", async () => {
   assertEquals(totalSize, 12);
 });

-Deno.test("iterateReader works with slow consumer", async () => {
+Deno.test("iterateReader() works with slow consumer", async () => {
   const a = new Uint8Array([97]);
   const b = new Uint8Array([98]);
   const iter = iterateReader(readerFromIterable([a, b]));
@@ -53,7 +53,7 @@ Deno.test("iterateReader works with slow consumer", async () => {
   assertEquals([a, b], await Promise.all(promises));
 });

-Deno.test("iterateReaderSync", () => {
+Deno.test("iterateReaderSync()", () => {
   // ref: https://github.com/denoland/deno/issues/2330
   const encoder = new TextEncoder();

@@ -88,7 +88,7 @@ Deno.test("iterateReaderSync", () => {
   assertEquals(totalSize, 12);
 });

-Deno.test("iterateReaderSync works with slow consumer", async () => {
+Deno.test("iterateReaderSync() works with slow consumer", async () => {
   const a = new Uint8Array([97]);
   const b = new Uint8Array([98]);
   const data = [a, b];

@@ -3,7 +3,7 @@
 import { assertEquals, assertRejects } from "../assert/mod.ts";
 import { LimitedBytesTransformStream } from "./limited_bytes_transform_stream.ts";

-Deno.test("[streams] LimitedBytesTransformStream", async function () {
+Deno.test("LimitedBytesTransformStream", async function () {
   const r = ReadableStream.from([
     new Uint8Array([1, 2, 3]),
     new Uint8Array([4, 5, 6]),
@@ -17,7 +17,7 @@ Deno.test("[streams] LimitedBytesTransformStream", async function () {
   assertEquals(chunks.length, 2);
 });

-Deno.test("[streams] LimitedBytesTransformStream error", async function () {
+Deno.test("LimitedBytesTransformStream handles error", async function () {
   const r = ReadableStream.from([
     new Uint8Array([1, 2, 3]),
     new Uint8Array([4, 5, 6]),

@@ -3,7 +3,7 @@
 import { assertEquals, assertRejects } from "../assert/mod.ts";
 import { LimitedTransformStream } from "./limited_transform_stream.ts";

-Deno.test("[streams] LimitedTransformStream", async function () {
+Deno.test("LimitedTransformStream", async function () {
   const r = ReadableStream.from([
     "foo",
     "foo",
@@ -17,7 +17,7 @@ Deno.test("[streams] LimitedTransformStream", async function () {
   assertEquals(chunks.length, 3);
 });

-Deno.test("[streams] LimitedTransformStream error", async function () {
+Deno.test("LimitedTransformStream handles error", async function () {
   const r = ReadableStream.from([
     "foo",
     "foo",

@@ -3,7 +3,7 @@
 import { mergeReadableStreams } from "./merge_readable_streams.ts";
 import { assertEquals } from "../assert/mod.ts";

-Deno.test("[streams] mergeReadableStreams", async () => {
+Deno.test("mergeReadableStreams()", async () => {
   const textStream = ReadableStream.from([
     "qwertzuiopasd",
     "mnbvcxylkjhgfds",
@@ -30,7 +30,7 @@ Deno.test("[streams] mergeReadableStreams", async () => {
   ]);
 });

-Deno.test("[streams] mergeReadableStreams - handling errors", async () => {
+Deno.test("mergeReadableStreams() handles errors", async () => {
   const textStream = ReadableStream.from(["1", "3"]);

   const textStream2 = ReadableStream.from(["2", "4"]);

@@ -34,7 +34,7 @@ class MockReaderCloser implements Reader, Closer {
   }
 }

-Deno.test("[streams] readableStreamFromReader()", async function () {
+Deno.test("readableStreamFromReader()", async function () {
   const encoder = new TextEncoder();
   const reader = new Buffer(encoder.encode("hello deno land"));
   const stream = readableStreamFromReader(reader);
@@ -44,11 +44,11 @@ Deno.test("[streams] readableStreamFromReader()", async function () {
 });

 Deno.test({
-  name: "[streams] readableStreamFromReader() auto closes closer",
+  name: "readableStreamFromReader() auto closes closer",
   async fn() {},
 });

-Deno.test("[streams] readableStreamFromReader() - calls close", async function () {
+Deno.test("readableStreamFromReader() calls close", async function () {
   const encoder = new TextEncoder();
   const reader = new MockReaderCloser();
   reader.chunks = [
@@ -63,7 +63,7 @@ Deno.test("[streams] readableStreamFromReader() - calls close", async function (
   assertEquals(reader.closeCall, 1);
 });

-Deno.test("[streams] readableStreamFromReader() - doesn't call close with autoClose false", async function () {
+Deno.test("readableStreamFromReader() doesn't call close with autoClose false", async function () {
   const encoder = new TextEncoder();
   const reader = new MockReaderCloser();
   reader.chunks = [
@@ -78,7 +78,7 @@ Deno.test("[streams] readableStreamFromReader() - doesn't call close with autoCl
   assertEquals(reader.closeCall, 0);
 });

-Deno.test("[streams] readableStreamFromReader() - chunkSize", async function () {
+Deno.test("readableStreamFromReader() handles chunkSize", async function () {
   const encoder = new TextEncoder();
   const reader = new MockReaderCloser();
   reader.chunks = [

@@ -3,7 +3,7 @@
 import { assertEquals } from "../assert/mod.ts";
 import { readerFromIterable } from "./reader_from_iterable.ts";

-Deno.test("[streams] readerFromIterable()", async function () {
+Deno.test("readerFromIterable()", async function () {
   const reader = readerFromIterable((function* () {
     const encoder = new TextEncoder();
     for (const string of ["hello", "deno", "foo"]) {

@@ -12,7 +12,7 @@ function repeat(c: string, bytes: number): Uint8Array {
   return ui8;
 }

-Deno.test("[streams] readerFromStreamReader()", async function () {
+Deno.test("readerFromStreamReader()", async function () {
   const chunks: string[] = ["hello", "deno", "land"];
   const expected = chunks.slice();
   const readChunks: Uint8Array[] = [];
@@ -42,7 +42,7 @@ Deno.test("[streams] readerFromStreamReader()", async function () {
   );
 });

-Deno.test("[streams] readerFromStreamReader() big chunks", async function () {
+Deno.test("readerFromStreamReader() handles big chunks", async function () {
   const bufSize = 1024;
   const chunkSize = 3 * bufSize;
   const writer = new Buffer();
@@ -66,7 +66,7 @@ Deno.test("[streams] readerFromStreamReader() big chunks", async function () {
   assertEquals(writer.length, expectedWritten);
 });

-Deno.test("[streams] readerFromStreamReader() irregular chunks", async function () {
+Deno.test("readerFromStreamReader() handles irregular chunks", async function () {
   const bufSize = 1024;
   const chunkSize = 3 * bufSize;
   const writer = new Buffer();

@@ -3,7 +3,7 @@
 import { TextDelimiterStream } from "./text_delimiter_stream.ts";
 import { testTransformStream } from "./_test_common.ts";

-Deno.test("[streams] TextDelimiterStream, discard", async () => {
+Deno.test("TextDelimiterStream handles discard", async () => {
   const delimStream = new TextDelimiterStream("foo", {
     disposition: "discard",
   });
@@ -28,7 +28,7 @@ Deno.test("[streams] TextDelimiterStream, discard", async () => {
   await testTransformStream(delimStream, inputs, outputs);
 });

-Deno.test("[streams] TextDelimiterStream, suffix", async () => {
+Deno.test("TextDelimiterStream handles suffix", async () => {
   const delimStream = new TextDelimiterStream("foo", {
     disposition: "suffix",
   });
@@ -53,7 +53,7 @@ Deno.test("[streams] TextDelimiterStream, suffix", async () => {
   await testTransformStream(delimStream, inputs, outputs);
 });

-Deno.test("[streams] TextDelimiterStream, prefix", async () => {
+Deno.test("TextDelimiterStream handles prefix", async () => {
   const delimStream = new TextDelimiterStream("foo", {
     disposition: "prefix",
   });

@@ -3,7 +3,7 @@
 import { TextLineStream } from "./text_line_stream.ts";
 import { assertEquals } from "../assert/mod.ts";

-Deno.test("TextLineStream() parses simple input", async () => {
+Deno.test("TextLineStream parses simple input", async () => {
   const stream = ReadableStream.from([
     "qwertzu",
     "iopasd\r\nmnbvc",
@@ -35,7 +35,7 @@ Deno.test("TextLineStream() parses simple input", async () => {
   ]);
 });

-Deno.test("TextLineStream() parses with `allowCR` enabled", async () => {
+Deno.test("TextLineStream parses with `allowCR` enabled", async () => {
   const stream = ReadableStream.from([
     "qwertzu",
     "iopasd\r\nmnbvc",
@@ -70,7 +70,7 @@ Deno.test("TextLineStream() parses with `allowCR` enabled", async () => {
   ]);
 });

-Deno.test("TextLineStream() parses large chunks", async () => {
+Deno.test("TextLineStream parses large chunks", async () => {
   const totalLines = 20_000;
   const stream = ReadableStream.from("\n".repeat(totalLines))
     .pipeThrough(new TextLineStream());
@@ -80,7 +80,7 @@ Deno.test("TextLineStream() parses large chunks", async () => {
   assertEquals(lines, Array.from({ length: totalLines }).fill(""));
 });

-Deno.test("TextLineStream() parses no final empty chunk with terminal newline", async () => {
+Deno.test("TextLineStream parses no final empty chunk with terminal newline", async () => {
   const stream = ReadableStream.from([
     "abc\n",
     "def\nghi\njk",
@@ -104,7 +104,7 @@ Deno.test("TextLineStream() parses no final empty chunk with terminal newline",
   ]);
 });

-Deno.test("TextLineStream() parses no final empty chunk without terminal newline", async () => {
+Deno.test("TextLineStream parses no final empty chunk without terminal newline", async () => {
   const stream = ReadableStream.from([
     "abc\n",
     "def\nghi\njk",

@@ -3,7 +3,7 @@
 import { assertEquals } from "../assert/assert_equals.ts";
 import { toArrayBuffer } from "./to_array_buffer.ts";

-Deno.test("[streams] toArrayBuffer", async () => {
+Deno.test("toArrayBuffer()", async () => {
   const stream = ReadableStream.from([
     new Uint8Array([1, 2, 3, 4, 5]),
     new Uint8Array([6, 7]),

@@ -4,7 +4,7 @@ import { assert } from "../assert/assert.ts";
 import { assertEquals } from "../assert/assert_equals.ts";
 import { toBlob } from "./to_blob.ts";

-Deno.test("[streams] toBlob", async () => {
+Deno.test("toBlob()", async () => {
   const stream = ReadableStream.from([
     new Uint8Array([1, 2, 3, 4, 5]),
     new Uint8Array([6, 7]),

@@ -3,7 +3,7 @@
 import { assertEquals } from "../assert/assert_equals.ts";
 import { toJson } from "./to_json.ts";

-Deno.test("[streams] toJson", async () => {
+Deno.test("toJson()", async () => {
   const byteStream = ReadableStream.from(["[", "1, 2, 3, 4", "]"])
     .pipeThrough(new TextEncoderStream());


@@ -3,7 +3,7 @@
 import { assertEquals } from "../assert/assert_equals.ts";
 import { toText } from "./to_text.ts";

-Deno.test("[streams] toText", async () => {
+Deno.test("toText()", async () => {
   const byteStream = ReadableStream.from(["hello", " js ", "fans"])
     .pipeThrough(new TextEncoderStream());


@@ -4,7 +4,7 @@ import { assertEquals, assertRejects } from "../assert/mod.ts";
 import { toTransformStream } from "./to_transform_stream.ts";

 Deno.test({
-  name: "[streams] toTransformStream()",
+  name: "toTransformStream()",
   async fn() {
     const readable = ReadableStream.from([0, 1, 2])
       .pipeThrough(toTransformStream(async function* (src) {
@@ -19,7 +19,7 @@ Deno.test({
 });

 Deno.test({
-  name: "[streams] toTransformStream() Pass iterable instead of asyncIterable",
+  name: "toTransformStream() handles iterable instead of asyncIterable",
   async fn() {
     const readable = ReadableStream.from([0, 1, 2])
       .pipeThrough(toTransformStream(function* (_src) {
@@ -34,7 +34,7 @@ Deno.test({
 });

 Deno.test({
-  name: "[streams] toTransformStream() Propagate the error from readable 1",
+  name: "toTransformStream() propagates the error from readable 1",
   async fn(t) {
     // When data is pipelined in the order of readable1 → generator → readable2,
     // Propagate the error that occurred in readable1 to generator and readable2.
@@ -97,7 +97,7 @@ Deno.test({
 });

 Deno.test({
-  name: "[streams] toTransformStream() Propagate the error from generator",
+  name: "toTransformStream() propagates the error from generator",
   async fn(t) {
     // When data is pipelined in the order of readable1 → generator → readable2,
     // Propagate the error that occurred in generator to readable2 and readable1.
@@ -139,7 +139,7 @@ Deno.test({
 });

 Deno.test({
-  name: "[streams] toTransformStream() Propagate cancellation from readable 2",
+  name: "toTransformStream() propagates cancellation from readable 2",
   async fn(t) {
     // When data is pipelined in the order of readable1 → generator → readable2,
     // Propagate the cancellation that occurred in readable2 to readable1 and generator.
@@ -191,7 +191,7 @@

 Deno.test({
   name:
-    "[streams] toTransformStream() Cancel streams with the correct error message",
+    "toTransformStream() handles streams cancel with the correct error message",
   async fn() {
     const src = ReadableStream.from([0, 1, 2]);
     // deno-lint-ignore require-yield

@@ -23,7 +23,7 @@ class MockWriterCloser implements Writer, Closer {
   }
 }

-Deno.test("[streams] writableStreamFromWriter()", async function () {
+Deno.test("writableStreamFromWriter()", async function () {
   const written: string[] = [];
   const chunks: string[] = ["hello", "deno", "land"];
   const decoder = new TextDecoder();
@@ -45,7 +45,7 @@ Deno.test("[streams] writableStreamFromWriter()", async function () {
   assertEquals(written, chunks);
 });

-Deno.test("[streams] writableStreamFromWriter() - calls close on close", async function () {
+Deno.test("writableStreamFromWriter() calls close on close", async function () {
   const written: string[] = [];
   const chunks: string[] = ["hello", "deno", "land"];
   const decoder = new TextDecoder();
@@ -68,7 +68,7 @@ Deno.test("[streams] writableStreamFromWriter() - calls close on close", async f
   assertEquals(writer.closeCall, 1);
 });

-Deno.test("[streams] writableStreamFromWriter() - calls close on abort", async function () {
+Deno.test("writableStreamFromWriter() calls close on abort", async function () {
   const written: string[] = [];
   const chunks: string[] = ["hello", "deno", "land"];
   const decoder = new TextDecoder();
@@ -91,7 +91,7 @@ Deno.test("[streams] writableStreamFromWriter() - calls close on abort", async f
   assertEquals(writer.closeCall, 1);
 });

-Deno.test("[streams] writableStreamFromWriter() - doesn't call close with autoClose false", async function () {
+Deno.test("writableStreamFromWriter() doesn't call close with autoClose false", async function () {
   const written: string[] = [];
   const chunks: string[] = ["hello", "deno", "land"];
   const decoder = new TextDecoder();

@@ -3,7 +3,7 @@
 import { assertEquals } from "../assert/mod.ts";
 import { writerFromStreamWriter } from "./writer_from_stream_writer.ts";

-Deno.test("[streams] writerFromStreamWriter()", async function () {
+Deno.test("writerFromStreamWriter()", async function () {
   const written: string[] = [];
   const chunks: string[] = ["hello", "deno", "land"];
   const writableStream = new WritableStream({

@@ -3,7 +3,7 @@
 import { assertEquals } from "../assert/mod.ts";
 import { zipReadableStreams } from "./zip_readable_streams.ts";

-Deno.test("[streams] zipReadableStreams", async () => {
+Deno.test("zipReadableStreams()", async () => {
   const textStream = ReadableStream.from([
     "qwertzuiopasd",
     "mnbvcxylkjhgfds",
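For the function-style names that keep the "()" suffix (toText(), toJson(), zipReadableStreams(), and the rest), only the string passed to Deno.test changes; the test bodies are untouched by this commit. A small self-contained sketch in the same spirit as the toText test above; the asserted value is inferred from the inputs shown in the diff, not quoted from the file:

```ts
import { assertEquals } from "../assert/assert_equals.ts";
import { toText } from "./to_text.ts";

// Renamed from "[streams] toText" to "toText()"; the body below reconstructs
// what such a test plausibly asserts for the inputs visible in the hunk.
Deno.test("toText()", async () => {
  const byteStream = ReadableStream.from(["hello", " js ", "fans"])
    .pipeThrough(new TextEncoderStream());
  assertEquals(await toText(byteStream), "hello js fans");
});
```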