// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

import { assert, assertEquals } from "@std/assert";
import { copy } from "@std/io/copy";
import { readerFromStreamReader } from "./reader_from_stream_reader.ts";
import { Buffer } from "@std/io/buffer";
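
/** Returns a `Uint8Array` of length `bytes`, with every element set to the char code of the single character `c`. */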
function repeat(c: string, bytes: number): Uint8Array {
  assertEquals(c.length, 1);
  const ui8 = new Uint8Array(bytes);
  ui8.fill(c.charCodeAt(0));
  return ui8;
}

Deno.test("readerFromStreamReader()", async function () {
  const chunks: string[] = ["hello", "deno", "land"];
  const expected = chunks.slice();
  const readChunks: Uint8Array[] = [];
  const readableStream = ReadableStream.from(chunks)
    .pipeThrough(new TextEncoderStream());

  const decoder = new TextDecoder();
  const reader = readerFromStreamReader(readableStream.getReader());

  let i = 0;

  // Read until the stream is exhausted; read() returns null at EOF.
  while (true) {
    const b = new Uint8Array(1024);
    const n = await reader.read(b);

    if (n === null) break;

    readChunks.push(b.subarray(0, n));
    assert(i < expected.length);

    i++;
  }

  assertEquals(
    expected,
    readChunks.map((chunk) => decoder.decode(chunk)),
  );
});

Deno.test("readerFromStreamReader() handles big chunks", async function () {
  const bufSize = 1024;
  const chunkSize = 3 * bufSize;
  const writer = new Buffer();

  // A readable stream can enqueue chunks bigger than the copy() bufSize;
  // the reader returned by readerFromStreamReader() should buffer the excess bytes.
  const chunks: string[] = [
    "a".repeat(chunkSize),
    "b".repeat(chunkSize),
    "c".repeat(chunkSize),
  ];
  const expected = chunks.slice();
  const readableStream = ReadableStream.from(chunks)
    .pipeThrough(new TextEncoderStream());

  const reader = readerFromStreamReader(readableStream.getReader());
  const n = await copy(reader, writer, { bufSize });

  const expectedWritten = chunkSize * expected.length;
  assertEquals(n, expectedWritten);
  assertEquals(writer.length, expectedWritten);
});

Deno.test("readerFromStreamReader() handles irregular chunks", async function () {
  const bufSize = 1024;
  const chunkSize = 3 * bufSize;
  const writer = new Buffer();

  // A readable stream can enqueue chunks bigger than the copy() bufSize;
  // the reader returned by readerFromStreamReader() should buffer the excess bytes.
  const chunks: Uint8Array[] = [
    repeat("a", chunkSize),
    repeat("b", chunkSize + 253),
    repeat("c", chunkSize + 8),
  ];
  const expected = new Uint8Array(
    chunks
      .slice()
      .map((chunk) => [...chunk])
      .flat(),
  );
  const readableStream = ReadableStream.from(chunks);

  const reader = readerFromStreamReader(readableStream.getReader());

  const n = await copy(reader, writer, { bufSize });
  assertEquals(n, expected.length);
  assertEquals(expected, writer.bytes());
});
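
// Hedged extra example (a minimal sketch, not part of the original suite):
// readerFromStreamReader() signals EOF by returning null from read(), as the
// loop in the first test relies on, so a stream that closes without enqueuing
// any chunks should yield null on the very first read.
Deno.test("readerFromStreamReader() returns null for an empty stream (illustrative)", async function () {
  const emptyStream = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.close(); // close immediately: no chunks are ever enqueued
    },
  });
  const reader = readerFromStreamReader(emptyStream.getReader());
  assertEquals(await reader.read(new Uint8Array(16)), null);
});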