fix(encoding/csv/stream): cancel lineReader if readable is canceled (#2401)

This commit is contained in:
nkronlage 2022-07-11 02:58:23 -07:00 committed by GitHub
parent dab98b6e1b
commit eb4f512cc4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 32797 additions and 0 deletions

View File

@@ -56,6 +56,7 @@ export class CSVStream implements TransformStream<string, Array<string>> {
this.#lineReader = new StreamLineReader(this.#lines.readable.getReader());
this.#readable = new ReadableStream<Array<string>>({
pull: (controller) => this.#pull(controller),
cancel: () => this.#lineReader.cancel(),
});
}

View File

@@ -295,3 +295,31 @@ function createReadableStreamFromString(s: string): ReadableStream<string> {
new TextDecoderStream(),
);
}
// Work around resource leak error with TextDecoderStream:
// https://github.com/denoland/deno/issues/13142
/**
 * Creates a byte-to-text TransformStream equivalent to TextDecoderStream.
 *
 * A plain per-chunk `decode(chunk)` would mangle multi-byte UTF-8 sequences
 * that straddle a chunk boundary (each half decodes to U+FFFD). Passing
 * `{ stream: true }` makes the decoder buffer incomplete sequences between
 * chunks; the argument-less `decode()` in `flush` emits anything left over.
 */
export const MyTextDecoderStream = (): TransformStream<Uint8Array, string> => {
  const textDecoder = new TextDecoder();
  return new TransformStream<Uint8Array, string>({
    transform(chunk, controller) {
      // `stream: true` keeps partial code points buffered between chunks.
      controller.enqueue(textDecoder.decode(chunk, { stream: true }));
    },
    flush(controller) {
      // Flush any buffered partial sequence at end-of-stream.
      controller.enqueue(textDecoder.decode());
    },
  });
};
Deno.test({
  name:
    "[encoding/csv/stream] cancel CSVStream during iteration does not leak file",
  permissions: { read: [testdataDir] },
  fn: async () => {
    // Read a single record, then abandon the iteration early. Aborting must
    // cancel the whole pipeline (CSVStream -> decoder -> file.readable) so
    // the underlying file handle is released; Deno's test sanitizer fails
    // the test if the resource leaks.
    const file = await Deno.open(join(testdataDir, "large.csv"));
    const records = file.readable
      .pipeThrough(MyTextDecoderStream())
      .pipeThrough(new CSVStream());
    const iterator = records[Symbol.asyncIterator]();
    await iterator.next();
    // Equivalent to `break` inside `for await`: triggers cancellation.
    await iterator.return?.();
  },
});

32768
encoding/testdata/large.csv vendored Normal file

File diff suppressed because it is too large Load Diff