refactor(streams): rework single-export files (#3061)

Asher Gomez authored on 2022-12-27 15:39:31 +11:00, committed by GitHub
parent 22cdb7f0fc
commit a98606f736
4 changed files with 448 additions and 470 deletions

streams/buffer.ts

@@ -1,7 +1,4 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
import { assert } from "../_util/asserts.ts";
import { copy } from "../bytes/copy.ts";
@@ -169,52 +166,58 @@ export class Buffer {
}
}
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/limited_bytes_transform_stream.ts` instead.
*
* A TransformStream that will only read & enqueue `size` amount of bytes.
* This operation is chunk based and not BYOB based,
* and as such will read more than needed.
*
* If `options.error` is set, then instead of terminating the stream,
* an error will be thrown.
*
* ```ts
* import { LimitedBytesTransformStream } from "https://deno.land/std@$STD_VERSION/streams/buffer.ts";
* const res = await fetch("https://example.com");
* const parts = res.body!
* .pipeThrough(new LimitedBytesTransformStream(512 * 1024));
* ```
*/
LimitedBytesTransformStream,
} from "./limited_bytes_transform_stream.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/limited_transform_stream.ts` instead.
*
* A TransformStream that will only read & enqueue `size` amount of chunks.
*
* If `options.error` is set, then instead of terminating the stream,
* an error will be thrown.
*
* ```ts
* import { LimitedTransformStream } from "https://deno.land/std@$STD_VERSION/streams/buffer.ts";
* const res = await fetch("https://example.com");
* const parts = res.body!.pipeThrough(new LimitedTransformStream(50));
* ```
*/
LimitedTransformStream,
} from "./limited_transform_stream.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/byte_slice_stream.ts` instead.
*
* A transform stream that only transforms from the zero-indexed `start` and `end` bytes (both inclusive).
*
* @example
* ```ts
* import { ByteSliceStream } from "https://deno.land/std@$STD_VERSION/streams/buffer.ts";
* const response = await fetch("https://example.com");
* const rangedStream = response.body!
* .pipeThrough(new ByteSliceStream(3, 8));
* ```
*/
ByteSliceStream,
} from "./byte_slice_stream.ts";

streams/conversion.ts

@@ -1,273 +1,241 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/reader_from_iterable.ts` instead.
*
* Create a `Deno.Reader` from an iterable of `Uint8Array`s.
*
* ```ts
* import { readerFromIterable, copy } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* const file = await Deno.open("metrics.txt", { write: true });
* const reader = readerFromIterable((async function* () {
* while (true) {
* await new Promise((r) => setTimeout(r, 1000));
* const message = `data: ${JSON.stringify(Deno.metrics())}\n\n`;
* yield new TextEncoder().encode(message);
* }
* })());
* await copy(reader, file);
* ```
*/
readerFromIterable,
} from "./reader_from_iterable.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/writer_from_stream_writer.ts` instead.
*
* Create a `Writer` from a `WritableStreamDefaultWriter`.
*
* @example
* ```ts
* import {
* copy,
* writerFromStreamWriter,
* } from "https://deno.land/std@$STD_VERSION/streams/mod.ts";
* const file = await Deno.open("./deno.land.html", { read: true });
*
* const writableStream = new WritableStream({
* write(chunk): void {
* console.log(chunk);
* },
* });
* const writer = writerFromStreamWriter(writableStream.getWriter());
* await copy(file, writer);
* file.close();
* ```
*/
writerFromStreamWriter,
} from "./writer_from_stream_writer.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/reader_from_stream_reader.ts` instead.
*
* Create a `Reader` from a `ReadableStreamDefaultReader`.
*
* @example
* ```ts
* import {
* copy,
* readerFromStreamReader,
* } from "https://deno.land/std@$STD_VERSION/streams/mod.ts";
* const res = await fetch("https://deno.land");
* const file = await Deno.open("./deno.land.html", { create: true, write: true });
*
* const reader = readerFromStreamReader(res.body!.getReader());
* await copy(reader, file);
* file.close();
* ```
*/
readerFromStreamReader,
} from "./reader_from_stream_reader.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/writable_stream_from_writer.ts` instead.
* Create a `WritableStream` from a `Writer`.
*/
writableStreamFromWriter,
/** @deprecated (will be removed after 0.171.0) Import from `std/streams/writable_stream_from_writer.ts` instead. */
type WritableStreamFromWriterOptions,
} from "./writable_stream_from_writer.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/readable_stream_from_iterable.ts` instead.
*
* Create a `ReadableStream` from any kind of iterable.
*
* ```ts
* import { readableStreamFromIterable } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* const r1 = readableStreamFromIterable(["foo, bar, baz"]);
* const r2 = readableStreamFromIterable(async function* () {
* await new Promise(((r) => setTimeout(r, 1000)));
* yield "foo";
* await new Promise(((r) => setTimeout(r, 1000)));
* yield "bar";
* await new Promise(((r) => setTimeout(r, 1000)));
* yield "baz";
* }());
* ```
*
* If the produced iterator (`iterable[Symbol.asyncIterator]()` or
* `iterable[Symbol.iterator]()`) is a generator, or more specifically is found
* to have a `.throw()` method on it, that will be called upon
* `readableStream.cancel()`. This is the case for the second input type above:
*
* ```ts
* import { readableStreamFromIterable } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* const r3 = readableStreamFromIterable(async function* () {
* try {
* yield "foo";
* } catch (error) {
* console.log(error); // Error: Cancelled by consumer.
* }
* }());
* const reader = r3.getReader();
* console.log(await reader.read()); // { value: "foo", done: false }
* await reader.cancel(new Error("Cancelled by consumer."));
* ```
*/
readableStreamFromIterable,
} from "./readable_stream_from_iterable.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/to_transform_stream.ts` instead.
*
* Convert the generator function into a TransformStream.
*
* ```ts
* import { readableStreamFromIterable, toTransformStream } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* const readable = readableStreamFromIterable([0, 1, 2])
* .pipeThrough(toTransformStream(async function* (src) {
* for await (const chunk of src) {
* yield chunk * 100;
* }
* }));
*
* for await (const chunk of readable) {
* console.log(chunk);
* }
* // output: 0, 100, 200
* ```
*
* @param transformer A function to transform.
* @param writableStrategy An object that optionally defines a queuing strategy for the stream.
* @param readableStrategy An object that optionally defines a queuing strategy for the stream.
*/
toTransformStream,
} from "./to_transform_stream.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/readable_stream_from_reader.ts` instead.
*
* Create a `ReadableStream<Uint8Array>` from a `Deno.Reader`.
*
* When the pull algorithm is called on the stream, a chunk from the reader
* will be read. When `null` is returned from the reader, the stream will be
* closed along with the reader (if it is also a `Deno.Closer`).
*
* An example converting a `Deno.FsFile` into a readable stream:
*
* ```ts
* import { readableStreamFromReader } from "https://deno.land/std@$STD_VERSION/streams/mod.ts";
*
* const file = await Deno.open("./file.txt", { read: true });
* const fileStream = readableStreamFromReader(file);
* ```
*/
readableStreamFromReader,
/** @deprecated (will be removed after 0.171.0) Import from `std/streams/readable_stream_from_reader.ts` instead. */
type ReadableStreamFromReaderOptions,
} from "./readable_stream_from_reader.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/read_all.ts` instead.
*
* Reads Reader `r` until EOF (`null`) and resolves to the content as a
* `Uint8Array`.
*
* ```ts
* import { Buffer } from "https://deno.land/std@$STD_VERSION/io/buffer.ts";
* import { readAll } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* // Example from stdin
* const stdinContent = await readAll(Deno.stdin);
*
* // Example from file
* const file = await Deno.open("my_file.txt", {read: true});
* const myFileContent = await readAll(file);
* file.close();
*
* // Example from buffer
* const myData = new Uint8Array(100);
* // ... fill myData array with data
* const reader = new Buffer(myData.buffer);
* const bufferContent = await readAll(reader);
* ```
*/
readAll,
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/read_all.ts` instead.
*
* Synchronously reads Reader `r` until EOF (`null`) and returns the content
* as `Uint8Array`.
*
* ```ts
* import { Buffer } from "https://deno.land/std@$STD_VERSION/io/buffer.ts";
* import { readAllSync } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* // Example from stdin
* const stdinContent = readAllSync(Deno.stdin);
*
* // Example from file
* const file = Deno.openSync("my_file.txt", {read: true});
* const myFileContent = readAllSync(file);
* file.close();
*
* // Example from buffer
* const myData = new Uint8Array(100);
* // ... fill myData array with data
* const reader = new Buffer(myData.buffer);
* const bufferContent = readAllSync(reader);
* ```
*/
readAllSync,
} from "./read_all.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/write_all.ts` instead.
*
* Write all the content of the array buffer (`arr`) to the writer (`w`).
@@ -293,71 +261,71 @@ export const readAllSync = _readAllSync;
* console.log(writer.bytes().length); // 11
* ```
*/
export const writeAll = _writeAll;
writeAll,
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/write_all.ts` instead.
*
* Synchronously write all the content of the array buffer (`arr`) to the
* writer (`w`).
*
* ```ts
* import { Buffer } from "https://deno.land/std@$STD_VERSION/io/buffer.ts";
* import { writeAllSync } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* // Example writing to stdout
* let contentBytes = new TextEncoder().encode("Hello World");
* writeAllSync(Deno.stdout, contentBytes);
*
* // Example writing to file
* contentBytes = new TextEncoder().encode("Hello World");
* const file = Deno.openSync('test.file', {write: true});
* writeAllSync(file, contentBytes);
* file.close();
*
* // Example writing to buffer
* contentBytes = new TextEncoder().encode("Hello World");
* const writer = new Buffer();
* writeAllSync(writer, contentBytes);
* console.log(writer.bytes().length); // 11
* ```
*/
writeAllSync,
} from "./write_all.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/iterate_reader.ts` instead.
*
* Turns a Reader, `r`, into an async iterator.
*
* ```ts
* import { iterateReader } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* let f = await Deno.open("/etc/passwd");
* for await (const chunk of iterateReader(f)) {
* console.log(chunk);
* }
* f.close();
* ```
*
* Second argument can be used to tune size of a buffer.
* Default size of the buffer is 32kB.
*
* ```ts
* import { iterateReader } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* let f = await Deno.open("/etc/passwd");
* const it = iterateReader(f, {
* bufSize: 1024 * 1024
* });
* for await (const chunk of it) {
* console.log(chunk);
* }
* f.close();
* ```
*/
iterateReader,
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/iterate_reader.ts` instead.
*
* Turns a ReaderSync, `r`, into an iterator.
@@ -393,26 +361,29 @@ export const iterateReader = _iterateReader;
* responsibility to copy contents of the buffer if needed; otherwise the
* next iteration will overwrite contents of previously returned chunk.
*/
export const iterateReaderSync = _iterateReaderSync;
iterateReaderSync,
} from "./iterate_reader.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/copy.ts` instead.
*
* Copies from `src` to `dst` until either EOF (`null`) is read from `src` or
* an error occurs. It resolves to the number of bytes copied or rejects with
* the first error encountered while copying.
*
* ```ts
* import { copy } from "https://deno.land/std@$STD_VERSION/streams/conversion.ts";
*
* const source = await Deno.open("my_file.txt");
* const bytesCopied1 = await copy(source, Deno.stdout);
* const destination = await Deno.create("my_file_2.txt");
* const bytesCopied2 = await copy(source, destination);
* ```
*
* @param src The source to copy from
* @param dst The destination to copy to
* @param options Can be used to tune size of the buffer. Default size is 32kB
*/
copy,
} from "./copy.ts";

streams/delimiter.ts

@@ -1,52 +1,54 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
// This module is browser compatible.
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/text_line_stream.ts` instead.
*
* Transform a stream into a stream where each chunk is divided by a newline,
* be it `\n` or `\r\n`. `\r` can be enabled via the `allowCR` option.
*
* ```ts
* import { TextLineStream } from "https://deno.land/std@$STD_VERSION/streams/delimiter.ts";
* const res = await fetch("https://example.com");
* const lines = res.body!
* .pipeThrough(new TextDecoderStream())
* .pipeThrough(new TextLineStream());
* ```
*/
TextLineStream,
} from "./text_line_stream.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/delimiter_stream.ts` instead.
*
* Transform a stream into a stream where each chunk is divided by a given delimiter.
*
* ```ts
* import { DelimiterStream } from "https://deno.land/std@$STD_VERSION/streams/delimiter.ts";
* const res = await fetch("https://example.com");
* const parts = res.body!
* .pipeThrough(new DelimiterStream(new TextEncoder().encode("foo")))
* .pipeThrough(new TextDecoderStream());
* ```
*/
DelimiterStream,
} from "./delimiter_stream.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/text_delimiter_stream.ts` instead.
*
* Transform a stream into a stream where each chunk is divided by a given delimiter.
*
* ```ts
* import { TextDelimiterStream } from "https://deno.land/std@$STD_VERSION/streams/delimiter.ts";
* const res = await fetch("https://example.com");
* const parts = res.body!
* .pipeThrough(new TextDecoderStream())
* .pipeThrough(new TextDelimiterStream("foo"));
* ```
*/
TextDelimiterStream,
} from "./text_delimiter_stream.ts";

streams/merge.ts

@@ -1,33 +1,35 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/merge_readable_streams.ts` instead.
*
* Merge multiple streams into a single one, not taking order into account.
* If a stream ends before the others, the others will continue adding data,
* and the finished one will not add any more data.
*/
mergeReadableStreams,
} from "./merge_readable_streams.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/zip_readable_streams.ts` instead.
*
* Merge multiple streams into a single one, taking order into account, and each stream
* will wait for a chunk to enqueue before the next stream can append another chunk.
* If a stream ends before other ones, the others will continue adding data in order,
* and the finished one will not add any more data.
*/
zipReadableStreams,
} from "./zip_readable_streams.ts";
export {
/**
* @deprecated (will be removed after 0.171.0) Import from `std/streams/early_zip_readable_streams.ts` instead.
*
* Merge multiple streams into a single one, taking order into account, and each stream
* will wait for a chunk to enqueue before the next stream can append another chunk.
* If a stream ends before other ones, the others will be cancelled.
*/
earlyZipReadableStreams,
} from "./early_zip_readable_streams.ts";