@prairielearn/csv 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
package/README.md ADDED
@@ -0,0 +1,50 @@
1
+ # `@prairielearn/csv`
2
+
3
+ A few helpful wrappers on top of the functionality from [`csv-stringify`](https://www.npmjs.com/package/csv-stringify).
4
+
5
+ ## Usage
6
+
7
+ Here's an example taking data from `@prairielearn/postgres#queryCursor()` and writing it to a file, though this should be applicable to any source and destination streams:
8
+
9
+ ```ts
10
+ import { stringifyStream } from '@prairielearn/csv';
11
+ import { queryCursor } from '@prairielearn/postgres';
12
+ import { pipeline } from 'node:stream/promises';
13
+ import { createWriteStream } from 'node:fs';
14
+
15
+ const cursor = await queryCursor('SELECT id FROM workspaces;', {});
16
+ const output = createWriteStream('workspaces.csv');
17
+
18
+ const stringifier = stringifyStream({
19
+ header: true,
20
+ columns: [{ key: 'id', header: 'ID' }],
21
+ // Optionally provide a function to transform each item in the stream.
22
+ transform(record) {
23
+ return {
24
+ id: `workspace-${record.id}`,
25
+ };
26
+ },
27
+ });
28
+
29
+ await pipeline(cursor.stream(100), stringifier, output);
30
+ ```
31
+
32
+ Note that this works best when the source stream is producing data asynchronously, such as through an async iterator. If you use a synchronous data source like `Readable.from([...])`, the conversion will still occur synchronously. If you have a large array of data in memory and want to convert it to a CSV, you can use `stringifyNonblocking`:
33
+
34
+ ```ts
35
+ import { stringifyNonblocking } from '@prairielearn/csv';
36
+ import { createWriteStream } from 'node:fs';
37
+
38
+ const data = Array.from(new Array(100_000), (_, i) => ({ id: i }));
39
+ const output = createWriteStream('numbers.csv');
40
+ stringifyNonblocking(data, {
41
+ header: true,
42
+ columns: [{ key: 'id', header: 'ID' }],
43
+ }).pipe(output);
44
+ ```
45
+
46
+ For lower-level usage, `stringify` and `Stringifier` are also re-exported from `csv-stringify`:
47
+
48
+ ```ts
49
+ import { stringify, Stringifier } from '@prairielearn/csv';
50
+ ```
@@ -0,0 +1,25 @@
1
+ /// <reference types="node" />
2
+ import { stringify, Stringifier, Options as StringifierOptions } from 'csv-stringify';
3
+ import { Handler as TransformHandler } from 'stream-transform';
4
+ export { stringify, Stringifier };
5
+ export interface StringifyNonblockingOptions extends StringifierOptions {
6
+ batchSize?: number;
7
+ }
8
+ /**
9
+ * Streaming transform from an array of objects to a CSV that doesn't
10
+ * block the event loop.
11
+ */
12
+ export declare function stringifyNonblocking(data: any[], options?: StringifyNonblockingOptions): Stringifier;
13
+ interface StringifyOptions<T = any, U = any> extends Pick<StringifierOptions, 'columns' | 'header'> {
14
+ transform?: TransformHandler<T, U>;
15
+ }
16
+ /**
17
+ * Transforms an object stream into a CSV stream.
18
+ *
19
+ * This is a thin wrapper around `stringify` from the `csv-stringify` package
20
+ * with added support for transforming the input stream.
21
+ *
22
+ * Works best when combined with the `pipeline` function from
23
+ * `node:stream/promises`, which will help ensure that errors are handled properly.
24
+ */
25
+ export declare function stringifyStream<T = any, U = any>(options?: StringifyOptions<T, U>): NodeJS.ReadWriteStream;
package/dist/index.js ADDED
@@ -0,0 +1,57 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.stringifyStream = exports.stringifyNonblocking = exports.Stringifier = exports.stringify = void 0;
7
+ const csv_stringify_1 = require("csv-stringify");
8
+ Object.defineProperty(exports, "stringify", { enumerable: true, get: function () { return csv_stringify_1.stringify; } });
9
+ Object.defineProperty(exports, "Stringifier", { enumerable: true, get: function () { return csv_stringify_1.Stringifier; } });
10
+ const stream_transform_1 = require("stream-transform");
11
+ const multipipe_1 = __importDefault(require("multipipe"));
12
+ /**
13
+ * Streaming transform from an array of objects to a CSV that doesn't
14
+ * block the event loop.
15
+ */
16
+ function stringifyNonblocking(data, options = {}) {
17
+ const { batchSize = 100, ...stringifierOptions } = options;
18
+ const stringifier = new csv_stringify_1.Stringifier(stringifierOptions);
19
+ process.nextTick(function () {
20
+ let j = 0;
21
+ function loop() {
22
+ for (let i = 0; i < batchSize; i++) {
23
+ if (j < data.length) {
24
+ stringifier.write(data[j]);
25
+ j += 1;
26
+ }
27
+ else {
28
+ stringifier.end();
29
+ return;
30
+ }
31
+ }
32
+ setImmediate(loop);
33
+ }
34
+ loop();
35
+ });
36
+ return stringifier;
37
+ }
38
+ exports.stringifyNonblocking = stringifyNonblocking;
39
+ /**
40
+ * Transforms an object stream into a CSV stream.
41
+ *
42
+ * This is a thin wrapper around `stringify` from the `csv-stringify` package
43
+ * with added support for transforming the input stream.
44
+ *
45
+ * Works best when combined with the `pipeline` function from
46
+ * `node:stream/promises`, which will help ensure that errors are handled properly.
47
+ */
48
+ function stringifyStream(options = {}) {
49
+ const { transform: _transform, ...stringifierOptions } = options;
50
+ const stringifier = new csv_stringify_1.Stringifier(stringifierOptions);
51
+ if (!_transform)
52
+ return stringifier;
53
+ // TODO: use native `node:stream#compose` once it's stable.
54
+ return (0, multipipe_1.default)((0, stream_transform_1.transform)(_transform), stringifier);
55
+ }
56
+ exports.stringifyStream = stringifyStream;
57
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;AAAA,iDAAsF;AAI7E,0FAJA,yBAAS,OAIA;AAAE,4FAJA,2BAAW,OAIA;AAH/B,uDAA0E;AAC1E,0DAAkC;AAQlC;;;GAGG;AACH,SAAgB,oBAAoB,CAClC,IAAW,EACX,UAAuC,EAAE;IAEzC,MAAM,EAAE,SAAS,GAAG,GAAG,EAAE,GAAG,kBAAkB,EAAE,GAAG,OAAO,CAAC;IAC3D,MAAM,WAAW,GAAG,IAAI,2BAAW,CAAC,kBAAkB,CAAC,CAAC;IAExD,OAAO,CAAC,QAAQ,CAAC;QACf,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,SAAS,IAAI;YACX,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;gBAClC,IAAI,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE;oBACnB,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC3B,CAAC,IAAI,CAAC,CAAC;iBACR;qBAAM;oBACL,WAAW,CAAC,GAAG,EAAE,CAAC;oBAClB,OAAO;iBACR;aACF;YACD,YAAY,CAAC,IAAI,CAAC,CAAC;QACrB,CAAC;QACD,IAAI,EAAE,CAAC;IACT,CAAC,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC;AAzBD,oDAyBC;AAOD;;;;;;;;GAQG;AACH,SAAgB,eAAe,CAC7B,UAAkC,EAAE;IAEpC,MAAM,EAAE,SAAS,EAAE,UAAU,EAAE,GAAG,kBAAkB,EAAE,GAAG,OAAO,CAAC;IACjE,MAAM,WAAW,GAAG,IAAI,2BAAW,CAAC,kBAAkB,CAAC,CAAC;IACxD,IAAI,CAAC,UAAU;QAAE,OAAO,WAAW,CAAC;IACpC,2DAA2D;IAC3D,OAAO,IAAA,mBAAS,EAAC,IAAA,4BAAS,EAAC,UAAU,CAAC,EAAE,WAAW,CAAC,CAAC;AACvD,CAAC;AARD,0CAQC"}
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,77 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const node_stream_1 = require("node:stream");
4
+ const chai_1 = require("chai");
5
+ const index_1 = require("./index");
6
+ function streamToString(stream) {
7
+ const chunks = [];
8
+ return new Promise((resolve, reject) => {
9
+ stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
10
+ stream.on('error', reject);
11
+ stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
12
+ });
13
+ }
14
+ describe('stringifyStream', () => {
15
+ it('stringifies a stream of objects', async () => {
16
+ const stream = node_stream_1.Readable.from([
17
+ { a: 1, b: 1 },
18
+ { a: 2, b: 2 },
19
+ { a: 3, b: 3 },
20
+ ]);
21
+ const csvStream = stream.pipe((0, index_1.stringifyStream)());
22
+ const csv = await streamToString(csvStream);
23
+ chai_1.assert.equal(csv, '1,1\n2,2\n3,3\n');
24
+ });
25
+ it('stringifies a stream of arrays', async () => {
26
+ const stream = node_stream_1.Readable.from([
27
+ ['1', '1'],
28
+ ['2', '2'],
29
+ ['3', '3'],
30
+ ]);
31
+ const csvStream = stream.pipe((0, index_1.stringifyStream)());
32
+ const csv = await streamToString(csvStream);
33
+ chai_1.assert.equal(csv, '1,1\n2,2\n3,3\n');
34
+ });
35
+ it('stringifies a stream with a transform', async () => {
36
+ const stream = node_stream_1.Readable.from([
37
+ { a: 1, b: 1 },
38
+ { a: 2, b: 2 },
39
+ { a: 3, b: 3 },
40
+ ]);
41
+ const csvStream = stream.pipe((0, index_1.stringifyStream)({ transform: (row) => [row.a + 1, row.b + 2] }));
42
+ const csv = await streamToString(csvStream);
43
+ chai_1.assert.equal(csv, '2,3\n3,4\n4,5\n');
44
+ });
45
+ it('stringifies a stream with keyed columns and a transform', async () => {
46
+ const stream = node_stream_1.Readable.from([
47
+ { a: 1, b: 1 },
48
+ { a: 2, b: 2 },
49
+ { a: 3, b: 3 },
50
+ ]);
51
+ const stringifier = (0, index_1.stringifyStream)({
52
+ header: true,
53
+ columns: [
54
+ { key: 'a', header: 'first' },
55
+ { key: 'b', header: 'second' },
56
+ ],
57
+ transform: (row) => [row.a + 1, row.b + 2],
58
+ });
59
+ const csv = await streamToString(stream.pipe(stringifier));
60
+ chai_1.assert.equal(csv, 'first,second\n2,3\n3,4\n4,5\n');
61
+ });
62
+ it('stringifies a stream with named columns and a transform', async () => {
63
+ const stream = node_stream_1.Readable.from([
64
+ { a: 1, b: 1 },
65
+ { a: 2, b: 2 },
66
+ { a: 3, b: 3 },
67
+ ]);
68
+ const stringifier = (0, index_1.stringifyStream)({
69
+ header: true,
70
+ columns: ['first', 'second'],
71
+ transform: (row) => [row.a + 1, row.b + 2],
72
+ });
73
+ const csv = await streamToString(stream.pipe(stringifier));
74
+ chai_1.assert.equal(csv, 'first,second\n2,3\n3,4\n4,5\n');
75
+ });
76
+ });
77
+ //# sourceMappingURL=index.test.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.test.js","sourceRoot":"","sources":["../src/index.test.ts"],"names":[],"mappings":";;AAAA,6CAAuC;AACvC,+BAA8B;AAE9B,mCAA0C;AAE1C,SAAS,cAAc,CAAC,MAA6B;IACnD,MAAM,MAAM,GAAa,EAAE,CAAC;IAC5B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAC9D,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC3B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC1E,CAAC,CAAC,CAAC;AACL,CAAC;AAED,QAAQ,CAAC,iBAAiB,EAAE,GAAG,EAAE;IAC/B,EAAE,CAAC,iCAAiC,EAAE,KAAK,IAAI,EAAE;QAC/C,MAAM,MAAM,GAAG,sBAAQ,CAAC,IAAI,CAAC;YAC3B,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;YACd,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;YACd,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;SACf,CAAC,CAAC;QACH,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,IAAA,uBAAe,GAAE,CAAC,CAAC;QACjD,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,SAAS,CAAC,CAAC;QAC5C,aAAM,CAAC,KAAK,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,gCAAgC,EAAE,KAAK,IAAI,EAAE;QAC9C,MAAM,MAAM,GAAG,sBAAQ,CAAC,IAAI,CAAC;YAC3B,CAAC,GAAG,EAAE,GAAG,CAAC;YACV,CAAC,GAAG,EAAE,GAAG,CAAC;YACV,CAAC,GAAG,EAAE,GAAG,CAAC;SACX,CAAC,CAAC;QACH,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,IAAA,uBAAe,GAAE,CAAC,CAAC;QACjD,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,SAAS,CAAC,CAAC;QAC5C,aAAM,CAAC,KAAK,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,uCAAuC,EAAE,KAAK,IAAI,EAAE;QACrD,MAAM,MAAM,GAAG,sBAAQ,CAAC,IAAI,CAAC;YAC3B,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;YACd,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;YACd,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;SACf,CAAC,CAAC;QACH,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,IAAA,uBAAe,EAAC,EAAE,SAAS,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC/F,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,SAAS,CAAC,CAAC;QAC5C,aAAM,CAAC,KAAK,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;
IACvC,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,yDAAyD,EAAE,KAAK,IAAI,EAAE;QACvE,MAAM,MAAM,GAAG,sBAAQ,CAAC,IAAI,CAAC;YAC3B,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;YACd,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;YACd,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;SACf,CAAC,CAAC;QACH,MAAM,WAAW,GAAG,IAAA,uBAAe,EAAC;YAClC,MAAM,EAAE,IAAI;YACZ,OAAO,EAAE;gBACP,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE;gBAC7B,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE;aAC/B;YACD,SAAS,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;SAC3C,CAAC,CAAC;QACH,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;QAC3D,aAAM,CAAC,KAAK,CAAC,GAAG,EAAE,+BAA+B,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,yDAAyD,EAAE,KAAK,IAAI,EAAE;QACvE,MAAM,MAAM,GAAG,sBAAQ,CAAC,IAAI,CAAC;YAC3B,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;YACd,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;YACd,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;SACf,CAAC,CAAC;QACH,MAAM,WAAW,GAAG,IAAA,uBAAe,EAAC;YAClC,MAAM,EAAE,IAAI;YACZ,OAAO,EAAE,CAAC,OAAO,EAAE,QAAQ,CAAC;YAC5B,SAAS,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;SAC3C,CAAC,CAAC;QACH,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;QAC3D,aAAM,CAAC,KAAK,CAAC,GAAG,EAAE,+BAA+B,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
package/package.json ADDED
@@ -0,0 +1,24 @@
1
+ {
2
+ "name": "@prairielearn/csv",
3
+ "version": "1.0.0",
4
+ "main": "./dist/index.js",
5
+ "scripts": {
6
+ "build": "tsc",
7
+ "dev": "tsc --watch --preserveWatchOutput",
8
+ "test": "mocha --no-config --require ts-node/register src/*.test.ts"
9
+ },
10
+ "devDependencies": {
11
+ "@prairielearn/tsconfig": "*",
12
+ "@types/mocha": "^10.0.1",
13
+ "@types/multipipe": "^3.0.1",
14
+ "@types/node": "^18.14.2",
15
+ "mocha": "^10.2.0",
16
+ "ts-node": "^10.9.1",
17
+ "typescript": "^4.9.4"
18
+ },
19
+ "dependencies": {
20
+ "csv-stringify": "^6.3.0",
21
+ "multipipe": "^4.0.0",
22
+ "stream-transform": "^3.2.2"
23
+ }
24
+ }
@@ -0,0 +1,81 @@
1
+ import { Readable } from 'node:stream';
2
+ import { assert } from 'chai';
3
+
4
+ import { stringifyStream } from './index';
5
+
6
+ function streamToString(stream: NodeJS.ReadableStream): Promise<string> {
7
+ const chunks: Buffer[] = [];
8
+ return new Promise((resolve, reject) => {
9
+ stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
10
+ stream.on('error', reject);
11
+ stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
12
+ });
13
+ }
14
+
15
+ describe('stringifyStream', () => {
16
+ it('stringifies a stream of objects', async () => {
17
+ const stream = Readable.from([
18
+ { a: 1, b: 1 },
19
+ { a: 2, b: 2 },
20
+ { a: 3, b: 3 },
21
+ ]);
22
+ const csvStream = stream.pipe(stringifyStream());
23
+ const csv = await streamToString(csvStream);
24
+ assert.equal(csv, '1,1\n2,2\n3,3\n');
25
+ });
26
+
27
+ it('stringifies a stream of arrays', async () => {
28
+ const stream = Readable.from([
29
+ ['1', '1'],
30
+ ['2', '2'],
31
+ ['3', '3'],
32
+ ]);
33
+ const csvStream = stream.pipe(stringifyStream());
34
+ const csv = await streamToString(csvStream);
35
+ assert.equal(csv, '1,1\n2,2\n3,3\n');
36
+ });
37
+
38
+ it('stringifies a stream with a transform', async () => {
39
+ const stream = Readable.from([
40
+ { a: 1, b: 1 },
41
+ { a: 2, b: 2 },
42
+ { a: 3, b: 3 },
43
+ ]);
44
+ const csvStream = stream.pipe(stringifyStream({ transform: (row) => [row.a + 1, row.b + 2] }));
45
+ const csv = await streamToString(csvStream);
46
+ assert.equal(csv, '2,3\n3,4\n4,5\n');
47
+ });
48
+
49
+ it('stringifies a stream with keyed columns and a transform', async () => {
50
+ const stream = Readable.from([
51
+ { a: 1, b: 1 },
52
+ { a: 2, b: 2 },
53
+ { a: 3, b: 3 },
54
+ ]);
55
+ const stringifier = stringifyStream({
56
+ header: true,
57
+ columns: [
58
+ { key: 'a', header: 'first' },
59
+ { key: 'b', header: 'second' },
60
+ ],
61
+ transform: (row) => [row.a + 1, row.b + 2],
62
+ });
63
+ const csv = await streamToString(stream.pipe(stringifier));
64
+ assert.equal(csv, 'first,second\n2,3\n3,4\n4,5\n');
65
+ });
66
+
67
+ it('stringifies a stream with named columns and a transform', async () => {
68
+ const stream = Readable.from([
69
+ { a: 1, b: 1 },
70
+ { a: 2, b: 2 },
71
+ { a: 3, b: 3 },
72
+ ]);
73
+ const stringifier = stringifyStream({
74
+ header: true,
75
+ columns: ['first', 'second'],
76
+ transform: (row) => [row.a + 1, row.b + 2],
77
+ });
78
+ const csv = await streamToString(stream.pipe(stringifier));
79
+ assert.equal(csv, 'first,second\n2,3\n3,4\n4,5\n');
80
+ });
81
+ });
package/src/index.ts ADDED
@@ -0,0 +1,64 @@
1
+ import { stringify, Stringifier, Options as StringifierOptions } from 'csv-stringify';
2
+ import { transform, Handler as TransformHandler } from 'stream-transform';
3
+ import multipipe from 'multipipe';
4
+
5
+ export { stringify, Stringifier };
6
+
7
+ export interface StringifyNonblockingOptions extends StringifierOptions {
8
+ batchSize?: number;
9
+ }
10
+
11
+ /**
12
+ * Streaming transform from an array of objects to a CSV that doesn't
13
+ * block the event loop.
14
+ */
15
+ export function stringifyNonblocking(
16
+ data: any[],
17
+ options: StringifyNonblockingOptions = {}
18
+ ): Stringifier {
19
+ const { batchSize = 100, ...stringifierOptions } = options;
20
+ const stringifier = new Stringifier(stringifierOptions);
21
+
22
+ process.nextTick(function () {
23
+ let j = 0;
24
+ function loop() {
25
+ for (let i = 0; i < batchSize; i++) {
26
+ if (j < data.length) {
27
+ stringifier.write(data[j]);
28
+ j += 1;
29
+ } else {
30
+ stringifier.end();
31
+ return;
32
+ }
33
+ }
34
+ setImmediate(loop);
35
+ }
36
+ loop();
37
+ });
38
+
39
+ return stringifier;
40
+ }
41
+
42
+ interface StringifyOptions<T = any, U = any>
43
+ extends Pick<StringifierOptions, 'columns' | 'header'> {
44
+ transform?: TransformHandler<T, U>;
45
+ }
46
+
47
+ /**
48
+ * Transforms an object stream into a CSV stream.
49
+ *
50
+ * This is a thin wrapper around `stringify` from the `csv-stringify` package
51
+ * with added support for transforming the input stream.
52
+ *
53
+ * Works best when combined with the `pipeline` function from
54
+ * `node:stream/promises`, which will help ensure that errors are handled properly.
55
+ */
56
+ export function stringifyStream<T = any, U = any>(
57
+ options: StringifyOptions<T, U> = {}
58
+ ): NodeJS.ReadWriteStream {
59
+ const { transform: _transform, ...stringifierOptions } = options;
60
+ const stringifier = new Stringifier(stringifierOptions);
61
+ if (!_transform) return stringifier;
62
+ // TODO: use native `node:stream#compose` once it's stable.
63
+ return multipipe(transform(_transform), stringifier);
64
+ }
package/tsconfig.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "extends": "@prairielearn/tsconfig",
3
+ "compilerOptions": {
4
+ "outDir": "./dist",
5
+ "rootDir": "./src",
6
+ "types": ["mocha", "node"],
7
+ }
8
+ }