@loaders.gl/csv 4.0.0-alpha.9 → 4.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/dist.min.js +60 -42
  2. package/dist/es5/csv-loader.js +37 -31
  3. package/dist/es5/csv-loader.js.map +1 -1
  4. package/dist/es5/csv-writer.js +7 -8
  5. package/dist/es5/csv-writer.js.map +1 -1
  6. package/dist/es5/lib/encoders/encode-csv.js.map +1 -1
  7. package/dist/esm/csv-loader.js +32 -28
  8. package/dist/esm/csv-loader.js.map +1 -1
  9. package/dist/esm/csv-writer.js +7 -8
  10. package/dist/esm/csv-writer.js.map +1 -1
  11. package/dist/esm/lib/encoders/encode-csv.js.map +1 -1
  12. package/dist/src/bundle.d.ts.map +1 -0
  13. package/dist/{csv-loader.d.ts → src/csv-loader.d.ts} +1 -1
  14. package/dist/src/csv-loader.d.ts.map +1 -0
  15. package/dist/src/csv-writer.d.ts +11 -0
  16. package/dist/src/csv-writer.d.ts.map +1 -0
  17. package/dist/src/index.d.ts.map +1 -0
  18. package/dist/src/lib/encoders/encode-csv.d.ts +7 -0
  19. package/dist/src/lib/encoders/encode-csv.d.ts.map +1 -0
  20. package/dist/src/papaparse/async-iterator-streamer.d.ts.map +1 -0
  21. package/dist/src/papaparse/papaparse.d.ts.map +1 -0
  22. package/dist/tsconfig.tsbuildinfo +1 -0
  23. package/package.json +4 -4
  24. package/src/csv-loader.ts +51 -54
  25. package/src/csv-writer.ts +15 -11
  26. package/src/lib/encoders/encode-csv.ts +1 -8
  27. package/dist/bundle.d.ts.map +0 -1
  28. package/dist/bundle.js +0 -5
  29. package/dist/csv-loader.d.ts.map +0 -1
  30. package/dist/csv-loader.js +0 -268
  31. package/dist/csv-writer.d.ts +0 -6
  32. package/dist/csv-writer.d.ts.map +0 -1
  33. package/dist/csv-writer.js +0 -23
  34. package/dist/index.d.ts.map +0 -1
  35. package/dist/index.js +0 -8
  36. package/dist/lib/encoders/encode-csv.d.ts +0 -13
  37. package/dist/lib/encoders/encode-csv.d.ts.map +0 -1
  38. package/dist/lib/encoders/encode-csv.js +0 -50
  39. package/dist/papaparse/async-iterator-streamer.d.ts.map +0 -1
  40. package/dist/papaparse/async-iterator-streamer.js +0 -63
  41. package/dist/papaparse/papaparse.d.ts.map +0 -1
  42. package/dist/papaparse/papaparse.js +0 -935
  43. /package/dist/{bundle.d.ts → src/bundle.d.ts} +0 -0
  44. /package/dist/{index.d.ts → src/index.d.ts} +0 -0
  45. /package/dist/{papaparse → src/papaparse}/async-iterator-streamer.d.ts +0 -0
  46. /package/dist/{papaparse → src/papaparse}/papaparse.d.ts +0 -0
package/src/csv-writer.ts CHANGED
@@ -1,18 +1,16 @@
  // loaders.gl, MIT license

  /* global TextEncoder */
- import type {Writer} from '@loaders.gl/loader-utils';
+ import type {Writer, WriterOptions} from '@loaders.gl/loader-utils';
  import type {Table, TableBatch} from '@loaders.gl/schema';
- import type {CSVWriterOptions} from './lib/encoders/encode-csv';
  import {encodeTableAsCSV} from './lib/encoders/encode-csv';

- export type {CSVWriterOptions};
-
- const DEFAULT_WRITER_OPTIONS: Required<CSVWriterOptions> = {
-   csv: {
-     useDisplayNames: false
-   },
-   useDisplayNames: false
+ export type CSVWriterOptions = WriterOptions & {
+   csv?: {
+     useDisplayNames?: boolean;
+   };
+   /** @deprecated */
+   useDisplayNames?: boolean;
  };

  export const CSVWriter: Writer<Table, TableBatch, CSVWriterOptions> = {
@@ -22,9 +20,15 @@ export const CSVWriter: Writer<Table, TableBatch, CSVWriterOptions> = {
    name: 'CSV',
    extensions: ['csv'],
    mimeTypes: ['text/csv'],
-   options: DEFAULT_WRITER_OPTIONS,
+   options: {
+     csv: {
+       useDisplayNames: false
+     },
+     /** @deprecated use csv.displayNames */
+     useDisplayNames: false
+   },
    text: true,
    encode: async (table, options) =>
      new TextEncoder().encode(encodeTableAsCSV(table, options)).buffer,
-   encodeText: (table, options) => encodeTableAsCSV(table, options)
+   encodeTextSync: (table, options) => encodeTableAsCSV(table, options)
  };
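The visible writer changes are that CSVWriterOptions now lives in csv-writer.ts (extending WriterOptions) and that the encodeText hook was renamed encodeTextSync. Below is a minimal usage sketch, not taken from the package: it assumes CSVWriterOptions is still re-exported from the package index as in the previous version, and that the table literal follows the 'object-row-table' shape from @loaders.gl/schema, with field metadata carrying the displayName string the encoder reads.

import type {Table} from '@loaders.gl/schema';
import {CSVWriter} from '@loaders.gl/csv';
import type {CSVWriterOptions} from '@loaders.gl/csv';

// Assumed object-row-table shape; displayName metadata is what useDisplayNames reads
const table: Table = {
  shape: 'object-row-table',
  schema: {
    metadata: {},
    fields: [
      {name: 'id', type: 'int32', metadata: {displayName: 'ID'}},
      {name: 'name', type: 'utf8', metadata: {}}
    ]
  },
  data: [
    {id: 1, name: 'alpha'},
    {id: 2, name: 'beta'}
  ]
};

// The nested csv.useDisplayNames flag replaces the deprecated top-level one
const options: CSVWriterOptions = {csv: {useDisplayNames: true}};

// encodeText was renamed to encodeTextSync in this release
const csv = CSVWriter.encodeTextSync?.(table, options);
console.log(csv); // e.g. "ID,name\n1,alpha\n2,beta"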
package/src/lib/encoders/encode-csv.ts CHANGED
@@ -3,17 +3,10 @@

  import {Table, makeArrayRowIterator, getTableNumCols} from '@loaders.gl/schema';
  import {csvFormatRows} from 'd3-dsv';
+ import type {CSVWriterOptions} from '../../csv-writer';

  type EncodableData = string | null;

- export type CSVWriterOptions = {
-   csv?: {
-     useDisplayNames?: boolean;
-   };
-   /** @deprecated */
-   useDisplayNames?: boolean;
- };
-
  /**
   * Encode a Table object as CSV
   */
package/dist/bundle.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"bundle.d.ts","sourceRoot":"","sources":["../src/bundle.ts"],"names":[],"mappings":"AACA,QAAA,MAAM,aAAa,KAAqB,CAAC"}
package/dist/bundle.js DELETED
@@ -1,5 +0,0 @@
- "use strict";
- // @ts-nocheck
- const moduleExports = require('./index');
- globalThis.loaders = globalThis.loaders || {};
- module.exports = Object.assign(globalThis.loaders, moduleExports);
package/dist/csv-loader.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"csv-loader.d.ts","sourceRoot":"","sources":["../src/csv-loader.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAC,gBAAgB,EAAE,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAC9E,OAAO,KAAK,EAAQ,UAAU,EAAC,MAAM,oBAAoB,CAAC;AAE1D,OAAO,EAEL,KAAK,EAIN,MAAM,oBAAoB,CAAC;AAW5B,MAAM,MAAM,gBAAgB,GAAG,aAAa,GAAG;IAC7C,GAAG,CAAC,EAAE;QAEJ,KAAK,CAAC,EAAE,iBAAiB,GAAG,kBAAkB,GAAG,gBAAgB,CAAC;QAClE,yDAAyD;QACzD,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,MAAM,CAAC,EAAE,MAAM,CAAC;QAKhB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,UAAU,CAAC,EAAE,MAAM,CAAC;QAEpB,aAAa,CAAC,EAAE,OAAO,CAAC;QACxB,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,cAAc,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;QAEpC,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;KAE9B,CAAC;CACH,CAAC;AAsBF,eAAO,MAAM,SAAS,EAAE,gBAAgB,CAAC,KAAK,EAAE,UAAU,EAAE,gBAAgB,CAe3E,CAAC"}
package/dist/csv-loader.js DELETED
@@ -1,268 +0,0 @@
- "use strict";
- // loaders.gl, MIT license
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.CSVLoader = void 0;
- const schema_1 = require("@loaders.gl/schema");
- const papaparse_1 = __importDefault(require("./papaparse/papaparse"));
- const async_iterator_streamer_1 = __importDefault(require("./papaparse/async-iterator-streamer"));
- // __VERSION__ is injected by babel-plugin-version-inline
- // @ts-ignore TS2304: Cannot find name '__VERSION__'.
- const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
- const DEFAULT_CSV_LOADER_OPTIONS = {
-     csv: {
-         shape: 'object-row-table',
-         optimizeMemoryUsage: false,
-         // CSV options
-         header: 'auto',
-         columnPrefix: 'column',
-         // delimiter: auto
-         // newline: auto
-         quoteChar: '"',
-         escapeChar: '"',
-         dynamicTyping: true,
-         comments: false,
-         skipEmptyLines: true,
-         // transform: null?
-         delimitersToGuess: [',', '\t', '|', ';']
-         // fastMode: auto
-     }
- };
- exports.CSVLoader = {
-     id: 'csv',
-     module: 'csv',
-     name: 'CSV',
-     version: VERSION,
-     extensions: ['csv', 'tsv', 'dsv'],
-     mimeTypes: ['text/csv', 'text/tab-separated-values', 'text/dsv'],
-     category: 'table',
-     parse: async (arrayBuffer, options) => parseCSV(new TextDecoder().decode(arrayBuffer), options),
-     parseText: (text, options) => parseCSV(text, options),
-     parseInBatches: parseCSVInBatches,
-     // @ts-ignore
-     // testText: null,
-     options: DEFAULT_CSV_LOADER_OPTIONS
- };
- async function parseCSV(csvText, options) {
-     // Apps can call the parse method directly, we so apply default options here
-     const csvOptions = { ...DEFAULT_CSV_LOADER_OPTIONS.csv, ...options?.csv };
-     const firstRow = readFirstRow(csvText);
-     const header = csvOptions.header === 'auto' ? isHeaderRow(firstRow) : Boolean(csvOptions.header);
-     const parseWithHeader = header;
-     const papaparseConfig = {
-         // dynamicTyping: true,
-         ...csvOptions,
-         header: parseWithHeader,
-         download: false,
-         transformHeader: parseWithHeader ? duplicateColumnTransformer() : undefined,
-         error: (e) => {
-             throw new Error(e);
-         }
-     };
-     const result = papaparse_1.default.parse(csvText, papaparseConfig);
-     let { data: rows } = result;
-     const headerRow = result.meta.fields || generateHeader(csvOptions.columnPrefix, firstRow.length);
-     switch (csvOptions.shape) {
-         case 'object-row-table':
-             rows = rows.map((row) => (Array.isArray(row) ? (0, schema_1.convertToObjectRow)(row, headerRow) : row));
-             break;
-         case 'array-row-table':
-             rows = rows.map((row) => (Array.isArray(row) ? row : (0, schema_1.convertToArrayRow)(row, headerRow)));
-             break;
-         default:
-     }
-     /*
-     if (!header && shape === 'object-row-table') {
-       // If the dataset has no header, transform the array result into an object shape with an
-       // autogenerated header
-       return result.data.map((row) =>
-         row.reduce((acc, value, i) => {
-           acc[headerRow[i]] = value;
-           return acc;
-         }, {})
-       );
-     }
-     */
-     return rows;
- }
- // TODO - support batch size 0 = no batching/single batch?
- function parseCSVInBatches(asyncIterator, options) {
-     // Papaparse does not support standard batch size handling
-     // TODO - investigate papaparse chunks mode
-     options = { ...options };
-     if (options.batchSize === 'auto') {
-         options.batchSize = 4000;
-     }
-     // Apps can call the parse method directly, we so apply default options here
-     const csvOptions = { ...DEFAULT_CSV_LOADER_OPTIONS.csv, ...options?.csv };
-     const asyncQueue = new schema_1.AsyncQueue();
-     let isFirstRow = true;
-     let headerRow = null;
-     let tableBatchBuilder = null;
-     let schema = null;
-     const config = {
-         // dynamicTyping: true, // Convert numbers and boolean values in rows from strings,
-         ...csvOptions,
-         header: false,
-         download: false,
-         // chunkSize is set to 5MB explicitly (same as Papaparse default) due to a bug where the
-         // streaming parser gets stuck if skipEmptyLines and a step callback are both supplied.
-         // See https://github.com/mholt/PapaParse/issues/465
-         chunkSize: 1024 * 1024 * 5,
-         // skipEmptyLines is set to a boolean value if supplied. Greedy is set to true
-         // skipEmptyLines is handled manually given two bugs where the streaming parser gets stuck if
-         // both of the skipEmptyLines and step callback options are provided:
-         // - true doesn't work unless chunkSize is set: https://github.com/mholt/PapaParse/issues/465
-         // - greedy doesn't work: https://github.com/mholt/PapaParse/issues/825
-         skipEmptyLines: false,
-         // step is called on every row
-         // eslint-disable-next-line complexity
-         step(results) {
-             let row = results.data;
-             if (csvOptions.skipEmptyLines) {
-                 // Manually reject lines that are empty
-                 const collapsedRow = row.flat().join('').trim();
-                 if (collapsedRow === '') {
-                     return;
-                 }
-             }
-             const bytesUsed = results.meta.cursor;
-             // Check if we need to save a header row
-             if (isFirstRow && !headerRow) {
-                 // Auto detects or can be forced with csvOptions.header
-                 const header = csvOptions.header === 'auto' ? isHeaderRow(row) : Boolean(csvOptions.header);
-                 if (header) {
-                     headerRow = row.map(duplicateColumnTransformer());
-                     return;
-                 }
-             }
-             // If first data row, we can deduce the schema
-             if (isFirstRow) {
-                 isFirstRow = false;
-                 if (!headerRow) {
-                     headerRow = generateHeader(csvOptions.columnPrefix, row.length);
-                 }
-                 schema = deduceSchema(row, headerRow);
-             }
-             if (csvOptions.optimizeMemoryUsage) {
-                 // A workaround to allocate new strings and don't retain pointers to original strings.
-                 // https://bugs.chromium.org/p/v8/issues/detail?id=2869
-                 row = JSON.parse(JSON.stringify(row));
-             }
-             // Add the row
-             tableBatchBuilder =
-                 tableBatchBuilder ||
-                     new schema_1.TableBatchBuilder(
-                     // @ts-expect-error TODO this is not a proper schema
-                     schema, {
-                         shape: csvOptions.shape || 'array-row-table',
-                         ...options
-                     });
-             try {
-                 tableBatchBuilder.addRow(row);
-                 // If a batch has been completed, emit it
-                 const batch = tableBatchBuilder && tableBatchBuilder.getFullBatch({ bytesUsed });
-                 if (batch) {
-                     asyncQueue.enqueue(batch);
-                 }
-             }
-             catch (error) {
-                 asyncQueue.enqueue(error);
-             }
-         },
-         // complete is called when all rows have been read
-         complete(results) {
-             try {
-                 const bytesUsed = results.meta.cursor;
-                 // Ensure any final (partial) batch gets emitted
-                 const batch = tableBatchBuilder && tableBatchBuilder.getFinalBatch({ bytesUsed });
-                 if (batch) {
-                     asyncQueue.enqueue(batch);
-                 }
-             }
-             catch (error) {
-                 asyncQueue.enqueue(error);
-             }
-             asyncQueue.close();
-         }
-     };
-     papaparse_1.default.parse(asyncIterator, config, async_iterator_streamer_1.default);
-     // TODO - Does it matter if we return asyncIterable or asyncIterator
-     // return asyncQueue[Symbol.asyncIterator]();
-     return asyncQueue;
- }
- /**
-  * Checks if a certain row is a header row
-  * @param row the row to check
-  * @returns true if the row looks like a header
-  */
- function isHeaderRow(row) {
-     return row && row.every((value) => typeof value === 'string');
- }
- /**
-  * Reads, parses, and returns the first row of a CSV text
-  * @param csvText the csv text to parse
-  * @returns the first row
-  */
- function readFirstRow(csvText) {
-     const result = papaparse_1.default.parse(csvText, {
-         download: false,
-         dynamicTyping: true,
-         preview: 1
-     });
-     return result.data[0];
- }
- /**
-  * Creates a transformer that renames duplicate columns. This is needed as Papaparse doesn't handle
-  * duplicate header columns and would use the latest occurrence by default.
-  * See the header option in https://www.papaparse.com/docs#config
-  * @returns a transform function that returns sanitized names for duplicate fields
-  */
- function duplicateColumnTransformer() {
-     const observedColumns = new Set();
-     return (col) => {
-         let colName = col;
-         let counter = 1;
-         while (observedColumns.has(colName)) {
-             colName = `${col}.${counter}`;
-             counter++;
-         }
-         observedColumns.add(colName);
-         return colName;
-     };
- }
- /**
-  * Generates the header of a CSV given a prefix and a column count
-  * @param columnPrefix the columnPrefix to use
-  * @param count the count of column names to generate
-  * @returns an array of column names
-  */
- function generateHeader(columnPrefix, count = 0) {
-     const headers = [];
-     for (let i = 0; i < count; i++) {
-         headers.push(`${columnPrefix}${i + 1}`);
-     }
-     return headers;
- }
- function deduceSchema(row, headerRow) {
-     const schema = headerRow ? {} : [];
-     for (let i = 0; i < row.length; i++) {
-         const columnName = (headerRow && headerRow[i]) || i;
-         const value = row[i];
-         switch (typeof value) {
-             case 'number':
-             case 'boolean':
-                 // TODO - booleans could be handled differently...
-                 schema[columnName] = { name: String(columnName), index: i, type: Float32Array };
-                 break;
-             case 'string':
-             default:
-                 schema[columnName] = { name: String(columnName), index: i, type: Array };
-             // We currently only handle numeric rows
-             // TODO we could offer a function to map strings to numbers?
-         }
-     }
-     return schema;
- }
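The deleted dist copy above is where the loader's defaults and header auto-detection (header: 'auto' plus isHeaderRow) are easiest to read. A hedged usage sketch, not part of this diff, assuming the standard parse() entry point from @loaders.gl/core; the sample data is made up.

import {parse} from '@loaders.gl/core';
import {CSVLoader} from '@loaders.gl/csv';

const csvText = 'id,name\n1,alpha\n2,beta\n';

// header: 'auto' lets isHeaderRow() decide from the first row;
// shape picks object rows vs array rows, as in the switch statement above
const rows = await parse(csvText, CSVLoader, {
  csv: {
    shape: 'object-row-table',
    header: 'auto',
    skipEmptyLines: true
  }
});

// With the dist shown above, object-row-table yields an array of row objects,
// e.g. [{id: 1, name: 'alpha'}, {id: 2, name: 'beta'}]
console.log(rows);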
package/dist/csv-writer.d.ts DELETED
@@ -1,6 +0,0 @@
- import type { Writer } from '@loaders.gl/loader-utils';
- import type { Table, TableBatch } from '@loaders.gl/schema';
- import type { CSVWriterOptions } from './lib/encoders/encode-csv';
- export type { CSVWriterOptions };
- export declare const CSVWriter: Writer<Table, TableBatch, CSVWriterOptions>;
- //# sourceMappingURL=csv-writer.d.ts.map
package/dist/csv-writer.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"csv-writer.d.ts","sourceRoot":"","sources":["../src/csv-writer.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,0BAA0B,CAAC;AACrD,OAAO,KAAK,EAAC,KAAK,EAAE,UAAU,EAAC,MAAM,oBAAoB,CAAC;AAC1D,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,2BAA2B,CAAC;AAGhE,YAAY,EAAC,gBAAgB,EAAC,CAAC;AAS/B,eAAO,MAAM,SAAS,EAAE,MAAM,CAAC,KAAK,EAAE,UAAU,EAAE,gBAAgB,CAYjE,CAAC"}
package/dist/csv-writer.js DELETED
@@ -1,23 +0,0 @@
- "use strict";
- // loaders.gl, MIT license
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.CSVWriter = void 0;
- const encode_csv_1 = require("./lib/encoders/encode-csv");
- const DEFAULT_WRITER_OPTIONS = {
-     csv: {
-         useDisplayNames: false
-     },
-     useDisplayNames: false
- };
- exports.CSVWriter = {
-     id: 'csv',
-     version: 'latest',
-     module: 'csv',
-     name: 'CSV',
-     extensions: ['csv'],
-     mimeTypes: ['text/csv'],
-     options: DEFAULT_WRITER_OPTIONS,
-     text: true,
-     encode: async (table, options) => new TextEncoder().encode((0, encode_csv_1.encodeTableAsCSV)(table, options)).buffer,
-     encodeText: (table, options) => (0, encode_csv_1.encodeTableAsCSV)(table, options)
- };
package/dist/index.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAEA,YAAY,EAAC,gBAAgB,EAAC,MAAM,cAAc,CAAC;AACnD,OAAO,EAAC,SAAS,EAAC,MAAM,cAAc,CAAC;AAEvC,YAAY,EAAC,gBAAgB,EAAC,MAAM,cAAc,CAAC;AACnD,OAAO,EAAC,SAAS,EAAC,MAAM,cAAc,CAAC"}
package/dist/index.js DELETED
@@ -1,8 +0,0 @@
- "use strict";
- // loaders.gl, MIT license
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.CSVWriter = exports.CSVLoader = void 0;
- var csv_loader_1 = require("./csv-loader");
- Object.defineProperty(exports, "CSVLoader", { enumerable: true, get: function () { return csv_loader_1.CSVLoader; } });
- var csv_writer_1 = require("./csv-writer");
- Object.defineProperty(exports, "CSVWriter", { enumerable: true, get: function () { return csv_writer_1.CSVWriter; } });
package/dist/lib/encoders/encode-csv.d.ts DELETED
@@ -1,13 +0,0 @@
- import { Table } from '@loaders.gl/schema';
- export type CSVWriterOptions = {
-     csv?: {
-         useDisplayNames?: boolean;
-     };
-     /** @deprecated */
-     useDisplayNames?: boolean;
- };
- /**
-  * Encode a Table object as CSV
-  */
- export declare function encodeTableAsCSV(table: Table, options?: CSVWriterOptions): string;
- //# sourceMappingURL=encode-csv.d.ts.map
package/dist/lib/encoders/encode-csv.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"encode-csv.d.ts","sourceRoot":"","sources":["../../../src/lib/encoders/encode-csv.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,KAAK,EAAwC,MAAM,oBAAoB,CAAC;AAKhF,MAAM,MAAM,gBAAgB,GAAG;IAC7B,GAAG,CAAC,EAAE;QACJ,eAAe,CAAC,EAAE,OAAO,CAAC;KAC3B,CAAC;IACF,kBAAkB;IAClB,eAAe,CAAC,EAAE,OAAO,CAAC;CAC3B,CAAC;AAEF;;GAEG;AACH,wBAAgB,gBAAgB,CAC9B,KAAK,EAAE,KAAK,EACZ,OAAO,GAAE,gBAAiD,GACzD,MAAM,CAsBR"}
package/dist/lib/encoders/encode-csv.js DELETED
@@ -1,50 +0,0 @@
- "use strict";
- // loaders.gl, MIT license
- // Copyright 2022 Foursquare Labs, Inc.
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.encodeTableAsCSV = void 0;
- const schema_1 = require("@loaders.gl/schema");
- const d3_dsv_1 = require("d3-dsv");
- /**
-  * Encode a Table object as CSV
-  */
- function encodeTableAsCSV(table, options = { csv: { useDisplayNames: true } }) {
-     const useDisplayNames = options.useDisplayNames || options.csv?.useDisplayNames;
-     const fields = table.schema?.fields || [];
-     const columnNames = fields.map((f) => {
-         // This is a leaky abstraction, assuming Kepler metadata
-         const displayName = f.metadata?.displayName;
-         return useDisplayNames && typeof displayName === 'string' ? displayName : f.name;
-     });
-     const formattedData = [columnNames];
-     for (const row of (0, schema_1.makeArrayRowIterator)(table)) {
-         const formattedRow = [];
-         for (let columnIndex = 0; columnIndex < (0, schema_1.getTableNumCols)(table); ++columnIndex) {
-             const value = row[columnIndex];
-             formattedRow[columnIndex] = preformatFieldValue(value);
-         }
-         formattedData.push(formattedRow);
-     }
-     return (0, d3_dsv_1.csvFormatRows)(formattedData);
- }
- exports.encodeTableAsCSV = encodeTableAsCSV;
- /**
-  * Stringifies a value
-  * @todo Why is it called parse?
-  */
- const preformatFieldValue = (value) => {
-     if (value === null || value === undefined) {
-         // TODO: It would be nice to distinguish between missing values and the empty string
-         // https://github.com/d3/d3-dsv/issues/84
-         return null;
-     }
-     if (value instanceof Date) {
-         // d3-dsv formats dates without timezones if they don't have time info;
-         // this forces them to always use fully-qualified ISO time strings
-         return value.toISOString();
-     }
-     if (typeof value === 'object') {
-         return JSON.stringify(value);
-     }
-     return String(value);
- };
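The encoder above is where useDisplayNames is resolved: the nested csv flag (or the deprecated top-level one) switches column headers from field names to field metadata displayNames. A standalone illustration of just that name-resolution step; SimpleField and resolveColumnNames are hypothetical helpers for this sketch, not loaders.gl APIs.

type SimpleField = {name: string; metadata?: {displayName?: string}};

// Mirrors the columnNames mapping in the deleted encode-csv.js above
function resolveColumnNames(fields: SimpleField[], useDisplayNames: boolean): string[] {
  return fields.map((f) => {
    const displayName = f.metadata?.displayName;
    return useDisplayNames && typeof displayName === 'string' ? displayName : f.name;
  });
}

const fields: SimpleField[] = [
  {name: 'id', metadata: {displayName: 'ID'}},
  {name: 'name'}
];
console.log(resolveColumnNames(fields, true));  // ['ID', 'name']
console.log(resolveColumnNames(fields, false)); // ['id', 'name']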
package/dist/papaparse/async-iterator-streamer.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"async-iterator-streamer.d.ts","sourceRoot":"","sources":["../../src/papaparse/async-iterator-streamer.ts"],"names":[],"mappings":"AAWA,iBAAwB,qBAAqB,CAAC,MAAM,KAAA,QAwDnD;kBAxDuB,qBAAqB;;;eAArB,qBAAqB"}
package/dist/papaparse/async-iterator-streamer.js DELETED
@@ -1,63 +0,0 @@
- "use strict";
- // @ts-nocheck
- // A custom papaparse `Streamer` for async iterators
- // Ideally this can be contributed back to papaparse
- // Or papaparse can expose Streamer API so we can extend without forking.
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- /* eslint-disable no-invalid-this */
- // Note: papaparse is not an ES6 module
- const papaparse_1 = __importDefault(require("./papaparse"));
- const { ChunkStreamer } = papaparse_1.default;
- function AsyncIteratorStreamer(config) {
-     config = config || {};
-     ChunkStreamer.call(this, config);
-     this.textDecoder = new TextDecoder(this._config.encoding);
-     // Implement ChunkStreamer base class methods
-     // this.pause = function() {
-     //   ChunkStreamer.prototype.pause.apply(this, arguments);
-     // };
-     // this.resume = function() {
-     //   ChunkStreamer.prototype.resume.apply(this, arguments);
-     //   this._input.resume();
-     // };
-     this.stream = async function (asyncIterator) {
-         this._input = asyncIterator;
-         try {
-             // ES2018 version
-             // TODO - check for pause and abort flags?
-             for await (const chunk of asyncIterator) {
-                 this.parseChunk(this.getStringChunk(chunk));
-             }
-             // ES5 VERSION
-             // while (true) {
-             //   asyncIterator.next().then(function(value) {
-             //     if (value.done) {
-             //       // finalize iterator?
-             //     }
-             //   }
-             //   const = await ;
-             //   if (done) return total;
-             //   total += value.length;
-             // }
-             this._finished = true;
-             this.parseChunk('');
-         }
-         catch (error) {
-             // Inform ChunkStreamer base class of error
-             this._sendError(error);
-         }
-     };
-     this._nextChunk = function nextChunk() {
-         // Left empty, as async iterator automatically pulls next chunk
-     };
-     // HELPER METHODS
-     this.getStringChunk = function (chunk) {
-         return typeof chunk === 'string' ? chunk : this.textDecoder.decode(chunk, { stream: true });
-     };
- }
- exports.default = AsyncIteratorStreamer;
- AsyncIteratorStreamer.prototype = Object.create(ChunkStreamer.prototype);
- AsyncIteratorStreamer.prototype.constructor = AsyncIteratorStreamer;
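This custom streamer is what lets papaparse pull from async iterators, which is the path the loader's parseCSVInBatches takes. A hedged sketch, not from the diff, assuming the parseInBatches() entry point from @loaders.gl/core; the URL is a placeholder, and batchSize: 'auto' maps to 4000 rows in the deleted loader code above.

import {parseInBatches} from '@loaders.gl/core';
import {CSVLoader} from '@loaders.gl/csv';

// fetch() gives a Response whose body is streamed chunk by chunk into papaparse
const batches = await parseInBatches(fetch('https://example.com/data.csv'), CSVLoader, {
  csv: {shape: 'object-row-table'},
  batchSize: 'auto'
});

for await (const batch of batches) {
  // Each emitted batch comes from the TableBatchBuilder in the deleted csv-loader.js
  console.log(batch.length, 'rows in this batch');
}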
package/dist/papaparse/papaparse.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"papaparse.d.ts","sourceRoot":"","sources":["../../src/papaparse/papaparse.ts"],"names":[],"mappings":"AAmBA,QAAA,MAAM,IAAI;;;;;;;;;;;;;;;;CAuBT,CAAC;AACF,eAAe,IAAI,CAAC;AAepB,iBAAS,SAAS,CAChB,MAAM,KAAA,EACN,OAAO,KAAA,EACP,mBAAmB,CAAC,KAAA,OAiErB;AAED,iBAAS,SAAS,CAAC,MAAM,KAAA,EAAE,OAAO,KAAA,UA+KjC;AAED,gFAAgF;AAChF,iBAAS,aAAa,CAAC,MAAM,KAAA,QAuF5B;AACD,iBAAS,cAAc,CAAC,MAAM,KAAA,QAiB7B;kBAjBQ,cAAc;;;AAsBvB,iBAAS,YAAY,CAAC,OAAO,KAAA,QA8T5B;AAOD,gEAAgE;AAChE,iBAAS,MAAM,CAAC,MAAM,KAAA,QA8TrB"}