gitsheets 0.22.4 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +21 -0
- package/bin/gitsheets +5 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +256 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/errors.d.ts +72 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/errors.js +74 -0
- package/dist/errors.js.map +1 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +12 -0
- package/dist/index.js.map +1 -0
- package/dist/patch.d.ts +2 -0
- package/dist/patch.d.ts.map +1 -0
- package/dist/patch.js +39 -0
- package/dist/patch.js.map +1 -0
- package/dist/path-template/index.d.ts +42 -0
- package/dist/path-template/index.d.ts.map +1 -0
- package/dist/path-template/index.js +288 -0
- package/dist/path-template/index.js.map +1 -0
- package/dist/push-daemon.d.ts +53 -0
- package/dist/push-daemon.d.ts.map +1 -0
- package/dist/push-daemon.js +148 -0
- package/dist/push-daemon.js.map +1 -0
- package/dist/repository.d.ts +67 -0
- package/dist/repository.d.ts.map +1 -0
- package/dist/repository.js +322 -0
- package/dist/repository.js.map +1 -0
- package/dist/sheet.d.ts +107 -0
- package/dist/sheet.d.ts.map +1 -0
- package/dist/sheet.js +605 -0
- package/dist/sheet.js.map +1 -0
- package/dist/store.d.ts +41 -0
- package/dist/store.d.ts.map +1 -0
- package/dist/store.js +49 -0
- package/dist/store.js.map +1 -0
- package/dist/toml.d.ts +11 -0
- package/dist/toml.d.ts.map +1 -0
- package/dist/toml.js +28 -0
- package/dist/toml.js.map +1 -0
- package/dist/transaction.d.ts +96 -0
- package/dist/transaction.d.ts.map +1 -0
- package/dist/transaction.js +227 -0
- package/dist/transaction.js.map +1 -0
- package/dist/validation.d.ts +37 -0
- package/dist/validation.d.ts.map +1 -0
- package/dist/validation.js +105 -0
- package/dist/validation.js.map +1 -0
- package/package.json +41 -35
- package/bin/cli.js +0 -61
- package/commands/edit.js +0 -90
- package/commands/normalize.js +0 -81
- package/commands/query.js +0 -206
- package/commands/read.js +0 -64
- package/commands/singer-target.js +0 -214
- package/commands/upsert.js +0 -260
- package/lib/GitSheets.js +0 -464
- package/lib/Repository.js +0 -88
- package/lib/Sheet.js +0 -625
- package/lib/errors.js +0 -21
- package/lib/hologit.js +0 -1
- package/lib/logger.js +0 -18
- package/lib/path/BaseComponent.js +0 -24
- package/lib/path/ExpressionComponent.js +0 -26
- package/lib/path/FieldComponent.js +0 -13
- package/lib/path/LiteralComponent.js +0 -12
- package/lib/path/Query.js +0 -18
- package/lib/path/Template.js +0 -214
- package/server.js +0 -120
|
@@ -1,214 +0,0 @@
|
|
|
1
|
-
// yargs command metadata for `singer-target`: consumes Singer tap output
// (JSONL messages from STDIN or a file) and loads each stream into a gitsheet.
exports.command = 'singer-target [jsonl-file]';
exports.desc = 'Load one or more streams from a Singer tap';
exports.builder = {
    'jsonl-file': {
        type: 'string',
        description: 'Read from a jsonl file instead of STDIN',
    },
    // when true, changes are written to the working tree instead of a ref;
    // the handler forces this to false whenever --ref is in effect
    working: {
        type: 'boolean',
        default: true,
        defaultDescription: 'true if ref empty',
    },
    ref: {
        type: 'string',
        description: 'Git ref to use as input instead of working tree',
        defaultDescription: '--commit-to',
    },
    'commit-to': {
        type: 'string',
        description: 'Git ref to commit containing gitsheets to update',
    },
    // recorded in commit message trailers (`Extracted-from:`) by the handler
    'source-label': {
        type: 'string',
        description: 'A label describing the source for the data to tag the commit with',
        default: 'singer-target',
    },
    root: {
        type: 'string',
        describe: 'Root path to .gitsheets in repository (defaults to GITSHEETS_ROOT or /)',
        default: process.env.GITSHEETS_ROOT || '/',
        defaultDescription: 'GITSHEETS_ROOT || "/"',
    },
    prefix: {
        type: 'string',
        describe: 'Path to prefix after root to all sheet paths (defaults to GITSHEETS_PREFIX or none)',
        default: process.env.GITSHEETS_PREFIX,
        defaultDescription: 'GITSHEETS_PREFIX',
    },
    'delete-missing': {
        type: 'boolean',
        describe: 'Delete all existing records in the sheet that are not present in the new set',
        default: false,
    },
};
|
|
45
|
-
|
|
46
|
-
/**
 * Handler for `singer-target`: streams Singer messages via readMessages(),
 * creating a sheet per SCHEMA message and upserting each RECORD, then writes
 * the result either to the working tree or as a commit on --commit-to.
 *
 * Throws when an input --ref cannot be resolved without --commit-to, when a
 * RECORD arrives for a stream with no sheet and no prior SCHEMA, or when an
 * unknown Singer message type is encountered.
 */
exports.handler = async function singerTarget({
    jsonlFile,
    working,
    ref,
    commitTo,
    sourceLabel,
    root,
    prefix,
    deleteMissing,
}) {
    // NOTE(review): `logger` is required but never used in this handler
    const logger = require('../lib/logger.js');
    const Repository = require('../lib/Repository.js');
    const { TreeObject } = require('../lib/hologit');
    const path = require('path');

    const EMPTY_TREE_HASH = TreeObject.getEmptyTreeHash()

    // apply dynamic defaults: --commit-to doubles as the input ref unless
    // --ref was given explicitly, and any ref input disables working-tree mode
    if (commitTo && !ref) {
        ref = commitTo
    }

    if (ref) {
        working = false;
    }

    // normalize short branch names to fully-qualified refs
    if (commitTo && !commitTo.startsWith('refs/heads/')) {
        commitTo = `refs/heads/${commitTo}`;
    }


    // get repo interface
    let repo = await Repository.getFromEnvironment({ working: working, ref: ref });
    let git = await repo.getGit()

    let parentCommitHash = await repo.resolveRef();
    if (!parentCommitHash) {
        // initialize ref or hard crash: seed --commit-to with an empty-tree
        // commit and re-open the repo anchored to it
        if (commitTo) {
            parentCommitHash = await git.commitTree(EMPTY_TREE_HASH, {
                m: `↥ initialize gitsheets workspace ${commitTo}`,
            });
            repo = await Repository.getFromEnvironment({ ref: parentCommitHash });
            git = await repo.getGit()
        } else {
            throw new Error(`input --ref ${ref} could not be resolved, configure --commit-to to initialize automatically`);
        }
    }


    // open all sheets (map keyed by sheet/stream name)
    const sheets = await repo.openSheets({ root, dataTree: prefix });


    // upsert record(s) into sheets
    const clearedSheets = new Set();   // sheets already wiped for --delete-missing
    const writtenStreams = new Set();  // stream names that received records
    for await (const { type, stream, ...message} of readMessages({ jsonlFile })) {
        console.log(`${type}\t${stream}`, message);


        // ignore unhandled message types for now
        if (type == 'STATE' || type == 'ACTIVATE_VERSION') {
            console.warn(`ignoring ${type} message`);
            continue;
        }


        // get sheet
        const sheet = sheets[stream];


        // create schema if needed: the first SCHEMA message for an unknown
        // stream defines a new sheet whose record path is built from the
        // Singer key_properties
        if (!sheet) {
            if (type == 'SCHEMA') {
                sheets[stream] = await repo.openSheet(stream, {
                    root,
                    dataTree: prefix,
                    config: {
                        root: stream,
                        path: message.key_properties.map(p => '${{ '+p+' }}').join('/'),
                        fields: message.schema && message.schema.properties || null,
                    },
                });
                await sheets[stream].writeConfig();
                continue;
            } else {
                throw new Error(`no sheet defined for stream ${stream} and first message was not schema`);
            }
        } else if (type == 'SCHEMA') {
            console.warn('ignoring SCHEMA for already-defined sheet');
            continue;
        }


        // handle record message
        if (type == 'RECORD') {
            // with --delete-missing, wipe each sheet once before its first record
            if (deleteMissing && !clearedSheets.has(sheet)) {
                console.log(`clearing sheet ${sheet.name}`);
                await sheet.clear();
                clearedSheets.add(sheet);
            }

            const { blob: outputBlob, path: outputPath } = await sheet.upsert(message.record);
            console.log(`${outputBlob.hash}\t${outputPath}`);
            writtenStreams.add(stream);
            continue;
        }


        // hard crash for any unexpected type
        throw new Error(`encountered unknown Singer message type: ${type}`);
    }


    // write changes to workspace or ref
    const workspace = await repo.getWorkspace();

    if (working) {
        await workspace.writeWorkingChanges();
    } else if (commitTo) {
        const treeHash = await workspace.root.write();

        // only commit when the tree actually changed relative to the parent
        if (treeHash != await git.getTreeHash(parentCommitHash)) {
            let commitTrailers = [
                `Extracted-from: ${sourceLabel}`,
                ...Array.from(writtenStreams).map(sheetName => `Extracted-sheet: ${sheetName}`),
            ];

            // TODO: write trailers data on streams/tap/source that loader can anchor to per-sheet
            const commitHash = await git.commitTree(treeHash, {
                p: parentCommitHash,
                m: `⭆ extract ${writtenStreams.size} ${writtenStreams.size==1?'stream':'streams'} from ${sourceLabel}\n\n${commitTrailers.join('\n')}`,
            });
            await git.updateRef(commitTo, commitHash);
            console.log(`committed new tree to "${commitTo}": ${parentCommitHash}->${commitHash}`);
        } else {
            console.log('tree unchanged');
        }
    } else {
        // output tree hash
        console.log(await workspace.root.write());
    }
};
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
// library
|
|
193
|
-
async function* readMessages ({ jsonlFile = null } = {}) {
|
|
194
|
-
const inputStream = jsonlFile
|
|
195
|
-
? require('fs').createReadStream(jsonlFile)
|
|
196
|
-
: process.stdin;
|
|
197
|
-
|
|
198
|
-
// read input
|
|
199
|
-
let output = '';
|
|
200
|
-
for await (const chunk of inputStream) {
|
|
201
|
-
output += chunk;
|
|
202
|
-
|
|
203
|
-
let eolIndex;
|
|
204
|
-
while ((eolIndex = output.indexOf('\n')) >= 0) {
|
|
205
|
-
yield JSON.parse(output.slice(0, eolIndex));
|
|
206
|
-
|
|
207
|
-
output = output.slice(eolIndex + 1);
|
|
208
|
-
}
|
|
209
|
-
}
|
|
210
|
-
|
|
211
|
-
if (output.length > 0) {
|
|
212
|
-
yield JSON.parse(output);
|
|
213
|
-
}
|
|
214
|
-
}
|
package/commands/upsert.js
DELETED
|
@@ -1,260 +0,0 @@
|
|
|
1
|
-
const fs = require('fs');
const TOML = require('@iarna/toml');
const { parse: csvParse } = require('fast-csv');
const deepmerge = require('deepmerge');

// Dispatch map from --format values to the async-generator readers defined
// below; each reader yields plain record objects parsed from a file or STDIN.
// Also used to populate the `choices` list of the --format option.
const inputFormats = {
    json: readJsonFile,
    toml: readTomlFile,
    csv: readCsvFile,
};
|
|
11
|
-
|
|
12
|
-
// yargs command metadata for `upsert`: inserts or updates record(s) in a
// named sheet from JSON/TOML/CSV input, a file, STDIN, or inline JSON.
exports.command = 'upsert <sheet> [file]';
exports.desc = 'Upsert a record into a sheet';
exports.builder = {
    sheet: {
        describe: 'Name of sheet to upsert into',
    },
    // '-' (the default) means read JSON from STDIN; the handler also accepts
    // inline JSON when the value starts/ends with {} or []
    file: {
        describe: 'File to read JSON/TOML record from, or - for JSON from STDIN, or inline JSON',
        default: '-',
    },
    root: {
        type: 'string',
        describe: 'Root path to .gitsheets in repository (defaults to GITSHEETS_ROOT or /)',
        default: process.env.GITSHEETS_ROOT || '/',
        defaultDescription: 'GITSHEETS_ROOT || "/"',
    },
    prefix: {
        type: 'string',
        describe: 'Path to prefix after root to all sheet paths (defaults to GITSHEETS_PREFIX or none)',
        default: process.env.GITSHEETS_PREFIX,
        defaultDescription: 'GITSHEETS_PREFIX',
    },
    format: {
        describe: 'Format to parse input data in (defaults to file extension or json)',
        choices: Object.keys(inputFormats),
    },
    encoding: {
        describe: 'Encoding to read input with',
        default: 'utf8',
    },
    // dotted option: each --attachments.<path> value is '<extension>:<source-path>'
    'attachments.<attachment-path>': {
        describe: 'One or more files to attach in the format <extension>:<source-path>',
    },
    'delete-missing': {
        describe: 'Enable to remove all existing records in the sheet that are not present in the new set',
        type: 'boolean',
        default: false,
    },
    'patch-existing': {
        describe: 'For existing records, patch in provided values so that additional properties not included in the input are preserved',
        type: 'boolean',
        default: false,
    },
};
|
|
56
|
-
|
|
57
|
-
/**
 * Handler for `upsert`: parses input records (inline JSON, file, or STDIN in
 * the selected format), upserts each into the target sheet — optionally
 * deep-merging into existing records (--patch-existing) and/or clearing
 * records absent from the input (--delete-missing) — writes any attachments,
 * then flushes changes to the working tree.
 *
 * Throws when the named sheet cannot be found or an attachment source file
 * cannot be read.
 */
exports.handler = async function upsert({
    sheet: sheetName,
    file = null,
    root = null,
    prefix = null,
    format = null,
    encoding,
    attachments = null,
    deleteMissing,
    patchExisting,
    ...argv
}) {
    const logger = require('../lib/logger.js');
    const Repository = require('../lib/Repository.js')
    const path = require('path');

    // apply dynamic defaults: '-' (or empty) means STDIN
    if (!file || file == '-') {
        file = false;
    }

    // infer --format from the file extension, falling back to json
    if (!format) {
        if (file && file.endsWith('.json')) {
            format = 'json';
        } else if (file && file.endsWith('.toml')) {
            format = 'toml'
        } else if (file && file.endsWith('.csv')) {
            format = 'csv'
        } else {
            format = 'json';
        }
    }

    // get repo interface (always working-tree mode for upsert)
    const repo = await Repository.getFromEnvironment({ working: true });
    logger.debug('instantiated repository:', repo);


    // get sheet
    const sheet = await repo.openSheet(sheetName, { root, dataTree: prefix });

    if (!sheet) {
        throw new Error(`sheet '${sheetName}' not found under ${root}/.gitsheets/`);
    }

    logger.debug('loaded sheet:', sheet);


    // clear sheet; `inputSheet` keeps a pre-clear view so --patch-existing
    // can still look up prior record contents after the target is wiped
    let inputSheet = sheet;

    if (deleteMissing) {
        // re-open input sheet
        inputSheet = await sheet.clone();

        // clear target sheet
        await sheet.clear();
    }


    // read incoming record: a value wrapped in {} or [] is parsed as inline
    // JSON; anything else goes through the selected format reader
    // (when file is false, indexing a boolean yields undefined, so this is false)
    const isInlineJson =
        (file[0] == '{' && file[file.length - 1] == '}')
        || file[0] == '[' && file[file.length - 1] == ']';

    const inputRecords = isInlineJson
        ? readJsonString(file, { encoding })
        : inputFormats[format](file, { encoding });



    // upsert record(s) into sheet
    for await (let inputRecord of inputRecords) {

        if (patchExisting) {
            // TODO: move more of this logic inside Sheet class

            // fetch existing record from inputSheet
            const inputRecordPath = await inputSheet.pathForRecord(await inputSheet.normalizeRecord(inputRecord));

            if (inputRecordPath) {
                const { root: inputSheetRoot } = await inputSheet.getCachedConfig();

                // existing record find, merge
                const existingBlob = await inputSheet.dataTree.getChild(`${path.join(inputSheetRoot, inputRecordPath)}.toml`);

                if (existingBlob) {
                    const existingRecord = await inputSheet.readRecord(existingBlob);
                    // incoming values win over existing ones on conflict
                    inputRecord = deepmerge(existingRecord, inputRecord);
                }
            }
        }

        const { blob: outputBlob, path: outputPath } = await sheet.upsert(inputRecord, { patchExisting });
        console.log(`${outputBlob.hash}\t${outputPath}`);

        // NOTE(review): attachments are re-written for every input record —
        // presumably intended for single-record upserts; verify for bulk input
        if (attachments) {
            for (const attachmentPath in attachments) {
                let attachment = attachments[attachmentPath];
                const splitIndex = attachment.indexOf(':');

                // determine extension: explicit '<ext>:' prefix wins,
                // otherwise fall back to the source file's own extension
                let extension;
                if (splitIndex >= 0) {
                    extension = attachment.substr(0, splitIndex);
                    if (extension) {
                        extension = `.${extension}`;
                    }
                    attachment = attachment.substr(splitIndex + 1);
                } else {
                    extension = path.extname(attachment);
                }

                // prepare blob
                let blob;
                try {
                    blob = await repo.writeBlobFromFile(attachment);
                } catch (err) {
                    throw new Error(`Could not read ${attachment}: ${err}`);
                }

                // write attachment
                await sheet.setAttachment(inputRecord, `${attachmentPath}${extension}`, blob);
            }
        }
    }


    // write changes to workspace
    const workspace = await repo.getWorkspace();
    await workspace.writeWorkingChanges();
};
|
|
189
|
-
|
|
190
|
-
/**
 * Async generator yielding record object(s) parsed from an inline JSON string.
 * An array input yields each element; a bare object yields once. `encoding`
 * is accepted for interface parity with the file readers but is unused for an
 * already-decoded string.
 */
async function* readJsonString(string, { encoding }) {
    const parsed = JSON.parse(string);
    yield* (Array.isArray(parsed) ? parsed : [parsed]);
}
|
|
197
|
-
|
|
198
|
-
/**
 * Async generator yielding record object(s) parsed from a JSON file, or from
 * STDIN when `file` is falsy. An array yields each element; a bare object
 * yields once.
 */
async function* readJsonFile(file, { encoding }) {
    // slurp the whole input before parsing
    const source = file ? fs.createReadStream(file) : process.stdin;

    const buffers = [];
    for await (const buffer of source) {
        buffers.push(buffer);
    }

    const parsed = JSON.parse(Buffer.concat(buffers).toString(encoding));

    if (Array.isArray(parsed)) {
        yield* parsed;
    } else {
        yield parsed;
    }
}
|
|
212
|
-
|
|
213
|
-
/**
 * Async generator yielding a single record object parsed from a TOML file,
 * or from STDIN when `file` is falsy. Unlike the JSON/CSV readers, TOML input
 * always describes exactly one record.
 */
async function* readTomlFile(file, { encoding }) {
    // slurp the whole input before parsing
    const source = file ? fs.createReadStream(file) : process.stdin;

    const buffers = [];
    for await (const buffer of source) {
        buffers.push(buffer);
    }

    yield TOML.parse(Buffer.concat(buffers).toString(encoding));
}
|
|
225
|
-
|
|
226
|
-
/**
 * Async generator yielding one record object per CSV row, read from `file` or
 * STDIN when `file` is falsy. Dotted column headers (e.g. "address.city") are
 * expanded into nested objects; empty cells are omitted entirely.
 *
 * Fix: `.pipe()` does not forward errors from the source stream, so a read
 * failure (e.g. a nonexistent file) previously left the `for await` below
 * hanging forever — source errors are now propagated into the parser stream.
 *
 * NOTE(review): `encoding` is accepted for interface parity with the other
 * readers but is not applied here; fast-csv handles decoding — confirm.
 */
async function* readCsvFile(file, { encoding }) {
    const stream = file ? fs.createReadStream(file) : process.stdin;

    const csvParseStream = csvParse({ headers: true });

    // propagate source-stream errors so iteration rejects instead of hanging
    stream.on('error', err => csvParseStream.destroy(err));
    stream.pipe(csvParseStream);

    for await (const raw of csvParseStream) {
        const record = {};

        for (const key in raw) {
            const value = raw[key];

            // skip empty cells so they don't overwrite/patch as empty strings
            if (value === '') {
                continue;
            }

            // expand dotted headers into nested objects, creating
            // intermediate containers as needed
            const subKeys = key.split('.');

            let target = record;
            while (subKeys.length > 1) {
                const subKey = subKeys.shift();

                if (subKey in target) {
                    target = target[subKey]
                } else {
                    target = target[subKey] = {};
                }
            }

            target[subKeys[0]] = value;
        }

        yield record
    }
}
|