node-es-transformer 1.0.0-alpha1 → 1.0.0-alpha10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -6,7 +6,30 @@
6
6
 
7
7
  A nodejs based library to (re)index and transform data from/to Elasticsearch.
8
8
 
9
- **This is experimental code, use at your own risk.**
9
+ ### Why another reindex/ingestion tool?
10
+
11
+ If you're looking for a nodejs based tool which lets you ingest large CSV/JSON files in the gigabyte range, you've come to the right place. Everything else I've tried with larger files runs out of JS heap, hammers Elasticsearch with too many single requests, times out, or tries to do everything with a single bulk request.
12
+
13
+ While I'd generally recommend using [Logstash](https://www.elastic.co/products/logstash), [filebeat](https://www.elastic.co/products/beats/filebeat), [Ingest Nodes](https://www.elastic.co/guide/en/elasticsearch/reference/master/ingest.html), [Elastic Agent](https://www.elastic.co/guide/en/fleet/current/fleet-overview.html) or [Elasticsearch Transforms](https://www.elastic.co/guide/en/elasticsearch/reference/current/transforms.html) for established use cases, this tool may be of help, especially if you feel more at home in the JavaScript/nodejs universe and have use cases with customized ingestion and data transformation needs.
14
+
15
+ **This is experimental code, use at your own risk. Nonetheless, I encourage you to give it a try so I can gather some feedback.**
16
+
17
+ ### So why is this still _alpha_?
18
+
19
+ - The API is not quite final and might change from release to release.
20
+ - The code needs more safety measures to avoid possible accidental data loss scenarios.
21
+ - No test coverage yet.
22
+
23
+ ---
24
+
25
+ Now that we've talked about the caveats, let's have a look at what you actually get with this tool:
26
+
27
+ ## Features
28
+
29
+ - Buffering/Streaming for both reading and indexing. Files are read using streaming and Elasticsearch ingestion is done using buffered bulk indexing. This is tailored towards the ingestion of large files. Successfully tested so far with JSON and CSV files in the range of 20-30 GB. On a single machine running both `node-es-transformer` and Elasticsearch, ingestion rates of up to 20k documents/second were achieved (2.9 GHz Intel Core i7, 16 GB RAM, SSD), depending on document size.
30
+ - Supports wildcards to ingest/transform a range of files in one go.
31
+ - Supports fetching documents from existing indices using search/scroll. This allows you to reindex with custom data transformations just using JavaScript in the `transform` callback.
32
+ - The `transform` callback gives you each source document, but you can also split it up into multiple documents and return them as an array (see the sketch after this list). An example use case: each source document is a tweet and you want to transform the data into an entity-centric index based on hashtags.
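For illustration, here is a minimal sketch of such a fan-out `transform` callback. The index names and the document fields (`text`, `hashtags`) are hypothetical and only serve to show the array return value:

```javascript
const transformer = require('node-es-transformer');

transformer({
  sourceIndexName: 'tweets',                  // hypothetical source index
  targetIndexName: 'hashtags-entity-centric', // hypothetical target index
  transform(doc) {
    // assumed source document shape: { text: '...', hashtags: ['foo', 'bar'] }
    if (!Array.isArray(doc.hashtags) || doc.hashtags.length === 0) {
      // returning undefined skips the document
      return undefined;
    }
    // returning an array emits one target document per hashtag
    return doc.hashtags.map((hashtag) => ({
      hashtag,
      tweet_text: doc.text,
    }));
  },
});
```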
10
33
 
11
34
  ## Getting started
12
35
 
@@ -14,31 +37,89 @@ In your node-js project, add `node-es-transformer` as a dependency (`yarn add no
14
37
 
15
38
  Use the library in your code like:
16
39
 
40
+ ### Read from a file
41
+
17
42
  ```javascript
18
- const { transformer } = require('node-es-transformer');
43
+ const transformer = require('node-es-transformer');
19
44
 
20
45
  transformer({
21
46
  fileName: 'filename.json',
22
- indexName: 'my-index',
23
- typeName: 'doc',
47
+ targetIndexName: 'my-index',
48
+ mappings: {
49
+ properties: {
50
+ '@timestamp': {
51
+ type: 'date'
52
+ },
53
+ 'first_name': {
54
+ type: 'keyword'
55
+ },
56
+ 'last_name': {
57
+ type: 'keyword'
58
+ },
59
+ 'full_name': {
60
+ type: 'keyword'
61
+ }
62
+ }
63
+ },
64
+ transform(line) {
65
+ return {
66
+ ...line,
67
+ full_name: `${line.first_name} ${line.last_name}`
68
+ }
69
+ }
70
+ });
71
+ ```
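In the bundled code in this diff, the `transform` callback receives each line of the file as a raw string, so delimited files can be handled directly in the callback. The following is a minimal sketch for a simple comma-separated file without quoted fields; the file name and column layout are assumptions for illustration. Returning `undefined` skips a line, which is used here to drop the header row.

```javascript
const transformer = require('node-es-transformer');

transformer({
  fileName: 'people.csv', // hypothetical file with lines like: 2023-01-01,Ada,Lovelace
  targetIndexName: 'people',
  transform(line) {
    const [timestamp, firstName, lastName] = line.split(',');

    // skip the header row and empty lines by returning undefined
    if (!timestamp || timestamp === '@timestamp') {
      return undefined;
    }

    return {
      '@timestamp': timestamp,
      first_name: firstName,
      last_name: lastName,
      full_name: `${firstName} ${lastName}`,
    };
  },
});
```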
72
+
73
+ ### Read from another index
74
+
75
+ ```javascript
76
+ const transformer = require('node-es-transformer');
77
+
78
+ transformer({
79
+ sourceIndexName: 'my-source-index',
80
+ targetIndexName: 'my-target-index',
81
+ // optional, if you skip mappings, they will be fetched from the source index.
24
82
  mappings: {
25
- doc: {
26
- properties: {
27
- '@timestamp': {
28
- type: 'date'
29
- },
30
- 'field1': {
31
- type: 'keyword'
32
- },
33
- 'field2': {
34
- type: 'keyword'
35
- }
83
+ properties: {
84
+ '@timestamp': {
85
+ type: 'date'
86
+ },
87
+ 'first_name': {
88
+ type: 'keyword'
89
+ },
90
+ 'last_name': {
91
+ type: 'keyword'
36
92
  },
93
+ 'full_name': {
94
+ type: 'keyword'
95
+ }
96
+ }
97
+ },
98
+ transform(doc) {
99
+ return {
100
+ ...doc,
101
+ full_name: `${doc.first_name} ${doc.last_name}`
37
102
  }
38
103
  }
39
104
  });
40
105
  ```
41
106
 
107
+ ### Options
108
+
109
+ - `deleteIndex`: If set to `true`, an existing target index will be deleted before indexing. Defaults to `false`.
110
+ - `sourceClientConfig`/`targetClientConfig`: Optional Elasticsearch client options, defaults to `{ node: 'http://localhost:9200' }`.
111
+ - `bufferSize`: The number of documents sent with each Elasticsearch bulk request. Defaults to `1000`.
112
+ - `fileName`: Source filename to ingest, supports wildcards. If this is set, `sourceIndexName` is not allowed.
113
+ - `splitRegex`: Custom line split regex, defaults to `/\n/`.
114
+ - `sourceIndexName`: The source Elasticsearch index to reindex from. If this is set, `fileName` is not allowed.
115
+ - `targetIndexName`: The target Elasticsearch index where documents will be indexed.
116
+ - `mappings`: Optional Elasticsearch document mappings. If not set and you're reindexing from another index, the mappings from the existing index will be used.
117
+ - `mappingsOverride`: If you're reindexing and this is set to `true`, `mappings` will be applied on top of the source index's mappings. Defaults to `false`.
118
+ - `query`: Optional Elasticsearch [DSL query](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html) to filter documents from the source index (see the sketch after this list).
119
+ - `skipHeader`: If true, skips the first line of the source file. Defaults to `false`.
120
+ - `transform(line)`: A callback function which allows the transformation of a source line into one or several documents. Returning `undefined` skips the line; returning an array indexes multiple documents.
121
+ - `verbose`: Logging verbosity, defaults to `true`.
122
+
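To illustrate how several of these options combine, here is a hedged sketch of a filtered reindex. The index names, the query, and the overridden field are assumptions. The `events` emitter returned by the bundled code in this diff emits a `finish` event once the reader has queued its final batch.

```javascript
const transformer = require('node-es-transformer');

(async () => {
  const { events } = await transformer({
    sourceIndexName: 'my-source-index',  // assumed index names
    targetIndexName: 'my-filtered-index',
    // hypothetical filter, only documents matching this query get reindexed
    query: { term: { user_name: 'walterra' } },
    // with mappingsOverride, these properties are merged on top of the
    // mappings fetched from the source index
    mappingsOverride: true,
    mappings: {
      user_name: { type: 'keyword' },
    },
    bufferSize: 500,
    verbose: false,
  });

  events.on('finish', () => {
    console.log('Reader finished, final bulk request queued.');
  });
})();
```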
42
123
  ## Development
43
124
 
44
125
  Clone this repository and install its dependencies:
@@ -49,12 +130,12 @@ cd node-es-transformer
49
130
  yarn
50
131
  ```
51
132
 
52
- `yarn build` builds the library to `dist`, generating three files:
133
+ `yarn build` builds the library to `dist`, generating two files:
53
134
 
54
- * `dist/node-es-transformer.cjs.js`
55
- A CommonJS bundle, suitable for use in Node.js, that `require`s the external dependency. This corresponds to the `"main"` field in package.json
56
- * `dist/node-es-transformer.esm.js`
57
- an ES module bundle, suitable for use in other people's libraries and applications, that `import`s the external dependency. This corresponds to the `"module"` field in package.json
135
+ - `dist/node-es-transformer.cjs.js`
136
+ A CommonJS bundle, suitable for use in Node.js, that `require`s the external dependency. This corresponds to the `"main"` field in package.json.
137
+ - `dist/node-es-transformer.esm.js`
138
+ An ES module bundle, suitable for use in other people's libraries and applications, that `import`s the external dependency. This corresponds to the `"module"` field in package.json (see the usage sketch below).
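The README examples above use `require`; for completeness, here is a minimal sketch of consuming the ES module bundle, assuming a toolchain (such as Rollup or webpack) that resolves the `"module"` field:

```javascript
import transformer from 'node-es-transformer';

transformer({
  fileName: 'filename.json',
  targetIndexName: 'my-index',
});
```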
58
139
 
59
140
  `yarn dev` builds the library, then keeps rebuilding it whenever the source files change using [rollup-watch](https://github.com/rollup/rollup-watch).
60
141
 
package/dist/node-es-transformer.cjs.js CHANGED
@@ -1,104 +1,405 @@
1
1
  'use strict';
2
2
 
3
- Object.defineProperty(exports, '__esModule', { value: true });
4
-
5
3
  function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
6
4
 
7
5
  var fs = _interopDefault(require('fs'));
8
6
  var es = _interopDefault(require('event-stream'));
9
- var elasticsearch = _interopDefault(require('elasticsearch'));
10
-
11
- function transformer(ref) {
12
- var deleteIndex = ref.deleteIndex; if ( deleteIndex === void 0 ) deleteIndex = false;
13
- var host = ref.host; if ( host === void 0 ) host = 'localhost';
14
- var port = ref.port; if ( port === void 0 ) port = '9200';
15
- var fileName = ref.fileName;
16
- var indexName = ref.indexName;
17
- var typeName = ref.typeName;
18
- var mappings = ref.mappings;
19
- var transform = ref.transform;
20
- var verbose = ref.verbose; if ( verbose === void 0 ) verbose = true;
21
-
22
-
23
- var client = new elasticsearch.Client({
24
- host: (host + ":" + port)
25
- });
26
-
27
- client.indices.exists({
28
- index: indexName
29
- }, function (err, resp) {
30
- if (resp === false) {
31
- createMapping();
32
- } else {
33
- if (deleteIndex === true) {
34
- client.indices.delete({
35
- index: indexName
36
- }, function (err, resp) {
37
- createMapping();
38
- });
39
- } else {
40
- indexFile();
41
- }
42
- }
43
- });
44
-
45
- function createMapping() {
46
- if (
47
- typeof mappings === 'object' &&
48
- mappings !== null
49
- ) {
50
- client.indices.create({
51
- index: indexName,
52
- body: {
53
- mappings: mappings
54
- }
55
- }, function (err, resp) {
56
- console.log('create mapping', err, resp);
57
- indexFile();
58
- });
59
- } else {
60
- indexFile();
61
- }
62
- }
63
-
64
- function indexFile() {
65
- var docs = [];
66
- var s = fs.createReadStream(fileName)
67
- .pipe(es.split())
68
- .pipe(es.mapSync(function (line) {
69
- s.pause();
70
-
71
- if (line) {
72
- try {
73
- var header = { index: { _index: indexName, _type: typeName } };
74
-
75
- var doc = (typeof transform === 'function') ? transform(line) : line;
76
-
77
- docs.push(header);
78
- docs.push(doc);
79
- } catch (e) {
80
- console.log('error', e);
81
- }
82
- }
83
-
84
- // resume the readstream, possibly from a callback
85
- s.resume();
86
- })
87
- .on('error', function (err) {
88
- console.log('Error while reading file.', err);
89
- })
90
- .on('end', function () {
91
- verbose && console.log('Read entire file.');
92
- client.bulk({
93
- body: docs
94
- }, function (err, resp) {
95
- if (err) {
96
- console.log('Ingest Error:', err);
97
- }
98
- });
99
- })
100
- );
101
- }
7
+ var glob = _interopDefault(require('glob'));
8
+ var cliProgress = _interopDefault(require('cli-progress'));
9
+ var elasticsearch = _interopDefault(require('@elastic/elasticsearch'));
10
+
11
+ function createMappingFactory(ref) {
12
+ var sourceClient = ref.sourceClient;
13
+ var sourceIndexName = ref.sourceIndexName;
14
+ var targetClient = ref.targetClient;
15
+ var targetIndexName = ref.targetIndexName;
16
+ var mappings = ref.mappings;
17
+ var mappingsOverride = ref.mappingsOverride;
18
+ var verbose = ref.verbose;
19
+
20
+ return async function () {
21
+ var targetMappings = mappingsOverride ? undefined : mappings;
22
+
23
+ if (sourceClient && sourceIndexName && typeof targetMappings === 'undefined') {
24
+ try {
25
+ var mapping = await sourceClient.indices.getMapping({ index: sourceIndexName });
26
+ targetMappings = mapping[sourceIndexName].mappings;
27
+ } catch (err) {
28
+ console.log('Error reading source mapping', err);
29
+ return;
30
+ }
31
+ }
32
+
33
+ if (typeof targetMappings === 'object' && targetMappings !== null) {
34
+ if (mappingsOverride) {
35
+ targetMappings = Object.assign({}, targetMappings,
36
+ {properties: Object.assign({}, targetMappings.properties,
37
+ mappings)});
38
+ }
39
+
40
+ try {
41
+ var resp = await targetClient.indices.create(
42
+ {
43
+ index: targetIndexName,
44
+ body: { mappings: targetMappings },
45
+ }
46
+ );
47
+ if (verbose) { console.log('Created target mapping', resp); }
48
+ } catch (err) {
49
+ console.log('Error creating target mapping', err);
50
+ }
51
+ }
52
+ };
53
+ }
54
+
55
+ function fileReaderFactory(indexer, fileName, transform, splitRegex, verbose) {
56
+ function startIndex(files) {
57
+ var file = files.shift();
58
+ var s = fs.createReadStream(file)
59
+ .pipe(es.split(splitRegex))
60
+ .pipe(es.mapSync(function (line) {
61
+ s.pause();
62
+ try {
63
+ var doc = (typeof transform === 'function') ? transform(line) : line;
64
+ // if doc is undefined we'll skip indexing it
65
+ if (typeof doc === 'undefined') {
66
+ s.resume();
67
+ return;
68
+ }
69
+
70
+ // the transform callback may return an array of docs so we can emit
71
+ // multiple docs from a single line
72
+ if (Array.isArray(doc)) {
73
+ doc.forEach(function (d) { return indexer.add(d); });
74
+ return;
75
+ }
76
+
77
+ indexer.add(doc);
78
+ } catch (e) {
79
+ console.log('error', e);
80
+ }
81
+ })
82
+ .on('error', function (err) {
83
+ console.log('Error while reading file.', err);
84
+ })
85
+ .on('end', function () {
86
+ if (verbose) { console.log('Read entire file: ', file); }
87
+ indexer.finish();
88
+ if (files.length > 0) {
89
+ startIndex(files);
90
+ }
91
+ }));
92
+
93
+ indexer.queueEmitter.on('resume', function () {
94
+ s.resume();
95
+ });
96
+ }
97
+
98
+ return function () {
99
+ glob(fileName, function (er, files) {
100
+ startIndex(files);
101
+ });
102
+ };
103
+ }
104
+
105
+ var EventEmitter = require('events');
106
+
107
+ var queueEmitter = new EventEmitter();
108
+
109
+ // a simple helper queue to bulk index documents
110
+ function indexQueueFactory(ref) {
111
+ var client = ref.targetClient;
112
+ var targetIndexName = ref.targetIndexName;
113
+ var bufferSize = ref.bufferSize; if ( bufferSize === void 0 ) bufferSize = 1000;
114
+ var skipHeader = ref.skipHeader; if ( skipHeader === void 0 ) skipHeader = false;
115
+ var verbose = ref.verbose; if ( verbose === void 0 ) verbose = true;
116
+
117
+ var buffer = [];
118
+ var queue = [];
119
+ var ingesting = false;
120
+
121
+ var ingest = async function (b) {
122
+ if (typeof b !== 'undefined') {
123
+ queue.push(b);
124
+ queueEmitter.emit('queue-size', queue.length);
125
+ }
126
+
127
+ if (ingesting === false) {
128
+ var docs = queue.shift();
129
+ queueEmitter.emit('queue-size', queue.length);
130
+ ingesting = true;
131
+ if (verbose) { console.log(("bulk ingest docs: " + (docs.length / 2) + ", queue length: " + (queue.length))); }
132
+
133
+ try {
134
+ await client.bulk({ body: docs });
135
+ ingesting = false;
136
+ if (queue.length > 0) {
137
+ ingest();
138
+ }
139
+ } catch (err) {
140
+ console.log('bulk index error', err);
141
+ }
142
+ }
143
+
144
+ // console.log(`ingest: queue.length ${queue.length}`);
145
+ if (queue.length === 0) {
146
+ queueEmitter.emit('queue-size', 0);
147
+ queueEmitter.emit('resume');
148
+ }
149
+ };
150
+
151
+ return {
152
+ add: function (doc) {
153
+ if (!skipHeader) {
154
+ var header = { index: { _index: targetIndexName } };
155
+ buffer.push(header);
156
+ }
157
+ buffer.push(doc);
158
+
159
+ // console.log(`add: queue.length ${queue.length}`);
160
+ if (queue.length === 0) {
161
+ queueEmitter.emit('resume');
162
+ }
163
+
164
+ if (buffer.length >= (bufferSize * 2)) {
165
+ ingest(buffer);
166
+ buffer = [];
167
+ }
168
+ },
169
+ finish: async function () {
170
+ await ingest(buffer);
171
+ buffer = [];
172
+ queueEmitter.emit('finish');
173
+ },
174
+ queueEmitter: queueEmitter,
175
+ };
176
+ }
177
+
178
+ var MAX_QUEUE_SIZE = 5;
179
+
180
+ // create a new progress bar instance and use shades_classic theme
181
+ var progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
182
+
183
+ function indexReaderFactory(indexer, sourceIndexName, transform, client, query) {
184
+ return async function indexReader() {
185
+ var responseQueue = [];
186
+ var docsNum = 0;
187
+
188
+ function search() {
189
+ return client.search({
190
+ index: sourceIndexName,
191
+ scroll: '30s',
192
+ size: 10000,
193
+ query: query,
194
+ });
195
+ }
196
+
197
+ function scroll(id) {
198
+ return client.scroll({
199
+ scroll_id: id,
200
+ scroll: '30s',
201
+ });
202
+ }
203
+
204
+ // start things off by searching, setting a scroll timeout, and pushing
205
+ // our first response into the queue to be processed
206
+ var se = await search();
207
+ responseQueue.push(se);
208
+ progressBar.start(se.hits.total.value, 0);
209
+
210
+ function processHit(hit) {
211
+ docsNum += 1;
212
+ try {
213
+ var doc = (typeof transform === 'function') ? transform(hit._source) : hit._source; // eslint-disable-line no-underscore-dangle
214
+ // if doc is undefined we'll skip indexing it
215
+ if (typeof doc === 'undefined') {
216
+ return;
217
+ }
218
+
219
+ // the transform callback may return an array of docs so we can emit
220
+ // multiple docs from a single line
221
+ if (Array.isArray(doc)) {
222
+ doc.forEach(function (d) { return indexer.add(d); });
223
+ return;
224
+ }
225
+
226
+ indexer.add(doc);
227
+ } catch (e) {
228
+ console.log('error', e);
229
+ }
230
+ }
231
+
232
+ var ingestQueueSize = 0;
233
+ var scrollId = se._scroll_id; // eslint-disable-line no-underscore-dangle
234
+ var readActive = false;
235
+
236
+ async function processResponseQueue() {
237
+ while (responseQueue.length) {
238
+ readActive = true;
239
+ var response = responseQueue.shift();
240
+
241
+ // collect the docs from this response
242
+ response.hits.hits.forEach(processHit);
243
+
244
+ progressBar.update(docsNum);
245
+
246
+ // check to see if we have collected all of the docs
247
+ // console.log('check count', response.hits.total.value, docsNum);
248
+ if (response.hits.total.value === docsNum) {
249
+ indexer.finish();
250
+ progressBar.stop();
251
+ break;
252
+ }
253
+
254
+ if (ingestQueueSize < MAX_QUEUE_SIZE) {
255
+ // get the next response if there are more docs to fetch
256
+ var sc = await scroll(response._scroll_id); // eslint-disable-line no-await-in-loop,no-underscore-dangle,max-len
257
+ scrollId = sc._scroll_id; // eslint-disable-line no-underscore-dangle
258
+ responseQueue.push(sc);
259
+ } else {
260
+ readActive = false;
261
+ }
262
+ }
263
+ }
264
+
265
+ indexer.queueEmitter.on('queue-size', async function (size) {
266
+ ingestQueueSize = size;
267
+
268
+ if (!readActive && ingestQueueSize < MAX_QUEUE_SIZE) {
269
+ // get the next response if there are more docs to fetch
270
+ var sc = await scroll(scrollId); // eslint-disable-line no-await-in-loop,no-underscore-dangle,max-len
271
+ scrollId = sc._scroll_id; // eslint-disable-line no-underscore-dangle
272
+ responseQueue.push(sc);
273
+ processResponseQueue();
274
+ }
275
+ });
276
+
277
+ indexer.queueEmitter.on('resume', async function () {
278
+ ingestQueueSize = 0;
279
+
280
+ if (readActive) {
281
+ return;
282
+ }
283
+
284
+ // get the next response if there are more docs to fetch
285
+ var sc = await scroll(scrollId); // eslint-disable-line no-await-in-loop,no-underscore-dangle,max-len
286
+ scrollId = sc._scroll_id; // eslint-disable-line no-underscore-dangle
287
+ responseQueue.push(sc);
288
+ processResponseQueue();
289
+ });
290
+
291
+ processResponseQueue();
292
+ };
293
+ }
294
+
295
+ async function transformer(ref) {
296
+ var deleteIndex = ref.deleteIndex; if ( deleteIndex === void 0 ) deleteIndex = false;
297
+ var sourceClientConfig = ref.sourceClientConfig;
298
+ var targetClientConfig = ref.targetClientConfig;
299
+ var bufferSize = ref.bufferSize; if ( bufferSize === void 0 ) bufferSize = 1000;
300
+ var fileName = ref.fileName;
301
+ var splitRegex = ref.splitRegex; if ( splitRegex === void 0 ) splitRegex = /\n/;
302
+ var sourceIndexName = ref.sourceIndexName;
303
+ var targetIndexName = ref.targetIndexName;
304
+ var mappings = ref.mappings;
305
+ var mappingsOverride = ref.mappingsOverride; if ( mappingsOverride === void 0 ) mappingsOverride = false;
306
+ var query = ref.query;
307
+ var skipHeader = ref.skipHeader; if ( skipHeader === void 0 ) skipHeader = false;
308
+ var transform = ref.transform;
309
+ var verbose = ref.verbose; if ( verbose === void 0 ) verbose = true;
310
+
311
+ if (typeof targetIndexName === 'undefined') {
312
+ throw Error('targetIndexName must be specified.');
313
+ }
314
+
315
+ var defaultClientConfig = {
316
+ node: 'http://localhost:9200',
317
+ };
318
+
319
+ var sourceClient = new elasticsearch.Client(sourceClientConfig || defaultClientConfig);
320
+ var targetClient = new elasticsearch.Client(
321
+ targetClientConfig || sourceClientConfig || defaultClientConfig
322
+ );
323
+
324
+ var createMapping = createMappingFactory({
325
+ sourceClient: sourceClient,
326
+ sourceIndexName: sourceIndexName,
327
+ targetClient: targetClient,
328
+ targetIndexName: targetIndexName,
329
+ mappings: mappings,
330
+ mappingsOverride: mappingsOverride,
331
+ verbose: verbose,
332
+ });
333
+ var indexer = indexQueueFactory({
334
+ targetClient: targetClient,
335
+ targetIndexName: targetIndexName,
336
+ bufferSize: bufferSize,
337
+ skipHeader: skipHeader,
338
+ verbose: verbose,
339
+ });
340
+
341
+ function getReader() {
342
+ if (
343
+ typeof fileName !== 'undefined'
344
+ && typeof sourceIndexName !== 'undefined'
345
+ ) {
346
+ throw Error(
347
+ 'Only either one of fileName or sourceIndexName can be specified.'
348
+ );
349
+ }
350
+
351
+ if (
352
+ typeof fileName === 'undefined'
353
+ && typeof sourceIndexName === 'undefined'
354
+ ) {
355
+ throw Error('Either fileName or sourceIndexName must be specified.');
356
+ }
357
+
358
+ if (typeof fileName !== 'undefined') {
359
+ return fileReaderFactory(
360
+ indexer,
361
+ fileName,
362
+ transform,
363
+ splitRegex,
364
+ verbose
365
+ );
366
+ }
367
+
368
+ if (typeof sourceIndexName !== 'undefined') {
369
+ return indexReaderFactory(
370
+ indexer,
371
+ sourceIndexName,
372
+ transform,
373
+ sourceClient,
374
+ query
375
+ );
376
+ }
377
+
378
+ return null;
379
+ }
380
+
381
+ var reader = getReader();
382
+
383
+ try {
384
+ var indexExists = await targetClient.indices.exists({ index: targetIndexName });
385
+
386
+ if (indexExists === false) {
387
+ await createMapping();
388
+ reader();
389
+ } else if (deleteIndex === true) {
390
+ await targetClient.indices.delete({ index: targetIndexName });
391
+ await createMapping();
392
+ reader();
393
+ } else {
394
+ reader();
395
+ }
396
+ } catch (error) {
397
+ console.error('Error checking index existence:', error);
398
+ } finally {
399
+ // targetClient.close();
400
+ }
401
+
402
+ return { events: indexer.queueEmitter };
102
403
  }
103
404
 
104
- exports.transformer = transformer;
405
+ module.exports = transformer;
package/dist/node-es-transformer.esm.js CHANGED
@@ -1,98 +1,401 @@
1
1
  import fs from 'fs';
2
2
  import es from 'event-stream';
3
- import elasticsearch from 'elasticsearch';
4
-
5
- function transformer(ref) {
6
- var deleteIndex = ref.deleteIndex; if ( deleteIndex === void 0 ) deleteIndex = false;
7
- var host = ref.host; if ( host === void 0 ) host = 'localhost';
8
- var port = ref.port; if ( port === void 0 ) port = '9200';
9
- var fileName = ref.fileName;
10
- var indexName = ref.indexName;
11
- var typeName = ref.typeName;
12
- var mappings = ref.mappings;
13
- var transform = ref.transform;
14
- var verbose = ref.verbose; if ( verbose === void 0 ) verbose = true;
15
-
16
-
17
- var client = new elasticsearch.Client({
18
- host: (host + ":" + port)
19
- });
20
-
21
- client.indices.exists({
22
- index: indexName
23
- }, function (err, resp) {
24
- if (resp === false) {
25
- createMapping();
26
- } else {
27
- if (deleteIndex === true) {
28
- client.indices.delete({
29
- index: indexName
30
- }, function (err, resp) {
31
- createMapping();
32
- });
33
- } else {
34
- indexFile();
35
- }
36
- }
37
- });
38
-
39
- function createMapping() {
40
- if (
41
- typeof mappings === 'object' &&
42
- mappings !== null
43
- ) {
44
- client.indices.create({
45
- index: indexName,
46
- body: {
47
- mappings: mappings
48
- }
49
- }, function (err, resp) {
50
- console.log('create mapping', err, resp);
51
- indexFile();
52
- });
53
- } else {
54
- indexFile();
55
- }
56
- }
57
-
58
- function indexFile() {
59
- var docs = [];
60
- var s = fs.createReadStream(fileName)
61
- .pipe(es.split())
62
- .pipe(es.mapSync(function (line) {
63
- s.pause();
64
-
65
- if (line) {
66
- try {
67
- var header = { index: { _index: indexName, _type: typeName } };
68
-
69
- var doc = (typeof transform === 'function') ? transform(line) : line;
70
-
71
- docs.push(header);
72
- docs.push(doc);
73
- } catch (e) {
74
- console.log('error', e);
75
- }
76
- }
77
-
78
- // resume the readstream, possibly from a callback
79
- s.resume();
80
- })
81
- .on('error', function (err) {
82
- console.log('Error while reading file.', err);
83
- })
84
- .on('end', function () {
85
- verbose && console.log('Read entire file.');
86
- client.bulk({
87
- body: docs
88
- }, function (err, resp) {
89
- if (err) {
90
- console.log('Ingest Error:', err);
91
- }
92
- });
93
- })
94
- );
95
- }
3
+ import glob from 'glob';
4
+ import cliProgress from 'cli-progress';
5
+ import elasticsearch from '@elastic/elasticsearch';
6
+
7
+ function createMappingFactory(ref) {
8
+ var sourceClient = ref.sourceClient;
9
+ var sourceIndexName = ref.sourceIndexName;
10
+ var targetClient = ref.targetClient;
11
+ var targetIndexName = ref.targetIndexName;
12
+ var mappings = ref.mappings;
13
+ var mappingsOverride = ref.mappingsOverride;
14
+ var verbose = ref.verbose;
15
+
16
+ return async function () {
17
+ var targetMappings = mappingsOverride ? undefined : mappings;
18
+
19
+ if (sourceClient && sourceIndexName && typeof targetMappings === 'undefined') {
20
+ try {
21
+ var mapping = await sourceClient.indices.getMapping({ index: sourceIndexName });
22
+ targetMappings = mapping[sourceIndexName].mappings;
23
+ } catch (err) {
24
+ console.log('Error reading source mapping', err);
25
+ return;
26
+ }
27
+ }
28
+
29
+ if (typeof targetMappings === 'object' && targetMappings !== null) {
30
+ if (mappingsOverride) {
31
+ targetMappings = Object.assign({}, targetMappings,
32
+ {properties: Object.assign({}, targetMappings.properties,
33
+ mappings)});
34
+ }
35
+
36
+ try {
37
+ var resp = await targetClient.indices.create(
38
+ {
39
+ index: targetIndexName,
40
+ body: { mappings: targetMappings },
41
+ }
42
+ );
43
+ if (verbose) { console.log('Created target mapping', resp); }
44
+ } catch (err) {
45
+ console.log('Error creating target mapping', err);
46
+ }
47
+ }
48
+ };
49
+ }
50
+
51
+ function fileReaderFactory(indexer, fileName, transform, splitRegex, verbose) {
52
+ function startIndex(files) {
53
+ var file = files.shift();
54
+ var s = fs.createReadStream(file)
55
+ .pipe(es.split(splitRegex))
56
+ .pipe(es.mapSync(function (line) {
57
+ s.pause();
58
+ try {
59
+ var doc = (typeof transform === 'function') ? transform(line) : line;
60
+ // if doc is undefined we'll skip indexing it
61
+ if (typeof doc === 'undefined') {
62
+ s.resume();
63
+ return;
64
+ }
65
+
66
+ // the transform callback may return an array of docs so we can emit
67
+ // multiple docs from a single line
68
+ if (Array.isArray(doc)) {
69
+ doc.forEach(function (d) { return indexer.add(d); });
70
+ return;
71
+ }
72
+
73
+ indexer.add(doc);
74
+ } catch (e) {
75
+ console.log('error', e);
76
+ }
77
+ })
78
+ .on('error', function (err) {
79
+ console.log('Error while reading file.', err);
80
+ })
81
+ .on('end', function () {
82
+ if (verbose) { console.log('Read entire file: ', file); }
83
+ indexer.finish();
84
+ if (files.length > 0) {
85
+ startIndex(files);
86
+ }
87
+ }));
88
+
89
+ indexer.queueEmitter.on('resume', function () {
90
+ s.resume();
91
+ });
92
+ }
93
+
94
+ return function () {
95
+ glob(fileName, function (er, files) {
96
+ startIndex(files);
97
+ });
98
+ };
99
+ }
100
+
101
+ var EventEmitter = require('events');
102
+
103
+ var queueEmitter = new EventEmitter();
104
+
105
+ // a simple helper queue to bulk index documents
106
+ function indexQueueFactory(ref) {
107
+ var client = ref.targetClient;
108
+ var targetIndexName = ref.targetIndexName;
109
+ var bufferSize = ref.bufferSize; if ( bufferSize === void 0 ) bufferSize = 1000;
110
+ var skipHeader = ref.skipHeader; if ( skipHeader === void 0 ) skipHeader = false;
111
+ var verbose = ref.verbose; if ( verbose === void 0 ) verbose = true;
112
+
113
+ var buffer = [];
114
+ var queue = [];
115
+ var ingesting = false;
116
+
117
+ var ingest = async function (b) {
118
+ if (typeof b !== 'undefined') {
119
+ queue.push(b);
120
+ queueEmitter.emit('queue-size', queue.length);
121
+ }
122
+
123
+ if (ingesting === false) {
124
+ var docs = queue.shift();
125
+ queueEmitter.emit('queue-size', queue.length);
126
+ ingesting = true;
127
+ if (verbose) { console.log(("bulk ingest docs: " + (docs.length / 2) + ", queue length: " + (queue.length))); }
128
+
129
+ try {
130
+ await client.bulk({ body: docs });
131
+ ingesting = false;
132
+ if (queue.length > 0) {
133
+ ingest();
134
+ }
135
+ } catch (err) {
136
+ console.log('bulk index error', err);
137
+ }
138
+ }
139
+
140
+ // console.log(`ingest: queue.length ${queue.length}`);
141
+ if (queue.length === 0) {
142
+ queueEmitter.emit('queue-size', 0);
143
+ queueEmitter.emit('resume');
144
+ }
145
+ };
146
+
147
+ return {
148
+ add: function (doc) {
149
+ if (!skipHeader) {
150
+ var header = { index: { _index: targetIndexName } };
151
+ buffer.push(header);
152
+ }
153
+ buffer.push(doc);
154
+
155
+ // console.log(`add: queue.length ${queue.length}`);
156
+ if (queue.length === 0) {
157
+ queueEmitter.emit('resume');
158
+ }
159
+
160
+ if (buffer.length >= (bufferSize * 2)) {
161
+ ingest(buffer);
162
+ buffer = [];
163
+ }
164
+ },
165
+ finish: async function () {
166
+ await ingest(buffer);
167
+ buffer = [];
168
+ queueEmitter.emit('finish');
169
+ },
170
+ queueEmitter: queueEmitter,
171
+ };
172
+ }
173
+
174
+ var MAX_QUEUE_SIZE = 5;
175
+
176
+ // create a new progress bar instance and use shades_classic theme
177
+ var progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
178
+
179
+ function indexReaderFactory(indexer, sourceIndexName, transform, client, query) {
180
+ return async function indexReader() {
181
+ var responseQueue = [];
182
+ var docsNum = 0;
183
+
184
+ function search() {
185
+ return client.search({
186
+ index: sourceIndexName,
187
+ scroll: '30s',
188
+ size: 10000,
189
+ query: query,
190
+ });
191
+ }
192
+
193
+ function scroll(id) {
194
+ return client.scroll({
195
+ scroll_id: id,
196
+ scroll: '30s',
197
+ });
198
+ }
199
+
200
+ // start things off by searching, setting a scroll timeout, and pushing
201
+ // our first response into the queue to be processed
202
+ var se = await search();
203
+ responseQueue.push(se);
204
+ progressBar.start(se.hits.total.value, 0);
205
+
206
+ function processHit(hit) {
207
+ docsNum += 1;
208
+ try {
209
+ var doc = (typeof transform === 'function') ? transform(hit._source) : hit._source; // eslint-disable-line no-underscore-dangle
210
+ // if doc is undefined we'll skip indexing it
211
+ if (typeof doc === 'undefined') {
212
+ return;
213
+ }
214
+
215
+ // the transform callback may return an array of docs so we can emit
216
+ // multiple docs from a single line
217
+ if (Array.isArray(doc)) {
218
+ doc.forEach(function (d) { return indexer.add(d); });
219
+ return;
220
+ }
221
+
222
+ indexer.add(doc);
223
+ } catch (e) {
224
+ console.log('error', e);
225
+ }
226
+ }
227
+
228
+ var ingestQueueSize = 0;
229
+ var scrollId = se._scroll_id; // eslint-disable-line no-underscore-dangle
230
+ var readActive = false;
231
+
232
+ async function processResponseQueue() {
233
+ while (responseQueue.length) {
234
+ readActive = true;
235
+ var response = responseQueue.shift();
236
+
237
+ // collect the docs from this response
238
+ response.hits.hits.forEach(processHit);
239
+
240
+ progressBar.update(docsNum);
241
+
242
+ // check to see if we have collected all of the docs
243
+ // console.log('check count', response.hits.total.value, docsNum);
244
+ if (response.hits.total.value === docsNum) {
245
+ indexer.finish();
246
+ progressBar.stop();
247
+ break;
248
+ }
249
+
250
+ if (ingestQueueSize < MAX_QUEUE_SIZE) {
251
+ // get the next response if there are more docs to fetch
252
+ var sc = await scroll(response._scroll_id); // eslint-disable-line no-await-in-loop,no-underscore-dangle,max-len
253
+ scrollId = sc._scroll_id; // eslint-disable-line no-underscore-dangle
254
+ responseQueue.push(sc);
255
+ } else {
256
+ readActive = false;
257
+ }
258
+ }
259
+ }
260
+
261
+ indexer.queueEmitter.on('queue-size', async function (size) {
262
+ ingestQueueSize = size;
263
+
264
+ if (!readActive && ingestQueueSize < MAX_QUEUE_SIZE) {
265
+ // get the next response if there are more docs to fetch
266
+ var sc = await scroll(scrollId); // eslint-disable-line no-await-in-loop,no-underscore-dangle,max-len
267
+ scrollId = sc._scroll_id; // eslint-disable-line no-underscore-dangle
268
+ responseQueue.push(sc);
269
+ processResponseQueue();
270
+ }
271
+ });
272
+
273
+ indexer.queueEmitter.on('resume', async function () {
274
+ ingestQueueSize = 0;
275
+
276
+ if (readActive) {
277
+ return;
278
+ }
279
+
280
+ // get the next response if there are more docs to fetch
281
+ var sc = await scroll(scrollId); // eslint-disable-line no-await-in-loop,no-underscore-dangle,max-len
282
+ scrollId = sc._scroll_id; // eslint-disable-line no-underscore-dangle
283
+ responseQueue.push(sc);
284
+ processResponseQueue();
285
+ });
286
+
287
+ processResponseQueue();
288
+ };
289
+ }
290
+
291
+ async function transformer(ref) {
292
+ var deleteIndex = ref.deleteIndex; if ( deleteIndex === void 0 ) deleteIndex = false;
293
+ var sourceClientConfig = ref.sourceClientConfig;
294
+ var targetClientConfig = ref.targetClientConfig;
295
+ var bufferSize = ref.bufferSize; if ( bufferSize === void 0 ) bufferSize = 1000;
296
+ var fileName = ref.fileName;
297
+ var splitRegex = ref.splitRegex; if ( splitRegex === void 0 ) splitRegex = /\n/;
298
+ var sourceIndexName = ref.sourceIndexName;
299
+ var targetIndexName = ref.targetIndexName;
300
+ var mappings = ref.mappings;
301
+ var mappingsOverride = ref.mappingsOverride; if ( mappingsOverride === void 0 ) mappingsOverride = false;
302
+ var query = ref.query;
303
+ var skipHeader = ref.skipHeader; if ( skipHeader === void 0 ) skipHeader = false;
304
+ var transform = ref.transform;
305
+ var verbose = ref.verbose; if ( verbose === void 0 ) verbose = true;
306
+
307
+ if (typeof targetIndexName === 'undefined') {
308
+ throw Error('targetIndexName must be specified.');
309
+ }
310
+
311
+ var defaultClientConfig = {
312
+ node: 'http://localhost:9200',
313
+ };
314
+
315
+ var sourceClient = new elasticsearch.Client(sourceClientConfig || defaultClientConfig);
316
+ var targetClient = new elasticsearch.Client(
317
+ targetClientConfig || sourceClientConfig || defaultClientConfig
318
+ );
319
+
320
+ var createMapping = createMappingFactory({
321
+ sourceClient: sourceClient,
322
+ sourceIndexName: sourceIndexName,
323
+ targetClient: targetClient,
324
+ targetIndexName: targetIndexName,
325
+ mappings: mappings,
326
+ mappingsOverride: mappingsOverride,
327
+ verbose: verbose,
328
+ });
329
+ var indexer = indexQueueFactory({
330
+ targetClient: targetClient,
331
+ targetIndexName: targetIndexName,
332
+ bufferSize: bufferSize,
333
+ skipHeader: skipHeader,
334
+ verbose: verbose,
335
+ });
336
+
337
+ function getReader() {
338
+ if (
339
+ typeof fileName !== 'undefined'
340
+ && typeof sourceIndexName !== 'undefined'
341
+ ) {
342
+ throw Error(
343
+ 'Only either one of fileName or sourceIndexName can be specified.'
344
+ );
345
+ }
346
+
347
+ if (
348
+ typeof fileName === 'undefined'
349
+ && typeof sourceIndexName === 'undefined'
350
+ ) {
351
+ throw Error('Either fileName or sourceIndexName must be specified.');
352
+ }
353
+
354
+ if (typeof fileName !== 'undefined') {
355
+ return fileReaderFactory(
356
+ indexer,
357
+ fileName,
358
+ transform,
359
+ splitRegex,
360
+ verbose
361
+ );
362
+ }
363
+
364
+ if (typeof sourceIndexName !== 'undefined') {
365
+ return indexReaderFactory(
366
+ indexer,
367
+ sourceIndexName,
368
+ transform,
369
+ sourceClient,
370
+ query
371
+ );
372
+ }
373
+
374
+ return null;
375
+ }
376
+
377
+ var reader = getReader();
378
+
379
+ try {
380
+ var indexExists = await targetClient.indices.exists({ index: targetIndexName });
381
+
382
+ if (indexExists === false) {
383
+ await createMapping();
384
+ reader();
385
+ } else if (deleteIndex === true) {
386
+ await targetClient.indices.delete({ index: targetIndexName });
387
+ await createMapping();
388
+ reader();
389
+ } else {
390
+ reader();
391
+ }
392
+ } catch (error) {
393
+ console.error('Error checking index existence:', error);
394
+ } finally {
395
+ // targetClient.close();
396
+ }
397
+
398
+ return { events: indexer.queueEmitter };
96
399
  }
97
400
 
98
- export { transformer };
401
+ export default transformer;
package/package.json CHANGED
@@ -1,20 +1,39 @@
1
1
  {
2
2
  "name": "node-es-transformer",
3
- "version": "1.0.0-alpha1",
3
+ "description": "A nodejs based library to (re)index and transform data from/to Elasticsearch.",
4
+ "keywords": [
5
+ "elasticsearch",
6
+ "data-transformation"
7
+ ],
8
+ "private": false,
9
+ "homepage": "https://github.com/walterra/node-es-transformer",
10
+ "repository": "https://github.com/walterra/node-es-transformer",
11
+ "bugs": {
12
+ "url": "https://github.com/walterra/node-es-transformer/issues"
13
+ },
14
+ "license": "Apache-2.0",
15
+ "author": "Walter Rafelsberger <walter@rafelsberger.at>",
16
+ "contributors": [],
17
+ "version": "1.0.0-alpha10",
4
18
  "main": "dist/node-es-transformer.cjs.js",
5
19
  "module": "dist/node-es-transformer.esm.js",
6
20
  "dependencies": {
7
- "elasticsearch": "^15.0.0",
8
- "event-stream": "^3.3.4"
21
+ "@elastic/elasticsearch": "^8.8.1",
22
+ "cli-progress": "^3.12.0",
23
+ "event-stream": "3.3.4",
24
+ "glob": "7.1.2"
9
25
  },
10
26
  "devDependencies": {
11
- "eslint": "^4.19.1",
12
- "eslint-config-airbnb": "^16.1.0",
13
- "eslint-plugin-import": "^2.12.0",
14
- "rollup": "^0.46.0",
15
- "rollup-plugin-buble": "^0.15.0",
16
- "rollup-plugin-commonjs": "^8.0.2",
17
- "rollup-plugin-node-resolve": "^3.0.0"
27
+ "acorn": "^6.4.2",
28
+ "eslint": "8.2.0",
29
+ "eslint-config-airbnb": "19.0.4",
30
+ "eslint-plugin-import": "2.27.5",
31
+ "eslint-plugin-jsx-a11y": "6.7.1",
32
+ "eslint-plugin-react": "7.32.2",
33
+ "rollup": "0.66.6",
34
+ "rollup-plugin-buble": "0.19.6",
35
+ "rollup-plugin-commonjs": "8.0.2",
36
+ "rollup-plugin-node-resolve": "3.0.0"
18
37
  },
19
38
  "scripts": {
20
39
  "build": "rollup -c",