@naturalcycles/datastore-lib 3.24.1 → 3.25.1

@@ -1,5 +1,5 @@
  /// <reference types="node" />
- import { Readable } from 'stream';
+ import { Readable } from 'node:stream';
  import { Query } from '@google-cloud/datastore';
  import { CommonLogger } from '@naturalcycles/js-lib';
  import type { ReadableTyped } from '@naturalcycles/nodejs-lib';
@@ -14,6 +14,7 @@ export declare class DatastoreStreamReadable<T = any> extends Readable implement
  private done;
  private lastQueryDone?;
  private totalWait;
+ private table;
  private opt;
  constructor(q: Query, opt: DatastoreDBStreamOptions, logger: CommonLogger);
  private runNextQuery;
@@ -1,9 +1,9 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.DatastoreStreamReadable = void 0;
- const stream_1 = require("stream");
+ const node_stream_1 = require("node:stream");
  const js_lib_1 = require("@naturalcycles/js-lib");
- class DatastoreStreamReadable extends stream_1.Readable {
+ class DatastoreStreamReadable extends node_stream_1.Readable {
  constructor(q, opt, logger) {
  super({ objectMode: true });
  this.q = q;
@@ -19,7 +19,8 @@ class DatastoreStreamReadable extends stream_1.Readable {
  ...opt,
  };
  this.originalLimit = q.limitVal;
- logger.log(`!! using experimentalCursorStream !! batchSize: ${opt.batchSize}`);
+ this.table = q.kinds[0];
+ logger.log(`!! using experimentalCursorStream !! ${this.table}, batchSize: ${opt.batchSize}`);
  }
  async runNextQuery() {
  if (this.done)
@@ -39,38 +40,54 @@ class DatastoreStreamReadable extends stream_1.Readable {
  if (this.endCursor) {
  q = q.start(this.endCursor);
  }
+ let rows = [];
+ let info = {};
  try {
- const [rows, info] = await q.run();
- this.rowsRetrieved += rows.length;
- this.logger.log(`got ${rows.length} rows, ${this.rowsRetrieved} rowsRetrieved, totalWait: ${(0, js_lib_1._ms)(this.totalWait)}`, info.moreResults);
- this.endCursor = info.endCursor;
- this.running = false; // ready to take more _reads
- this.lastQueryDone = Date.now();
- rows.forEach(row => this.push(row));
- if (!info.endCursor ||
- info.moreResults === 'NO_MORE_RESULTS' ||
- (this.originalLimit && this.rowsRetrieved >= this.originalLimit)) {
- this.logger.log(`!!!! DONE! ${this.rowsRetrieved} rowsRetrieved, totalWait: ${(0, js_lib_1._ms)(this.totalWait)}`);
- this.push(null);
- this.done = true;
- }
- else if (this.opt.singleBatchBuffer) {
- // here we don't start next query until we're asked (via next _read call)
- // do, let's do nothing
- }
- else if (this.opt.rssLimitMB) {
- const rssMB = Math.round(process.memoryUsage().rss / 1024 / 1024);
- if (rssMB <= this.opt.rssLimitMB) {
- void this.runNextQuery();
- }
- else {
- this.logger.log(`rssLimitMB reached ${rssMB} > ${this.opt.rssLimitMB}, pausing stream`);
- }
- }
+ await (0, js_lib_1.pRetry)(async () => {
+ const res = await q.run();
+ rows = res[0];
+ info = res[1];
+ }, {
+ name: `DatastoreStreamReadable.query(${this.table})`,
+ maxAttempts: 5,
+ delay: 5000,
+ delayMultiplier: 2,
+ logger: this.logger,
+ });
  }
  catch (err) {
- console.error('DatastoreStreamReadable error!\n', err);
+ console.error(`DatastoreStreamReadable error!\n`, {
+ table: this.table,
+ rowsRetrieved: this.rowsRetrieved,
+ }, err);
  this.emit('error', err);
+ return;
+ }
+ this.rowsRetrieved += rows.length;
+ this.logger.log(`got ${rows.length} rows, ${this.rowsRetrieved} rowsRetrieved, totalWait: ${(0, js_lib_1._ms)(this.totalWait)}`, info.moreResults);
+ this.endCursor = info.endCursor;
+ this.running = false; // ready to take more _reads
+ this.lastQueryDone = Date.now();
+ rows.forEach(row => this.push(row));
+ if (!info.endCursor ||
+ info.moreResults === 'NO_MORE_RESULTS' ||
+ (this.originalLimit && this.rowsRetrieved >= this.originalLimit)) {
+ this.logger.log(`!!!! DONE! ${this.rowsRetrieved} rowsRetrieved, totalWait: ${(0, js_lib_1._ms)(this.totalWait)}`);
+ this.push(null);
+ this.done = true;
+ }
+ else if (this.opt.singleBatchBuffer) {
+ // here we don't start next query until we're asked (via next _read call)
+ // do, let's do nothing
+ }
+ else if (this.opt.rssLimitMB) {
+ const rssMB = Math.round(process.memoryUsage().rss / 1024 / 1024);
+ if (rssMB <= this.opt.rssLimitMB) {
+ void this.runNextQuery();
+ }
+ else {
+ this.logger.log(`rssLimitMB reached ${rssMB} > ${this.opt.rssLimitMB}, pausing stream`);
+ }
  }
  }
  _read() {
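Note: the batch query in runNextQuery() is now wrapped in pRetry from @naturalcycles/js-lib, so a transient Datastore failure is retried with exponential backoff before the stream errors out. A minimal standalone sketch of the same pattern, reusing the option values from the diff (the fetchBatch function and the kind name are hypothetical stand-ins):

    import { pRetry } from '@naturalcycles/js-lib'

    // Hypothetical flaky operation standing in for q.run()
    async function fetchBatch(): Promise<string[]> {
      if (Math.random() < 0.5) throw new Error('transient datastore error')
      return ['row1', 'row2']
    }

    let rows: string[] = []
    await pRetry(
      async () => {
        rows = await fetchBatch() // result captured via closure, as in the diff
      },
      {
        name: 'DatastoreStreamReadable.query(SomeKind)', // label for retry logs; kind is a placeholder
        maxAttempts: 5, // up to 5 attempts in total
        delay: 5000, // 5 seconds before the first retry
        delayMultiplier: 2, // back off exponentially: 5s, 10s, 20s, ...
      },
    )
    console.log(`got ${rows.length} rows`)

Once the attempts are exhausted, the error propagates to the existing catch block, which now logs the table and rowsRetrieved before emitting 'error' on the stream.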
@@ -1,7 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.DatastoreDB = void 0;
- const stream_1 = require("stream");
+ const node_stream_1 = require("node:stream");
  const db_lib_1 = require("@naturalcycles/db-lib");
  const js_lib_1 = require("@naturalcycles/js-lib");
  const colors_1 = require("@naturalcycles/nodejs-lib/dist/colors");
@@ -33,13 +33,12 @@ class DatastoreDB extends db_lib_1.BaseCommonDB {
  }
  // @memo() // not used to be able to connect to many DBs in the same server instance
  ds() {
- var _a;
  if (!this.cachedDatastore) {
  (0, js_lib_1._assert)(process.env['APP_ENV'] !== 'test', 'DatastoreDB cannot be used in Test env, please use InMemoryDB');
  // Lazy-loading
  const datastoreLib = require('@google-cloud/datastore');
  const DS = datastoreLib.Datastore;
- (_a = this.cfg).projectId || (_a.projectId = this.cfg.credentials?.project_id || process.env['GOOGLE_CLOUD_PROJECT']);
+ this.cfg.projectId ||= this.cfg.credentials?.project_id || process.env['GOOGLE_CLOUD_PROJECT'];
  if (this.cfg.projectId) {
  this.cfg.logger.log(`DatastoreDB connected to ${(0, colors_1.boldWhite)(this.cfg.projectId)}`);
  }
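Note: the projectId fallback now uses the logical OR assignment operator (||=) instead of the previously down-leveled temporary-variable form, which fits the newer runtime required by the engines bump later in this diff. A tiny illustrative sketch of the equivalence (the config shape here is hypothetical, not the package's actual type):

    interface Cfg {
      projectId?: string
      credentials?: { project_id?: string }
    }

    const cfg: Cfg = {}
    // Assigns only when the left-hand side is falsy, i.e. when projectId is not set yet.
    cfg.projectId ||= cfg.credentials?.project_id || process.env['GOOGLE_CLOUD_PROJECT']
    // Equivalent long form: if (!cfg.projectId) cfg.projectId = ...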
@@ -68,7 +67,7 @@ class DatastoreDB extends db_lib_1.BaseCommonDB {
  if (this.cfg.timeout) {
  // First try
  try {
- const r = await (0, js_lib_1.pTimeout)(this.ds().get(keys), {
+ const r = await (0, js_lib_1.pTimeout)(() => this.ds().get(keys), {
  timeout: this.cfg.timeout,
  name: `datastore.getByIds(${table})`,
  });
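Note: this and the following getByIds call site now pass a function to pTimeout rather than an already-started promise, presumably because pTimeout in the bumped @naturalcycles/js-lib (^14.116.0) takes a factory so it controls when the operation starts. A hedged sketch of the call-site change (fetchKeys stands in for this.ds().get(keys); the timeout value and table name are placeholders):

    import { pTimeout } from '@naturalcycles/js-lib'

    // Hypothetical stand-in for this.ds().get(keys)
    async function fetchKeys(): Promise<unknown[]> {
      return []
    }

    // Old call shape: pTimeout(fetchKeys(), { ... }) — the promise was created eagerly.
    // New call shape, as in this release: the operation is passed as a function.
    const r = await pTimeout(() => fetchKeys(), {
      timeout: 10_000, // placeholder; the code uses this.cfg.timeout
      name: 'datastore.getByIds(SomeTable)', // placeholder table name
    })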
@@ -81,7 +80,7 @@ class DatastoreDB extends db_lib_1.BaseCommonDB {
  const DS = datastoreLib.Datastore;
  this.cachedDatastore = new DS(this.cfg);
  // Second try (will throw)
- const r = await (0, js_lib_1.pTimeout)(this.ds().get(keys), {
+ const r = await (0, js_lib_1.pTimeout)(() => this.ds().get(keys), {
  timeout: this.cfg.timeout,
  name: `datastore.getByIds(${table}) second try`,
  errorData: {
@@ -133,14 +132,17 @@ class DatastoreDB extends db_lib_1.BaseCommonDB {
  ...this.cfg.streamOptions,
  ..._opt,
  };
- return (opt.experimentalCursorStream
+ const stream = (opt.experimentalCursorStream
  ? new DatastoreStreamReadable_1.DatastoreStreamReadable(q, opt, (0, js_lib_1.commonLoggerMinLevel)(this.cfg.logger, opt.debug ? 'log' : 'warn'))
- : this.ds().runQueryStream(q)).pipe(new stream_1.Transform({
+ : this.ds().runQueryStream(q))
+ .on('error', err => stream.emit('error', err))
+ .pipe(new node_stream_1.Transform({
  objectMode: true,
  transform: (chunk, _enc, cb) => {
  cb(null, this.mapId(chunk));
  },
  }));
+ return stream;
  }
  streamQuery(dbQuery, opt) {
  const q = (0, query_util_1.dbQueryToDatastoreQuery)(dbQuery, this.ds().createQuery(dbQuery.table));
@@ -30,22 +30,34 @@ class DatastoreKeyValueDB {
  const q = db_lib_1.DBQuery.create(table)
  .select(['id'])
  .limit(limit || 0);
- return this.db.streamQuery(q).pipe((0, nodejs_lib_1.transformMapSimple)(objectWithId => objectWithId.id, {
+ const stream = this.db
+ .streamQuery(q)
+ .on('error', err => stream.emit('error', err))
+ .pipe((0, nodejs_lib_1.transformMapSimple)(objectWithId => objectWithId.id, {
  errorMode: js_lib_1.ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
  }));
+ return stream;
  }
  streamValues(table, limit) {
  // `select v` doesn't work for some reason
  const q = db_lib_1.DBQuery.create(table).limit(limit || 0);
- return this.db.streamQuery(q).pipe((0, nodejs_lib_1.transformMapSimple)(obj => obj.v, {
+ const stream = this.db
+ .streamQuery(q)
+ .on('error', err => stream.emit('error', err))
+ .pipe((0, nodejs_lib_1.transformMapSimple)(obj => obj.v, {
  errorMode: js_lib_1.ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
  }));
+ return stream;
  }
  streamEntries(table, limit) {
  const q = db_lib_1.DBQuery.create(table).limit(limit || 0);
- return this.db.streamQuery(q).pipe((0, nodejs_lib_1.transformMapSimple)(obj => [obj.id, obj.v], {
+ const stream = this.db
+ .streamQuery(q)
+ .on('error', err => stream.emit('error', err))
+ .pipe((0, nodejs_lib_1.transformMapSimple)(obj => [obj.id, obj.v], {
  errorMode: js_lib_1.ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
  }));
+ return stream;
  }
  async count(_table) {
  this.db.cfg.logger.warn(`DatastoreKeyValueDB.count is not supported`);
package/package.json CHANGED
@@ -1,14 +1,14 @@
  {
  "name": "@naturalcycles/datastore-lib",
- "version": "3.24.1",
+ "version": "3.25.1",
  "description": "Opinionated library to work with Google Datastore",
  "scripts": {
  "prepare": "husky install"
  },
  "dependencies": {
  "@google-cloud/datastore": "^7.0.0",
- "@naturalcycles/db-lib": "^8.0.0",
- "@naturalcycles/js-lib": "^14.0.0",
+ "@naturalcycles/db-lib": "^8.46.1",
+ "@naturalcycles/js-lib": "^14.116.0",
  "@naturalcycles/nodejs-lib": "^12.0.0",
  "grpc": "^1.24.2"
  },
@@ -31,7 +31,7 @@
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "engines": {
- "node": ">=14.15.0"
+ "node": ">=16.10.0"
  },
  "publishConfig": {
  "access": "public"
@@ -1,6 +1,7 @@
- import { Readable } from 'stream'
+ import { Readable } from 'node:stream'
+ import type { RunQueryInfo } from '@google-cloud/datastore/build/src/query'
  import { Query } from '@google-cloud/datastore'
- import { _ms, CommonLogger } from '@naturalcycles/js-lib'
+ import { _ms, CommonLogger, pRetry } from '@naturalcycles/js-lib'
  import type { ReadableTyped } from '@naturalcycles/nodejs-lib'
  import type { DatastoreDBStreamOptions } from './datastore.model'

@@ -12,6 +13,7 @@ export class DatastoreStreamReadable<T = any> extends Readable implements Readab
  private done = false
  private lastQueryDone?: number
  private totalWait = 0
+ private table: string

  private opt: DatastoreDBStreamOptions & { batchSize: number }

@@ -25,8 +27,9 @@ export class DatastoreStreamReadable<T = any> extends Readable implements Readab
  }

  this.originalLimit = q.limitVal
+ this.table = q.kinds[0]!

- logger.log(`!! using experimentalCursorStream !! batchSize: ${opt.batchSize}`)
+ logger.log(`!! using experimentalCursorStream !! ${this.table}, batchSize: ${opt.batchSize}`)
  }

  private async runNextQuery(): Promise<void> {
@@ -52,48 +55,72 @@ export class DatastoreStreamReadable<T = any> extends Readable implements Readab
  q = q.start(this.endCursor)
  }

+ let rows: T[] = []
+ let info: RunQueryInfo = {}
+
  try {
- const [rows, info] = await q.run()
+ await pRetry(
+ async () => {
+ const res = await q.run()
+ rows = res[0]
+ info = res[1]
+ },
+ {
+ name: `DatastoreStreamReadable.query(${this.table})`,
+ maxAttempts: 5,
+ delay: 5000,
+ delayMultiplier: 2,
+ logger: this.logger,
+ },
+ )
+ } catch (err) {
+ console.error(
+ `DatastoreStreamReadable error!\n`,
+ {
+ table: this.table,
+ rowsRetrieved: this.rowsRetrieved,
+ },
+ err,
+ )
+ this.emit('error', err)
+ return
+ }

- this.rowsRetrieved += rows.length
+ this.rowsRetrieved += rows.length
+ this.logger.log(
+ `got ${rows.length} rows, ${this.rowsRetrieved} rowsRetrieved, totalWait: ${_ms(
+ this.totalWait,
+ )}`,
+ info.moreResults,
+ )
+
+ this.endCursor = info.endCursor
+ this.running = false // ready to take more _reads
+ this.lastQueryDone = Date.now()
+
+ rows.forEach(row => this.push(row))
+
+ if (
+ !info.endCursor ||
+ info.moreResults === 'NO_MORE_RESULTS' ||
+ (this.originalLimit && this.rowsRetrieved >= this.originalLimit)
+ ) {
  this.logger.log(
- `got ${rows.length} rows, ${this.rowsRetrieved} rowsRetrieved, totalWait: ${_ms(
- this.totalWait,
- )}`,
- info.moreResults,
+ `!!!! DONE! ${this.rowsRetrieved} rowsRetrieved, totalWait: ${_ms(this.totalWait)}`,
  )
-
- this.endCursor = info.endCursor
- this.running = false // ready to take more _reads
- this.lastQueryDone = Date.now()
-
- rows.forEach(row => this.push(row))
-
- if (
- !info.endCursor ||
- info.moreResults === 'NO_MORE_RESULTS' ||
- (this.originalLimit && this.rowsRetrieved >= this.originalLimit)
- ) {
- this.logger.log(
- `!!!! DONE! ${this.rowsRetrieved} rowsRetrieved, totalWait: ${_ms(this.totalWait)}`,
- )
- this.push(null)
- this.done = true
- } else if (this.opt.singleBatchBuffer) {
- // here we don't start next query until we're asked (via next _read call)
- // do, let's do nothing
- } else if (this.opt.rssLimitMB) {
- const rssMB = Math.round(process.memoryUsage().rss / 1024 / 1024)
-
- if (rssMB <= this.opt.rssLimitMB) {
- void this.runNextQuery()
- } else {
- this.logger.log(`rssLimitMB reached ${rssMB} > ${this.opt.rssLimitMB}, pausing stream`)
- }
+ this.push(null)
+ this.done = true
+ } else if (this.opt.singleBatchBuffer) {
+ // here we don't start next query until we're asked (via next _read call)
+ // do, let's do nothing
+ } else if (this.opt.rssLimitMB) {
+ const rssMB = Math.round(process.memoryUsage().rss / 1024 / 1024)
+
+ if (rssMB <= this.opt.rssLimitMB) {
+ void this.runNextQuery()
+ } else {
+ this.logger.log(`rssLimitMB reached ${rssMB} > ${this.opt.rssLimitMB}, pausing stream`)
  }
- } catch (err) {
- console.error('DatastoreStreamReadable error!\n', err)
- this.emit('error', err)
  }
  }

@@ -1,4 +1,4 @@
- import { Transform } from 'stream'
+ import { Transform } from 'node:stream'
  import type { Datastore, Key, Query } from '@google-cloud/datastore'
  import {
  BaseCommonDB,
@@ -130,7 +130,7 @@ export class DatastoreDB extends BaseCommonDB implements CommonDB {
  if (this.cfg.timeout) {
  // First try
  try {
- const r = await pTimeout(this.ds().get(keys), {
+ const r = await pTimeout(() => this.ds().get(keys), {
  timeout: this.cfg.timeout,
  name: `datastore.getByIds(${table})`,
  })
@@ -144,7 +144,7 @@ export class DatastoreDB extends BaseCommonDB implements CommonDB {
  this.cachedDatastore = new DS(this.cfg)

  // Second try (will throw)
- const r = await pTimeout(this.ds().get(keys), {
+ const r = await pTimeout(() => this.ds().get(keys), {
  timeout: this.cfg.timeout,
  name: `datastore.getByIds(${table}) second try`,
  errorData: {
@@ -216,7 +216,7 @@ export class DatastoreDB extends BaseCommonDB implements CommonDB {
  ..._opt,
  }

- return (
+ const stream: ReadableTyped<ROW> = (
  opt.experimentalCursorStream
  ? new DatastoreStreamReadable(
  q,
@@ -224,14 +224,18 @@ export class DatastoreDB extends BaseCommonDB implements CommonDB {
  commonLoggerMinLevel(this.cfg.logger, opt.debug ? 'log' : 'warn'),
  )
  : this.ds().runQueryStream(q)
- ).pipe(
- new Transform({
- objectMode: true,
- transform: (chunk, _enc, cb) => {
- cb(null, this.mapId(chunk))
- },
- }),
  )
+ .on('error', err => stream.emit('error', err))
+ .pipe(
+ new Transform({
+ objectMode: true,
+ transform: (chunk, _enc, cb) => {
+ cb(null, this.mapId(chunk))
+ },
+ }),
+ )
+
+ return stream
  }

  override streamQuery<ROW extends ObjectWithId>(
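Note: Node's .pipe() forwards data but not 'error' events, so runQueryStream() (and the KV streams below) now re-emit errors from the source query stream on the piped stream that callers receive; previously such errors could go unnoticed by consumers of the returned stream. A minimal generic sketch of the pattern (the sample streams are illustrative, not from the package):

    import { Readable, Transform } from 'node:stream'

    const source = Readable.from(['a', 'b', 'c']) // stand-in for the query stream
    const mapper = new Transform({
      objectMode: true,
      transform: (chunk, _enc, cb) => cb(null, String(chunk).toUpperCase()),
    })

    // .pipe() does not propagate 'error', so re-emit source errors on the returned stream:
    const stream = source.on('error', err => stream.emit('error', err)).pipe(mapper)

    stream.on('data', row => console.log(row))
    stream.on('error', err => console.error('stream failed', err))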
@@ -49,32 +49,47 @@ export class DatastoreKeyValueDB implements CommonKeyValueDB {
  .select(['id'])
  .limit(limit || 0)

- return this.db.streamQuery<KVObject>(q).pipe(
- transformMapSimple<ObjectWithId<string>, string>(objectWithId => objectWithId.id, {
- errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
- }),
- )
+ const stream: ReadableTyped<string> = this.db
+ .streamQuery<KVObject>(q)
+ .on('error', err => stream.emit('error', err))
+ .pipe(
+ transformMapSimple<ObjectWithId<string>, string>(objectWithId => objectWithId.id, {
+ errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
+ }),
+ )
+
+ return stream
  }

  streamValues(table: string, limit?: number): ReadableTyped<Buffer> {
  // `select v` doesn't work for some reason
  const q = DBQuery.create(table).limit(limit || 0)

- return this.db.streamQuery<KVObject>(q).pipe(
- transformMapSimple<{ v: Buffer }, Buffer>(obj => obj.v, {
- errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
- }),
- )
+ const stream: ReadableTyped<string> = this.db
+ .streamQuery<KVObject>(q)
+ .on('error', err => stream.emit('error', err))
+ .pipe(
+ transformMapSimple<{ v: Buffer }, Buffer>(obj => obj.v, {
+ errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
+ }),
+ )
+
+ return stream
  }

  streamEntries(table: string, limit?: number): ReadableTyped<KeyValueDBTuple> {
  const q = DBQuery.create(table).limit(limit || 0)

- return this.db.streamQuery<KVObject>(q).pipe(
- transformMapSimple<KVObject, KeyValueDBTuple>(obj => [obj.id, obj.v], {
- errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
- }),
- )
+ const stream: ReadableTyped<string> = this.db
+ .streamQuery<KVObject>(q)
+ .on('error', err => stream.emit('error', err))
+ .pipe(
+ transformMapSimple<KVObject, KeyValueDBTuple>(obj => [obj.id, obj.v], {
+ errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
+ }),
+ )
+
+ return stream
  }

  async count(_table: string): Promise<number> {
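With errors now surfacing on the returned streams, consumers can rely on ordinary stream error handling; for example, stream.pipeline() can reject instead of resolving even though the underlying query failed. A hypothetical consumer sketch (the table name and counting logic are illustrative only):

    import { Writable } from 'node:stream'
    import { pipeline } from 'node:stream/promises'

    async function countEntries(kvDB: {
      streamEntries: (table: string) => NodeJS.ReadableStream
    }): Promise<number> {
      let count = 0
      await pipeline(
        kvDB.streamEntries('SomeTable'), // placeholder table name
        new Writable({
          objectMode: true,
          write(_entry, _enc, cb) {
            count++ // count each [id, value] tuple as it streams through
            cb()
          },
        }),
      )
      return count
    }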