@naturalcycles/datastore-lib 3.20.1 → 3.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DatastoreStreamReadable.js +4 -0
- package/dist/datastore.db.js +3 -2
- package/dist/datastore.model.d.ts +12 -1
- package/dist/query.util.js +2 -1
- package/package.json +7 -4
- package/src/DatastoreStreamReadable.ts +3 -0
- package/src/datastore.db.ts +6 -3
- package/src/datastore.model.ts +13 -1
- package/src/query.util.ts +2 -1
package/dist/DatastoreStreamReadable.js
CHANGED
@@ -54,6 +54,10 @@ class DatastoreStreamReadable extends stream_1.Readable {
             this.push(null);
             this.done = true;
         }
+        else if (this.opt.singleBatchBuffer) {
+            // here we don't start next query until we're asked (via next _read call)
+            // do, let's do nothing
+        }
         else if (this.opt.rssLimitMB) {
             const rssMB = Math.round(process.memoryUsage().rss / 1024 / 1024);
             if (rssMB <= this.opt.rssLimitMB) {
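The new `singleBatchBuffer` branch above makes the stream wait for the next `_read()` call before issuing another query, instead of prefetching. A minimal standalone sketch of that backpressure pattern (not the library's actual implementation; the `fetchBatch` callback, `Batch` shape and cursor handling are made up for illustration):

import { Readable } from 'node:stream'

interface Batch<T> {
  rows: T[]
  endCursor?: string
  done: boolean
}

class SingleBatchStream<T> extends Readable {
  private cursor?: string
  private running = false

  constructor(private fetchBatch: (cursor?: string) => Promise<Batch<T>>) {
    super({ objectMode: true })
  }

  override _read(): void {
    if (this.running) return // a batch is already in flight
    this.running = true
    this.fetchBatch(this.cursor)
      .then(({ rows, endCursor, done }) => {
        this.running = false
        this.cursor = endCursor
        rows.forEach(row => this.push(row)) // push exactly one batch per _read call
        if (done) this.push(null) // end of stream
      })
      .catch(err => this.destroy(err))
  }
}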
package/dist/datastore.db.js
CHANGED
@@ -10,7 +10,7 @@ const DatastoreStreamReadable_1 = require("./DatastoreStreamReadable");
 const query_util_1 = require("./query.util");
 // Datastore (also Firestore and other Google APIs) supports max 500 of items when saving/deleting, etc.
 const MAX_ITEMS = 500;
-const RETRY_ON = ['GOAWAY', 'UNAVAILABLE', 'UNKNOWN'];
+const RETRY_ON = ['GOAWAY', 'UNAVAILABLE', 'UNKNOWN', 'much contention'];
 // Examples of errors:
 // UNKNOWN: Stream removed
 const methodMap = {
@@ -303,7 +303,8 @@ class DatastoreDB extends db_lib_1.BaseCommonDB {
         stats
             .filter(s => !s.property_name.includes('.') && s.property_name !== 'id') // filter out objectify's "virtual properties"
             .forEach(stats => {
-            const {
+            const { property_type: dtype } = stats;
+            const name = stats.property_name;
             if (dtype === datastore_model_1.DatastoreType.Blob) {
                 s.properties[name] = {
                     instanceof: 'Buffer',
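The RETRY_ON list now also matches Datastore contention errors (whose message contains the "much contention" substring). An illustrative retry wrapper in the same spirit — not the library's actual retry code; `withRetry`, `maxAttempts` and the linear backoff are made-up names and values:

const RETRY_ON = ['GOAWAY', 'UNAVAILABLE', 'UNKNOWN', 'much contention']

async function withRetry<T>(fn: () => Promise<T>, maxAttempts = 3): Promise<T> {
  for (let attempt = 1; ; attempt++) {
    try {
      return await fn()
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err)
      const retriable = RETRY_ON.some(token => message.includes(token))
      if (!retriable || attempt >= maxAttempts) throw err
      // simple linear backoff before retrying the transient error
      await new Promise(resolve => setTimeout(resolve, 1000 * attempt))
    }
  }
}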
package/dist/datastore.model.d.ts
CHANGED
@@ -60,7 +60,8 @@ export interface DatastoreDBStreamOptions extends DatastoreDBOptions {
      */
     experimentalCursorStream?: boolean;
     /**
-     * Applicable to `experimentalCursorStream
+     * Applicable to `experimentalCursorStream`.
+     * Defines the size (limit) of each individual query.
      *
      * @default 1000
      */
@@ -78,6 +79,16 @@ export interface DatastoreDBStreamOptions extends DatastoreDBOptions {
      * @default 1000
      */
     rssLimitMB?: number;
+    /**
+     * Applicable to `experimentalCursorStream`
+     * Default false.
+     * If true, stream will pause until consumer requests more data (via _read).
+     * It means it'll run slower, as buffer will be equal to batchSize (1000) at max.
+     * There will be gaps in time between "last query loaded" and "next query requested".
+     * This mode is useful e.g for DB migrations, where you want to avoid "stale data".
+     * So, it minimizes the time between "item loaded" and "item saved" during DB migration.
+     */
+    singleBatchBuffer?: boolean;
     /**
      * Set to `true` to log additional debug info, when using experimentalCursorStream.
      *
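A hedged usage sketch of the new option, assuming the CommonDB streaming API from @naturalcycles/db-lib (`streamQuery`) and that the per-query size option documented above is named `batchSize`; the 'Items' table is hypothetical and the DatastoreDB config is omitted:

import { DBQuery } from '@naturalcycles/db-lib'
import { DatastoreDB } from '@naturalcycles/datastore-lib'

const db = new DatastoreDB() // config omitted for brevity

// Migration-style streaming: at most one batch (batchSize rows) is buffered,
// which minimizes the time between "item loaded" and "item saved".
const stream = db.streamQuery(DBQuery.create('Items'), {
  experimentalCursorStream: true,
  batchSize: 1000,
  singleBatchBuffer: true,
})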
package/dist/query.util.js
CHANGED
@@ -1,12 +1,13 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.dbQueryToDatastoreQuery = void 0;
-// import Operator = google.datastore.v1.CompositeFilter.Operator
 const FNAME_MAP = {
     id: '__key__',
 };
 const OP_MAP = {
     '==': '=',
+    in: 'IN',
+    'not-in': 'NOT_IN',
 };
 function dbQueryToDatastoreQuery(dbQuery, emptyQuery) {
     let q = emptyQuery;
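With the new OP_MAP entries, db-lib queries that use the `in` / `not-in` filter operators now translate to Datastore's IN / NOT_IN. A small sketch, assuming DBQuery's filter(name, op, value) signature; the table and field names are made up:

import { Datastore } from '@google-cloud/datastore'
import { DBQuery } from '@naturalcycles/db-lib'
import { dbQueryToDatastoreQuery } from './query.util'

const datastore = new Datastore()

const dbQuery = DBQuery.create('Users').filter('country', 'in', ['SE', 'CH'])

// Produces a Datastore query with a `country IN ['SE', 'CH']` filter
const dsQuery = dbQueryToDatastoreQuery(dbQuery, datastore.createQuery('Users'))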
package/package.json
CHANGED
@@ -1,22 +1,25 @@
 {
   "name": "@naturalcycles/datastore-lib",
-  "version": "3.
+  "version": "3.23.0",
   "description": "Opinionated library to work with Google Datastore",
   "scripts": {
     "prepare": "husky install"
   },
   "dependencies": {
-    "@google-cloud/datastore": "^
+    "@google-cloud/datastore": "^7.0.0",
     "@naturalcycles/db-lib": "^8.0.0",
     "@naturalcycles/js-lib": "^14.0.0",
     "@naturalcycles/nodejs-lib": "^12.0.0",
     "grpc": "^1.24.2"
   },
   "devDependencies": {
-    "@naturalcycles/dev-lib": "^
-    "@types/node": "^
+    "@naturalcycles/dev-lib": "^13.0.0",
+    "@types/node": "^18.7.2",
     "jest": "^28.1.0"
   },
+  "resolutions": {
+    "long": "^4.0.0"
+  },
   "files": [
     "dist",
     "src",
package/src/DatastoreStreamReadable.ts
CHANGED
@@ -79,6 +79,9 @@ export class DatastoreStreamReadable<T = any> extends Readable implements Readab
       )
       this.push(null)
       this.done = true
+    } else if (this.opt.singleBatchBuffer) {
+      // here we don't start next query until we're asked (via next _read call)
+      // do, let's do nothing
     } else if (this.opt.rssLimitMB) {
       const rssMB = Math.round(process.memoryUsage().rss / 1024 / 1024)
 
package/src/datastore.db.ts
CHANGED
@@ -45,7 +45,7 @@ import { dbQueryToDatastoreQuery } from './query.util'
 // Datastore (also Firestore and other Google APIs) supports max 500 of items when saving/deleting, etc.
 const MAX_ITEMS = 500
 
-const RETRY_ON = ['GOAWAY', 'UNAVAILABLE', 'UNKNOWN']
+const RETRY_ON = ['GOAWAY', 'UNAVAILABLE', 'UNKNOWN', 'much contention']
 // Examples of errors:
 // UNKNOWN: Stream removed
 
@@ -448,7 +448,8 @@ export class DatastoreDB extends BaseCommonDB implements CommonDB {
     stats
       .filter(s => !s.property_name.includes('.') && s.property_name !== 'id') // filter out objectify's "virtual properties"
       .forEach(stats => {
-        const {
+        const { property_type: dtype } = stats
+        const name = stats.property_name as keyof ROW
 
         if (dtype === DatastoreType.Blob) {
           s.properties[name] = {
@@ -488,7 +489,9 @@ export class DatastoreDB extends BaseCommonDB implements CommonDB {
           } as JsonSchemaNull
         }
       } else {
-        throw new Error(
+        throw new Error(
+          `Unknown Datastore Type '${stats.property_type}' for ${table}.${name as string}`,
+        )
       }
     })
 
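The stats mapping above is used when inferring a table's JSON schema from Datastore's built-in statistics: Blob properties become { instanceof: 'Buffer' }, and unknown property types now throw an error naming the table and property. A hedged sketch of calling it, assuming the CommonDB getTableSchema API from @naturalcycles/db-lib; the 'Items' table is hypothetical:

import { DatastoreDB } from '@naturalcycles/datastore-lib'

const db = new DatastoreDB() // config omitted for brevity

async function printSchema(): Promise<void> {
  const schema = await db.getTableSchema('Items')
  // Blob columns (if any) appear as { instanceof: 'Buffer' } in schema.properties
  console.log(schema.properties)
}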
package/src/datastore.model.ts
CHANGED
@@ -71,7 +71,8 @@ export interface DatastoreDBStreamOptions extends DatastoreDBOptions {
   experimentalCursorStream?: boolean
 
   /**
-   * Applicable to `experimentalCursorStream
+   * Applicable to `experimentalCursorStream`.
+   * Defines the size (limit) of each individual query.
    *
    * @default 1000
    */
@@ -91,6 +92,17 @@ export interface DatastoreDBStreamOptions extends DatastoreDBOptions {
    */
   rssLimitMB?: number
 
+  /**
+   * Applicable to `experimentalCursorStream`
+   * Default false.
+   * If true, stream will pause until consumer requests more data (via _read).
+   * It means it'll run slower, as buffer will be equal to batchSize (1000) at max.
+   * There will be gaps in time between "last query loaded" and "next query requested".
+   * This mode is useful e.g for DB migrations, where you want to avoid "stale data".
+   * So, it minimizes the time between "item loaded" and "item saved" during DB migration.
+   */
+  singleBatchBuffer?: boolean
+
   /**
    * Set to `true` to log additional debug info, when using experimentalCursorStream.
    *
package/src/query.util.ts
CHANGED
@@ -1,7 +1,6 @@
 import { Query } from '@google-cloud/datastore'
 import { DBQuery, DBQueryFilterOperator } from '@naturalcycles/db-lib'
 import { ObjectWithId, StringMap } from '@naturalcycles/js-lib'
-// import Operator = google.datastore.v1.CompositeFilter.Operator
 
 const FNAME_MAP: StringMap = {
   id: '__key__',
@@ -9,6 +8,8 @@ const FNAME_MAP: StringMap = {
 
 const OP_MAP: Partial<Record<DBQueryFilterOperator, string>> = {
   '==': '=',
+  in: 'IN',
+  'not-in': 'NOT_IN',
 }
 
 export function dbQueryToDatastoreQuery<ROW extends ObjectWithId>(