cry-db 2.1.28 → 2.1.31
This diff shows the changes between publicly available versions of the package as released to a supported public registry. It is provided for informational purposes only.
- package/dist/base.d.ts +2 -1
- package/dist/base.d.ts.map +1 -0
- package/dist/base.js +34 -34
- package/dist/base.js.map +1 -1
- package/dist/db.d.ts +2 -5
- package/dist/db.d.ts.map +1 -0
- package/dist/db.js +20 -24
- package/dist/db.js.map +1 -1
- package/dist/index.d.ts +6 -5
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +39 -6
- package/dist/index.js.map +1 -1
- package/dist/mongo.d.ts +4 -5
- package/dist/mongo.d.ts.map +1 -0
- package/dist/mongo.js +207 -207
- package/dist/mongo.js.map +1 -1
- package/dist/repo.d.ts +4 -3
- package/dist/repo.d.ts.map +1 -0
- package/dist/repo.js +16 -11
- package/dist/repo.js.map +1 -1
- package/dist/types.d.ts +1 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +4 -1
- package/dist/types.js.map +1 -1
- package/package.json +1 -1
package/dist/mongo.js
CHANGED
@@ -1,32 +1,32 @@
-
-
-
-
-
-
-
-
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DummyExportToFixTsCompilation = void 0;
+const bcrypt_1 = __importDefault(require("bcrypt"));
+const mongodb_1 = require("mongodb");
+const tiny_typed_emitter_1 = require("tiny-typed-emitter");
+const db_js_1 = require("./db.js");
+const types_js_1 = require("./types.js");
+const lodash_clonedeep_1 = __importDefault(require("lodash.clonedeep"));
+const base_js_1 = require("./base.js");
+const assert = (cond, msg) => {
+if (!cond) {
+console.log("assert failed", cond || msg);
+throw new Error(msg || cond);
+}
+};
 const saltRounds = 10;
 const TRANSACTION_OPTIONS = {
 defaultTransactionOptions: {
-readPreference: new ReadPreference("primary"),
-readConcern: new ReadConcern("local"),
-writeConcern: new WriteConcern("majority")
+readPreference: new mongodb_1.ReadPreference("primary"),
+readConcern: new mongodb_1.ReadConcern("local"),
+writeConcern: new mongodb_1.WriteConcern("majority")
 }
 };
-
-
-revisions;
-softdelete;
-session;
-emittingPublishEvents;
-auditing;
-auditCollectionName;
-auditedCollections;
-emitter;
-user;
-audit;
-syncSupport;
+exports.DummyExportToFixTsCompilation = true;
+class Mongo extends db_js_1.Db {
 constructor(db, url) {
 super(db, url);
 this.revisions = false;
@@ -37,30 +37,21 @@ export default class Mongo extends Db {
 this.auditing = false;
 this.auditCollectionName = "dblog";
 this.auditedCollections = this.auditCollections(process.env.AUDIT_COLLECTIONS || []);
-this.emitter = new TypedEmitter();
+this.emitter = new tiny_typed_emitter_1.TypedEmitter();
 this.user = undefined;
 this.audit = undefined;
-log.debug('new Mongo:', this.url, this.db);
-}
-static newid() {
-return Db.newid();
-}
-static toId(id) {
-return Db.toId(id);
-}
-static objectid(o) {
-return Db.objectid(o);
+db_js_1.log.debug('new Mongo:', this.url, this.db);
 }
 on(evt, listener) {
-log.debug("on", evt, listener);
+db_js_1.log.debug("on", evt, listener);
 this.emitter.on(evt, listener);
 }
 off(evt, listener) {
-log.debug("off", evt, listener);
+db_js_1.log.debug("off", evt, listener);
 this.emitter.off(evt, listener);
 }
 once(evt, listener) {
-log.debug("off", evt, listener);
+db_js_1.log.debug("off", evt, listener);
 this.emitter.off(evt, listener);
 }
 setUser(username) {
@@ -130,19 +121,19 @@
 return this.emittingPublishEvents;
 }
 async distinct(collection, field) {
-log.debug('distinct called', collection, field);
+db_js_1.log.debug('distinct called', collection, field);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 return await conn.distinct(field);
 }, false, { operation: "distinct", collection, field });
-log.debug('distinct returns', ret);
+db_js_1.log.debug('distinct returns', ret);
 return ret;
 }
 async count(collection, query = {}, opts = {}) {
-log.debug('distinct called', collection, query, opts);
+db_js_1.log.debug('distinct called', collection, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 return await conn.countDocuments(query, opts);
 }, false, { operation: "count", collection, query, opts });
-log.debug('count returns', ret);
+db_js_1.log.debug('count returns', ret);
 return ret;
 }
 async find(collection, query = {}, opts = {}) {
@@ -152,7 +143,7 @@ export default class Mongo extends Db {
 if (!query._deleted)
 query._deleted = { $exists: false };
 }
-log.debug('find called', collection, query, opts);
+db_js_1.log.debug('find called', collection, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -173,7 +164,7 @@ export default class Mongo extends Db {
 let res = await r.toArray();
 return this._processReturnedObject(res);
 }, false, { operation: "find", collection, query, opts });
-log.debug('find returns', ret);
+db_js_1.log.debug('find returns', ret);
 return ret;
 }
 async findAll(collection, query = {}, opts = {}) {
@@ -198,12 +189,12 @@ export default class Mongo extends Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findAll", collection, query, opts });
-log.debug('findAll returns', ret);
+db_js_1.log.debug('findAll returns', ret);
 return ret;
 }
 async findNewer(collection, timestamp, query = {}, opts = {}) {
 query = this._createQueryForNewer(timestamp, query);
-log.debug('findNewer called', collection, timestamp, query, opts);
+db_js_1.log.debug('findNewer called', collection, timestamp, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -225,11 +216,11 @@ export default class Mongo extends Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findNewer", collection, timestamp, query, opts });
-log.debug('findNewer returns', ret);
+db_js_1.log.debug('findNewer returns', ret);
 return ret;
 }
 async findNewerMany(spec = []) {
-log.debug('findNewerMany called', spec);
+db_js_1.log.debug('findNewerMany called', spec);
 let conn = await this.connect();
 const getOneColl = async (coll) => {
 let query = this._createQueryForNewer(coll.timestamp, coll.query);
@@ -262,7 +253,7 @@ export default class Mongo extends Db {
 _createQueryForNewer(timestamp, query) {
 let ts = (timestamp === 1 || timestamp === "1" || timestamp === "0" || timestamp === 0)
 ? {}
-: { _ts: { $gt: Base.timestamp(timestamp) } };
+: { _ts: { $gt: base_js_1.Base.timestamp(timestamp) } };
 query = {
 ...ts,
 ...(query || {}),
@@ -272,7 +263,7 @@ export default class Mongo extends Db {
 }
 async findAfter(collection, csq, query = {}, opts = {}) {
 query._csq = { $gt: csq };
-log.debug('findAfter called', collection, csq, query, opts);
+db_js_1.log.debug('findAfter called', collection, csq, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -294,11 +285,11 @@ export default class Mongo extends Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findNewer", collection, csq, query, opts });
-log.debug('findNewer returns', ret);
+db_js_1.log.debug('findNewer returns', ret);
 return ret;
 }
 async findAfterMany(spec = []) {
-log.debug('findAfterMany called', spec);
+db_js_1.log.debug('findAfterMany called', spec);
 let conn = await this.connect();
 const getOneColl = async (coll) => {
 let r = conn
@@ -330,10 +321,10 @@ export default class Mongo extends Db {
 return out;
 }
 async findNewerFromDate(collection, date, query = {}, opts = {}) {
-let ts = new Timestamp(0, new Date(date).valueOf() / 1000);
-log.debug('findNewerFromDate called', collection, date, query, opts);
+let ts = new mongodb_1.Timestamp(0, new Date(date).valueOf() / 1000);
+db_js_1.log.debug('findNewerFromDate called', collection, date, query, opts);
 let ret = await Mongo.prototype.findNewer.call(this, collection, ts, query, opts); // prevent calling Repo.findNewer
-log.debug('findNewerFromDate returns', ret);
+db_js_1.log.debug('findNewerFromDate returns', ret);
 return ret;
 }
 async findOne(collection, query, projection) {
@@ -343,9 +334,9 @@ export default class Mongo extends Db {
 if (!query._deleted)
 query._deleted = { $exists: false };
 // if (!query._blocked) query._blocked = { $exists: false }; // intentionally - blocked records are returned
-log.debug('findOne called', collection, query, projection);
-let ret = await this.executeTransactionally(collection, async (conn) => await conn.findOne(query, { projection
-log.debug('findOne returns', ret);
+db_js_1.log.debug('findOne called', collection, query, projection);
+let ret = await this.executeTransactionally(collection, async (conn) => await conn.findOne(query, { ...(projection ? { projection } : {}), ...this._sessionOpt() }), false, { operation: "findOne", collection, query, projection });
+db_js_1.log.debug('findOne returns', ret);
 return this._processReturnedObject(ret);
 }
 async findById(collection, id, projection) {
@@ -357,15 +348,15 @@ export default class Mongo extends Db {
 _id: Mongo._toId(id),
 // _deleted: { $exists: false }
 };
-log.debug('findById called', this.db, collection, id, projection);
-log.trace('findById executing with query', collection, query, projection);
+db_js_1.log.debug('findById called', this.db, collection, id, projection);
+db_js_1.log.trace('findById executing with query', collection, query, projection);
 let ret = await this.executeTransactionally(collection, async (conn) => {
-let r = await conn.findOne(query, { projection
+let r = await conn.findOne(query, { ...(projection ? { projection } : {}), ...this._sessionOpt() });
 return r;
 }, false, { operation: "findById", collection, id, projection });
-if (ret
+if (ret === null || ret === void 0 ? void 0 : ret._deleted)
 ret = null;
-log.debug('findById returns', ret);
+db_js_1.log.debug('findById returns', ret);
 return this._processReturnedObject(ret);
 }
 async updateOne(collection, query, update, options = { returnFullObject: false }) {
@@ -377,10 +368,10 @@ export default class Mongo extends Db {
 let opts = {
 upsert: false,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
 update = await this._processUpdateObject(update);
-log.debug('updateOne called', collection, query, update);
+db_js_1.log.debug('updateOne called', collection, query, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -391,11 +382,11 @@ export default class Mongo extends Db {
 let res = await conn.findOneAndUpdate(query, update, opts);
 if (!res.value)
 return null;
-let resObj = this._removeUnchanged(res.value, update, !!options
+let resObj = this._removeUnchanged(res.value, update, !!(options === null || options === void 0 ? void 0 : options.returnFullObject));
 await this._publishAndAudit('update', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "updateOne", collection, query, update, options });
-log.debug('updateOne returns', obj);
+db_js_1.log.debug('updateOne returns', obj);
 return this._processReturnedObject(await obj);
 }
 async save(collection, update, id = undefined, options = { returnFullObject: false }) {
@@ -405,11 +396,11 @@ export default class Mongo extends Db {
 let opts = {
 upsert: true,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
 let _id = Mongo.toId(id || update._id) || Mongo.newid();
 update = await this._processUpdateObject(update);
-log.debug('save called', collection, id, update);
+db_js_1.log.debug('save called', collection, id, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -420,11 +411,11 @@ export default class Mongo extends Db {
 let res = await conn.findOneAndUpdate({ _id }, update, opts);
 if (!res.value)
 return null;
-let resObj = this._removeUnchanged(res.value, update, !!options
+let resObj = this._removeUnchanged(res.value, update, !!(options === null || options === void 0 ? void 0 : options.returnFullObject));
 await this._publishAndAudit('update', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "save", collection, _id, update, options });
-log.debug('save returns', obj);
+db_js_1.log.debug('save returns', obj);
 return this._processReturnedObject(await obj);
 }
 async update(collection, query, update) {
@@ -432,7 +423,7 @@ export default class Mongo extends Db {
 assert(query);
 assert(update);
 if (this.syncSupport)
-log.warn("update does not increase _csq, avoit it.");
+db_js_1.log.warn("update does not increase _csq, avoit it.");
 if (!Object.keys(update).length)
 return { n: 0, ok: false };
 query = this.replaceIds(query);
@@ -440,10 +431,10 @@ export default class Mongo extends Db {
 let opts = {
 upsert: false,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
 update = await this._processUpdateObject(update);
-log.debug('update called', collection, query, update);
+db_js_1.log.debug('update called', collection, query, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -451,7 +442,7 @@ export default class Mongo extends Db {
 await this._processSequenceField(client, collection, update.$set, seqKeys);
 if (update.$set === undefined || Object.keys(update.$set).length === 0)
 delete update.$set;
-log.debug('update called', collection, query, update);
+db_js_1.log.debug('update called', collection, query, update);
 let res = await conn.updateMany(query, update, opts);
 let resObj = {
 n: res.modifiedCount,
@@ -460,7 +451,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('updateMany', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "update", collection, query, update });
-log.debug('update returns', obj);
+db_js_1.log.debug('update returns', obj);
 return await obj;
 }
 async upsert(collection, query, update, options = { returnFullObject: false }) {
@@ -475,15 +466,16 @@ export default class Mongo extends Db {
 ...options,
 upsert: true,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
-log.debug('upsert called', collection, query, update);
+db_js_1.log.debug('upsert called', collection, query, update);
 update = await this._processUpdateObject(update);
 let seqKeys = this._findSequenceKeys(update.$set);
-log.debug('upsert processed', collection, query, update);
+db_js_1.log.debug('upsert processed', collection, query, update);
 if (Object.keys(query).length === 0)
 query._id = Mongo.newid();
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
+var _a;
 update.$set = update.$set || {};
 if (seqKeys)
 await this._processSequenceField(client, collection, update.$set, seqKeys);
@@ -491,49 +483,51 @@ export default class Mongo extends Db {
 delete update.$set;
 let ret = await conn.findOneAndUpdate(query, update, opts);
 if (ret.value) {
-let oper = ret.lastErrorObject
-let retObj = oper === "insert" ? ret.value : this._removeUnchanged(ret.value, update, !!options
+let oper = ((_a = ret.lastErrorObject) === null || _a === void 0 ? void 0 : _a.updatedExisting) ? "update" : "insert";
+let retObj = oper === "insert" ? ret.value : this._removeUnchanged(ret.value, update, !!(options === null || options === void 0 ? void 0 : options.returnFullObject));
 await this._publishAndAudit(oper, this.db, collection, retObj);
 return retObj;
 }
 ;
 return ret;
 }, !!seqKeys, { operation: "upsert", query, update, options });
-log.debug('upsert returns', ret);
+db_js_1.log.debug('upsert returns', ret);
 return this._processReturnedObject(await ret);
 }
 async insert(collection, insert) {
 assert(collection, "collection can't be null");
 assert(insert, "insert can't be null");
 assert(typeof insert === "object", "insert must be an object");
-log.debug('insert called', collection, insert);
+db_js_1.log.debug('insert called', collection, insert);
 insert = this.replaceIds(insert);
 if (this.revisions) {
 insert._rev = 1;
-insert._ts = Base.timestamp();
+insert._ts = base_js_1.Base.timestamp();
 }
 await this._processHashedKeys(insert);
 let seqKeys = this._findSequenceKeys(insert);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 if (insert)
 insert = await this._processSequenceField(client, collection, insert, seqKeys);
-let obj = await conn.insertOne(insert,
+let obj = await conn.insertOne(insert, this._sessionOpt());
 let fullObj = { _id: obj.insertedId, ...insert };
 await this._publishAndAudit('insert', this.db, collection, fullObj);
 return fullObj;
 }, !!seqKeys, { operation: "insert", collection, insert });
-log.debug('insert returns', ret);
+db_js_1.log.debug('insert returns', ret);
 return this._processReturnedObject(await ret);
 }
 async upsertBatch(collection, batch) {
+var _a;
 assert(collection, "collection can't be null");
 assert(batch, "batch can't be null");
 assert(batch instanceof Array, "batch must be an Array");
-log.debug('upsertBatch called', collection, batch);
+db_js_1.log.debug('upsertBatch called', collection, batch);
 batch = this.replaceIds(batch);
 for (let i = 0; i < batch.length; i++)
-await this._processHashedKeys(batch[i].update);
+await this._processHashedKeys((_a = batch[i]) === null || _a === void 0 ? void 0 : _a.update);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
+var _a, _b;
 await this._processSequenceFieldForMany(client, collection, batch.map(b => b.update));
 let publications = [];
 let changes = [];
@@ -543,13 +537,13 @@ export default class Mongo extends Db {
 ...(opts || {}),
 upsert: true,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
 update = await this._processUpdateObject(update);
 let ret = (await conn.findOneAndUpdate(query, update, options));
-if (ret
-let oper = ret.lastErrorObject
-let retObj = oper === "insert" ? ret.value : this._removeUnchanged(ret.value, update, !!opts
+if ((_a = ret === null || ret === void 0 ? void 0 : ret.value) === null || _a === void 0 ? void 0 : _a._id) {
+let oper = ((_b = ret.lastErrorObject) === null || _b === void 0 ? void 0 : _b.updatedExisting) ? "update" : "insert";
+let retObj = oper === "insert" ? ret.value : this._removeUnchanged(ret.value, update, !!(opts === null || opts === void 0 ? void 0 : opts.returnFullObject));
 publications.push(await this._publishAndAudit(oper, this.db, collection, retObj, true));
 changes.push(retObj);
 }
@@ -562,28 +556,28 @@ export default class Mongo extends Db {
 operation: "batch",
 db: this.db,
 collection,
-data: publications.map(p => p
+data: publications.map(p => p === null || p === void 0 ? void 0 : p.payload).filter(p => !!p)
 }
 });
 }
 return changes;
 }, false, { operation: "upsertBatch", collection, batch });
-log.debug('upsertBatch returns', ret);
+db_js_1.log.debug('upsertBatch returns', ret);
 return ret;
 }
 async insertMany(collection, insert) {
 assert(collection, "collection can't be null");
 assert(insert, "insert can't be null");
 assert(insert instanceof Array, "insert must be an Array");
-log.debug('insertMany called', collection, insert);
+db_js_1.log.debug('insertMany called', collection, insert);
 insert = this.replaceIds(insert);
 for (let i = 0; i < insert.length; i++)
 await this._processHashedKeys(insert[i]);
 if (this.revisions)
-insert.forEach(ins => { ins._rev = 1; ins._ts = Base.timestamp(); });
+insert.forEach(ins => { ins._rev = 1; ins._ts = base_js_1.Base.timestamp(); });
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 await this._processSequenceFieldForMany(client, collection, insert);
-let obj = await conn.insertMany(insert,
+let obj = await conn.insertMany(insert, this._sessionOpt());
 let ret = [];
 for (let ns of Object.keys(obj.insertedIds)) {
 let n = Number(ns);
@@ -600,7 +594,7 @@ export default class Mongo extends Db {
 }
 return ret;
 }, false, { operation: "insertMany", collection, insert });
-log.debug('insertMany returns', ret);
+db_js_1.log.debug('insertMany returns', ret);
 return ret;
 }
 async deleteOne(collection, query) {
@@ -610,25 +604,25 @@ export default class Mongo extends Db {
 if (!this.softdelete) {
 let opts = {
 returnDocument: "after",
-
+...this._sessionOpt()
 };
-log.debug('deleteOne called', collection, query);
+db_js_1.log.debug('deleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.findOneAndDelete(query, opts);
 if (obj.value)
 await this._publishAndAudit('delete', this.db, collection, obj.value);
 return obj.value;
 }, false, { operation: "deleteOne", collection, query, softdelete: this.softdelete });
-log.debug('deleteOne returns', ret);
+db_js_1.log.debug('deleteOne returns', ret);
 return ret;
 }
 else {
 let opts = {
 upsert: true,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
-log.debug('deleteOne called', collection, query);
+db_js_1.log.debug('deleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 let del = {
 $set: { _deleted: new Date() },
@@ -642,7 +636,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('delete', this.db, collection, obj.value);
 return obj.value;
 }, false, { operation: "deleteOne", collection, query, softdelete: this.softdelete });
-log.debug('deleteOne returns', ret);
+db_js_1.log.debug('deleteOne returns', ret);
 return ret;
 }
 }
@@ -650,10 +644,10 @@ export default class Mongo extends Db {
 let opts = {
 upsert: false,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
 query = this.replaceIds(query);
-log.debug('blockOne called', collection, query);
+db_js_1.log.debug('blockOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 query._blocked = { $exists: 0 };
 let update = {
@@ -675,17 +669,17 @@ export default class Mongo extends Db {
 await this._publishAndAudit('block', this.db, collection, retObj);
 return retObj;
 }, false, { operation: "blockOne", collection, query });
-log.debug('blockOne returns', ret);
+db_js_1.log.debug('blockOne returns', ret);
 return ret;
 }
 async unblockOne(collection, query) {
 let opts = {
 upsert: false,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
 query = this.replaceIds(query);
-log.debug('unblockOne called', collection, query);
+db_js_1.log.debug('unblockOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 query._blocked = { $exists: 1 };
 let update = {
@@ -706,7 +700,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('unblock', this.db, collection, retObj);
 return retObj;
 }, false, { operation: "unblockOne", collection, query });
-log.debug('unblockOne returns', ret);
+db_js_1.log.debug('unblockOne returns', ret);
 return ret;
 }
 async hardDeleteOne(collection, query) {
@@ -715,9 +709,9 @@ export default class Mongo extends Db {
 query = this.replaceIds(query);
 let opts = {
 returnDocument: "after",
-
+...this._sessionOpt()
 };
-log.debug('hardDeleteOne called', collection, query);
+db_js_1.log.debug('hardDeleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.findOneAndDelete(query, opts);
 if (obj.value) {
@@ -725,21 +719,21 @@ export default class Mongo extends Db {
 }
 return obj.value;
 }, false, { operation: "hardDeleteOne", collection, query });
-log.debug('hardDeleteOne returns', ret);
+db_js_1.log.debug('hardDeleteOne returns', ret);
 return ret;
 }
 async delete(collection, query) {
 assert(collection);
 assert(query);
 if (this.syncSupport)
-log.warn("delete does not increase _csq, avoit it.");
+db_js_1.log.warn("delete does not increase _csq, avoit it.");
 query = this.replaceIds(query);
 if (!this.softdelete) {
 let opts = {
 returnDocument: "after",
-
+...this._sessionOpt()
 };
-log.debug('delete called', collection, query);
+db_js_1.log.debug('delete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.deleteMany(query, opts);
 let resObj = {
@@ -749,17 +743,17 @@ export default class Mongo extends Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "delete", collection, query, softdelete: this.softdelete });
-log.debug('delete returns', ret);
+db_js_1.log.debug('delete returns', ret);
 return ret;
 }
 else {
 let opts = {
 upsert: false,
 returnDocument: "after",
-
+...this._sessionOpt()
 };
 let date = new Date();
-log.debug('delete called', collection, query);
+db_js_1.log.debug('delete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.updateMany(query, { $set: { _deleted: date } }, opts);
 let resObj = {
@@ -769,7 +763,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "delete", collection, query, softdelete: this.softdelete });
-log.debug('delete returns', ret);
+db_js_1.log.debug('delete returns', ret);
 return ret;
 }
 }
@@ -777,13 +771,13 @@ export default class Mongo extends Db {
 assert(collection);
 assert(query);
 if (this.syncSupport)
-log.warn("hardDelete does not increase _csq, avoit it.");
+db_js_1.log.warn("hardDelete does not increase _csq, avoit it.");
 query = this.replaceIds(query);
 let opts = {
 returnDocument: "after",
-
+...this._sessionOpt()
 };
-log.debug('hardDelete called', collection, query);
+db_js_1.log.debug('hardDelete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.deleteMany(query, opts);
 let resObj = {
@@ -793,12 +787,12 @@ export default class Mongo extends Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "hardDelete", collection, query, softdelete: this.softdelete });
-log.debug('hardDelete returns', ret);
+db_js_1.log.debug('hardDelete returns', ret);
 return ret;
 }
 async testHash(collection, query, field, unhashedValue) {
 let _field;
-log.debug('teshHash called', collection, query, field, unhashedValue);
+db_js_1.log.debug('teshHash called', collection, query, field, unhashedValue);
 if (typeof field === "object") {
 if (Object.keys(field).length === 1)
 [_field, unhashedValue] = Object.entries(field)[0];
@@ -810,21 +804,21 @@ export default class Mongo extends Db {
 if (!/^__hashed__/.test(_field))
 _field = "__hashed__" + _field;
 let conn = await this.connect();
-let obj = await conn.db(this.db).collection(collection).findOne(query, { projection: { [_field]: 1 },
+let obj = await conn.db(this.db).collection(collection).findOne(query, { projection: { [_field]: 1 }, ...this._sessionOpt() });
 if (!obj || !obj[_field]) {
-log.debug('teshHash returns false', obj);
+db_js_1.log.debug('teshHash returns false', obj);
 return false;
 }
-let res = await
-log.debug('teshHash returns', res);
+let res = await bcrypt_1.default.compare(unhashedValue, obj[_field].hash);
+db_js_1.log.debug('teshHash returns', res);
 return res;
 }
 async aggregate(collection, pipeline, opts = {
-readPreference: ReadPreference.SECONDARY_PREFERRED,
+readPreference: mongodb_1.ReadPreference.SECONDARY_PREFERRED,
 }) {
 assert(collection);
 assert(pipeline instanceof Array);
-log.debug('aggregate called', collection, pipeline);
+db_js_1.log.debug('aggregate called', collection, pipeline);
 pipeline = this.replaceIds(pipeline);
 if (this.session)
 opts.session = this.session;
@@ -832,15 +826,15 @@ export default class Mongo extends Db {
 let res = await conn.aggregate(pipeline, opts).toArray();
 return res;
 }, false, { operation: "aggregate", collection, pipeline, opts });
-log.debug('aggregare returns', ret);
+db_js_1.log.debug('aggregare returns', ret);
 return ret;
 }
 async isUnique(collection, field, value, id) {
 assert(collection);
 assert(field);
 assert(value);
-log.debug('isUnuqie called', collection, field, value, id);
-let _id = id
+db_js_1.log.debug('isUnuqie called', collection, field, value, id);
+let _id = id === null || id === void 0 ? void 0 : id.toString();
 let matches = await this.executeTransactionally(collection, async (conn) => {
 let agg = await conn.find({ [field]: value });
 let res = await agg.toArray();
@@ -853,13 +847,13 @@ export default class Mongo extends Db {
 return false;
 }
 }
-log.debug('isUnuqie returns', ret);
+db_js_1.log.debug('isUnuqie returns', ret);
 return ret;
 }
 async collectFieldValues(collection, field, inArray = false, opts) {
 assert(collection);
 assert(field);
-log.debug('collectFieldValues called', collection, field);
+db_js_1.log.debug('collectFieldValues called', collection, field);
 let pipeline = [
 { $group: { _id: '$' + field } },
 { $sort: { _id: 1 } }
@@ -874,33 +868,33 @@ export default class Mongo extends Db {
 let res = await agg.toArray();
 return res;
 }, false, { operation: "collectFieldValues", collection, field, inArray, pipeline, opts });
-let ret = res
-log.debug('collectFieldValues returns', ret);
+let ret = res === null || res === void 0 ? void 0 : res.map((v) => v._id);
+db_js_1.log.debug('collectFieldValues returns', ret);
 return ret;
 }
 async dropCollection(collection) {
 assert(collection);
-log.debug('dropCollection called', this.auditCollections);
+db_js_1.log.debug('dropCollection called', this.auditCollections);
 let client = await this.connect();
 let existing = await client.db(this.db).collections();
 if (existing.map((c) => c.s.name).includes(collection)) {
 await client.db(this.db).dropCollection(collection);
 }
-log.debug('dropCollection returns');
+db_js_1.log.debug('dropCollection returns');
 }
 async resetCollectionSync(collection) {
 assert(collection);
-log.debug('resetCollectionSync called for', collection);
+db_js_1.log.debug('resetCollectionSync called for', collection);
 let client = await this.connect();
 await client.db(this.db)
-.collection(SEQUENCES_COLLECTION)
+.collection(types_js_1.SEQUENCES_COLLECTION)
 .findOneAndDelete({ collection });
-log.debug(`resetCollectionSync for ${collection} returns`);
+db_js_1.log.debug(`resetCollectionSync for ${collection} returns`);
 }
 async dropCollections(collections) {
 assert(collections);
 assert(collections instanceof Array);
-log.debug('dropCollections called', this.auditCollections);
+db_js_1.log.debug('dropCollections called', this.auditCollections);
 let client = await this.connect();
 let existing = await client.db(this.db).collections();
 for await (let collection of collections) {
@@ -908,12 +902,12 @@ export default class Mongo extends Db {
 await client.db(this.db).dropCollection(collection);
 }
 }
-log.debug('dropCollections returns');
+db_js_1.log.debug('dropCollections returns');
 }
 async createCollections(collections) {
 assert(collections);
 assert(collections instanceof Array);
-log.debug('createCollections called', this.auditCollections);
+db_js_1.log.debug('createCollections called', this.auditCollections);
 let client = await this.connect();
 let existing = await this.getCollections();
 for await (let collection of collections) {
@@ -921,21 +915,21 @@ export default class Mongo extends Db {
 await client.db(this.db).createCollection(collection);
 }
 }
-log.debug('createCollections returns');
+db_js_1.log.debug('createCollections returns');
 }
 async createCollection(collection) {
 assert(collection);
-log.debug('createCollection called', collection);
+db_js_1.log.debug('createCollection called', collection);
 let client = await this.connect();
 let existing = await this.getCollections();
 if (!existing.includes(collection)) {
 await client.db(this.db).createCollection(collection);
 }
-log.debug('createCollection returns');
+db_js_1.log.debug('createCollection returns');
 }
 async dbLogPurge(collection, _id) {
 assert(collection);
-log.debug('dblogPurge called', collection, _id);
+db_js_1.log.debug('dblogPurge called', collection, _id);
 let ret = await this.executeTransactionally(collection, async () => {
 let cond = { db: this.db, collection, };
 if (_id !== undefined)
@@ -944,18 +938,18 @@ export default class Mongo extends Db {
 let ret = await client
 .db(this.db)
 .collection(this.auditCollectionName)
-.deleteMany(cond,
+.deleteMany(cond, this._sessionOpt());
 return {
 ok: !!ret.acknowledged,
 n: ret.deletedCount
 };
 }, false, { operation: "dbLogPurge", collection, _id });
-log.debug('dblogPurge returns', ret);
+db_js_1.log.debug('dblogPurge returns', ret);
 return ret;
 }
 async dbLogGet(collection, _id) {
 assert(collection);
-log.debug('dblogGet called', collection, _id);
+db_js_1.log.debug('dblogGet called', collection, _id);
 let ret = await this.executeTransactionally(collection, async () => {
 let cond = { db: this.db, collection };
 if (_id)
@@ -964,12 +958,12 @@ export default class Mongo extends Db {
 let ret = await client
 .db(this.db)
 .collection(this.auditCollectionName)
-.find(cond,
+.find(cond, this._sessionOpt())
 .sort({ _id: -1 })
 .toArray();
 return ret;
 }, false, { operation: "dbLogGet", collection, _id });
-log.debug('dblogGet returns', ret);
+db_js_1.log.debug('dblogGet returns', ret);
 return ret;
 }
 // HELPER FUNCTIONS
@@ -987,9 +981,9 @@ export default class Mongo extends Db {
 return undefined;
 if (typeof data === "symbol")
 return data.toString();
-if (data instanceof ObjectId)
+if (data instanceof mongodb_1.ObjectId)
 return data;
-if (data instanceof Timestamp)
+if (data instanceof mongodb_1.Timestamp)
 return data;
 if (data instanceof Date)
 return data;
@@ -999,17 +993,17 @@ export default class Mongo extends Db {
 return data;
 if (data instanceof String)
 return data;
-if (typeof data === "string" && data
-return new ObjectId(data);
+if (typeof data === "string" && (data === null || data === void 0 ? void 0 : data.match(/^[0-9a-f]{24,24}$/g)))
+return new mongodb_1.ObjectId(data);
 if (typeof data === "string")
 return data;
 if (data instanceof Array) {
 return data.map(d => this.replaceIds(d));
 }
-if (typeof data == 'object' && data
-return Base.timestamp(data);
-if (typeof data == 'object' && data
-return Base.timestamp(data);
+if (typeof data == 'object' && (data === null || data === void 0 ? void 0 : data.t) && (data === null || data === void 0 ? void 0 : data.i) !== undefined)
+return base_js_1.Base.timestamp(data);
+if (typeof data == 'object' && (data === null || data === void 0 ? void 0 : data.high) && (data === null || data === void 0 ? void 0 : data.low) !== undefined)
+return base_js_1.Base.timestamp(data);
 if (typeof data == 'object') {
 for (let key in data) {
 data[key] = this.replaceIds(data[key]);
@@ -1032,14 +1026,14 @@ export default class Mongo extends Db {
 if (this.session)
 try {
 await this.session.endSession();
-log.info("session ended");
+db_js_1.log.info("session ended");
 }
 catch (err) {
-log.error(`Error ending session ${err.message}`);
+db_js_1.log.error(`Error ending session ${err.message}`);
 }
 try {
 await super.close();
-log.info("connection closed");
+db_js_1.log.info("connection closed");
 }
 catch { /** intentionally */ }
 this.session = undefined;
@@ -1059,13 +1053,13 @@ export default class Mongo extends Db {
 let hadSession = !!this.session;
 if (!this.session) {
 this.session = client.startSession();
-log.info("session started");
+db_js_1.log.info("session started");
 }
 let session = this.session;
 await session.withTransaction(async () => await funct(client, session));
 if (!hadSession) {
 session.endSession();
-log.info("session ended");
+db_js_1.log.info("session ended");
 this.session = undefined;
 }
 return;
@@ -1081,24 +1075,24 @@ export default class Mongo extends Db {
 try {
 if (!this.session) {
 this.session = client.startSession(TRANSACTION_OPTIONS);
-log.info("session started");
+db_js_1.log.info("session started");
 }
 if (!await this.inTransaction()) {
 await this.session.startTransaction();
-log.info("transaction started");
+db_js_1.log.info("transaction started");
 }
 }
 catch (err) {
-log.error('startTransaction error', err);
+db_js_1.log.error('startTransaction error', err);
 try {
 if (this.session) {
 await this.session.endSession();
-log.info("session ended");
+db_js_1.log.info("session ended");
 }
 this.session = undefined;
 }
 catch (e) {
-log.error("startTransaction - error in endSession", e.message || e);
+db_js_1.log.error("startTransaction - error in endSession", e.message || e);
 }
 return;
 }
@@ -1111,13 +1105,13 @@ export default class Mongo extends Db {
 return;
 let session = this.session;
 await session.commitTransaction();
-log.info("transaction committed");
+db_js_1.log.info("transaction committed");
 session.endSession();
 this.session = undefined;
-log.info("session ended");
+db_js_1.log.info("session ended");
 }
 catch (err) {
-log.error(`commitTransaction error ${err.message || err}`);
+db_js_1.log.error(`commitTransaction error ${err.message || err}`);
 }
 }
 async abortTransaction() {
@@ -1128,13 +1122,13 @@ export default class Mongo extends Db {
 return;
 let session = this.session;
 await session.abortTransaction();
-log.info("transaction aborted");
+db_js_1.log.info("transaction aborted");
 await session.endSession();
 this.session = undefined;
-log.info("session ended");
+db_js_1.log.info("session ended");
 }
 catch (err) {
-log.error(`abortTransaction error ${err.message || err}`);
+db_js_1.log.error(`abortTransaction error ${err.message || err}`);
 }
 }
 async _try_once(useTransaction, f, collection) {
@@ -1171,9 +1165,9 @@ export default class Mongo extends Db {
 return await this._try_once(useTransaction, f, collection);
 }
 catch (err) {
-log.error(`Mongo command has failed for ${this.db}.${collection} - ${(this.session ? "ROLLBACK - " : "")} ${err.message || err}`);
-log.error(debugObject);
-log.debug(err);
+db_js_1.log.error(`Mongo command has failed for ${this.db}.${collection} - ${(this.session ? "ROLLBACK - " : "")} ${err.message || err}`);
+db_js_1.log.error(debugObject);
+db_js_1.log.debug(err);
 let x = (err || "").toString();
 console.log('x');
 let isRepeatable = x.match(/Topology is closed, please connect/i)
@@ -1182,62 +1176,64 @@ export default class Mongo extends Db {
 || x.match(/Topology closed/);
 if (isRepeatable) {
 try {
-log.error("Trying to reopen connection and repeat as");
+db_js_1.log.error("Trying to reopen connection and repeat as");
 await this.close();
 // a single retry
 await super.connect();
 let ret = await this._try_once(useTransaction, f, collection);
-log.error("OK - Retry succeeded.");
-log.error("");
+db_js_1.log.error("OK - Retry succeeded.");
+db_js_1.log.error("");
 return ret;
 }
 catch (err2) {
 /* intentional */
 if (debugObject)
-log.error(debugObject);
-log.error(`FAIL - Retry failed: ${err2.message || err2}`);
-log.error("");
+db_js_1.log.error(debugObject);
+db_js_1.log.error(`FAIL - Retry failed: ${err2.message || err2}`);
+db_js_1.log.error("");
 }
 }
 throw err;
 }
 }
 async _findLastSequenceForKey(connection, key) {
+var _a;
 let maxfld = await (connection
-.find({},
+.find({}, this._sessionOpt())
 .sort({ [key]: -1 })
 .limit(1)
 .toArray());
 if (maxfld.length === 0)
 return undefined;
-return parseInt(maxfld[0][key]) || 0;
+return parseInt((_a = maxfld === null || maxfld === void 0 ? void 0 : maxfld[0]) === null || _a === void 0 ? void 0 : _a[key]) || 0;
 }
 async _getNextCollectionUpdateSeqNo(collection, conn) {
+var _a;
 let opts = {
 upsert: true,
 returnDocument: "after",
 };
 let nextSeq = await (conn.db(this.db)
-.collection(SEQUENCES_COLLECTION)
+.collection(types_js_1.SEQUENCES_COLLECTION)
 .findOneAndUpdate({ collection }, {
 $inc: { seq: 1 },
 $currentDate: { last: { $type: "date" }, ts: { $type: "timestamp" } }
 }, opts));
 conn.db(this.db).collection(collection);
-return nextSeq
+return ((_a = nextSeq === null || nextSeq === void 0 ? void 0 : nextSeq.value) === null || _a === void 0 ? void 0 : _a.seq) || 1;
 }
 _findSequenceKeys(object) {
 if (!object)
 return;
 let seqKeys = Object.keys(object).filter(key => object[key] === 'SEQ_NEXT' || object[key] === 'SEQ_LAST');
-return (seqKeys
+return ((seqKeys === null || seqKeys === void 0 ? void 0 : seqKeys.length) > 0 || this.syncSupport) ? { seqKeys } : undefined;
 }
 async _processSequenceField(client, collection, insert, seqKeys) {
 assert(this.client);
 if (this.syncSupport) {
 insert._csq = (await this._getNextCollectionUpdateSeqNo(collection, client));
 }
-for await (let seqKey of seqKeys
+for await (let seqKey of (seqKeys === null || seqKeys === void 0 ? void 0 : seqKeys.seqKeys) || []) {
 let last = await this._findLastSequenceForKey(client.db(this.db).collection(collection), seqKey);
 if (last === undefined) {
 await this.createCollection(collection);
@@ -1251,13 +1247,13 @@ export default class Mongo extends Db {
 async _processSequenceFieldForMany(connection, collection, inserts) {
 assert(this.client);
 assert(connection);
-if (!inserts
+if (!(inserts === null || inserts === void 0 ? void 0 : inserts.length))
 return;
 let seqKeys = this._findSequenceKeys(inserts[0]);
 let seq = 0;
 if (this.syncSupport)
 seq = await this._getNextCollectionUpdateSeqNo(collection, connection);
-for await (let seqKey of seqKeys
+for await (let seqKey of (seqKeys === null || seqKeys === void 0 ? void 0 : seqKeys.seqKeys) || []) {
 let last = await this._findLastSequenceForKey(connection.db(this.db).collection(collection), seqKey);
 if (last === undefined) {
 try {
@@ -1314,7 +1310,7 @@ export default class Mongo extends Db {
 async _publishAndAudit(operation, db, collection, dataToPublish, noEmit) {
 if (!dataToPublish._id && !["deleteMany", "updateMany"].includes(operation))
 throw new Error(`_publishAndAudit requires _id for ${operation}`);
-let data =
+let data = (0, lodash_clonedeep_1.default)(dataToPublish);
 if (data._id && /[0-9a-f]{24,24}/i.test(data._id.toString()))
 data._id = data._id.toHexString();
 let toPublish = undefined;
@@ -1345,7 +1341,7 @@ export default class Mongo extends Db {
 return toPublish;
 }
 emit(what) {
-log.debug("emitting publish", what);
+db_js_1.log.debug("emitting publish", what);
 this.emitter.emit('publish', what);
 }
 async _writeAuditRecord(collection, operation, data, user = this.user, audit = this.audit) {
@@ -1356,7 +1352,7 @@ export default class Mongo extends Db {
 { $match: { entityid: Mongo._toId(data._id) } },
 { $sort: { rev: -1 } },
 { $limit: 1 }
-], { session: this.session })).toArray();
+], this.session ? { session: this.session } : {})).toArray();
 let previousAuditRecord = previousAuditRecords.length ? previousAuditRecords[0] : { rev: 0, changes: {} };
 if (previousAuditRecords.length === 0)
 await this.createCollection(this.auditCollectionName);
@@ -1365,9 +1361,9 @@ export default class Mongo extends Db {
 let auditRecord = {
 db: this.db,
 collection: collection,
-entityid: Base.objectid(data._id),
-rev: previousAuditRecord.rev + 1,
-ts: Base.timestamp(),
+entityid: base_js_1.Base.objectid(data._id),
+rev: ((previousAuditRecord === null || previousAuditRecord === void 0 ? void 0 : previousAuditRecord.rev) || 0) + 1,
+ts: base_js_1.Base.timestamp(),
 on: new Date(),
 operation: operation,
 changes: dataNoId,
@@ -1376,11 +1372,14 @@ export default class Mongo extends Db {
 auditRecord.user = user;
 if (audit)
 auditRecord.audit = audit;
-log.trace('AUDITING', auditRecord);
+db_js_1.log.trace('AUDITING', auditRecord);
 let ret = await client.db(this.db)
 .collection(this.auditCollectionName)
-.insertOne(auditRecord,
-log.debug('AUDITED', auditRecord, ret.insertedId);
+.insertOne(auditRecord, this._sessionOpt());
+db_js_1.log.debug('AUDITED', auditRecord, ret.insertedId);
+}
+_sessionOpt() {
+return this.session ? { session: this.session } : {};
 }
 async _processUpdateObject(update) {
 await this._processHashedKeys(update);
@@ -1419,8 +1418,8 @@ export default class Mongo extends Db {
 for await (let key of Object.keys(update)) {
 let shouldBeHashed = /^__hashed_(.+)$/.test(key);
 if (shouldBeHashed) {
-let salt = await
-let hash = await
+let salt = await bcrypt_1.default.genSalt(saltRounds);
+let hash = await bcrypt_1.default.hash(update[key], salt);
 update[key] = { salt, hash };
 }
 }
@@ -1444,5 +1443,6 @@ export default class Mongo extends Db {
 return ret;
 }
 }
+exports.default = Mongo;
 module.exports = Mongo;
 //# sourceMappingURL=mongo.js.map
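Beyond the mechanical CommonJS re-compilation (the `require`/`exports` preamble and the `mongodb_1.*`, `db_js_1.log`, `base_js_1.Base`, `types_js_1.*` prefixes), the recurring functional change in this diff is the new `_sessionOpt()` helper near the end of the class: driver calls now spread `{ session }` into their options only while a transaction session is active, rather than passing a possibly undefined session. A minimal TypeScript sketch of that pattern is below; the `_sessionOpt` body matches the diff, while `MongoLike` and `findOneExample` are illustrative names, not part of the package.

```ts
import { ClientSession, Collection, Document } from "mongodb";

class MongoLike {
  session?: ClientSession;

  // Contributes { session } only while a transaction session is active,
  // so spreading it into driver options never sets session: undefined.
  _sessionOpt(): { session?: ClientSession } {
    return this.session ? { session: this.session } : {};
  }

  async findOneExample(coll: Collection<Document>, query: Document, projection?: Document) {
    // Same call shape as the updated findOne/findById paths in the diff:
    // projection and session are added to the options only when present.
    return coll.findOne(query, {
      ...(projection ? { projection } : {}),
      ...this._sessionOpt(),
    });
  }
}
```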