cry-db 2.1.34 → 2.1.35
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- package/dist/base.js +32 -28
- package/dist/base.js.map +1 -1
- package/dist/db.js +14 -10
- package/dist/db.js.map +1 -1
- package/dist/index.js +39 -6
- package/dist/index.js.map +1 -1
- package/dist/mongo.js +148 -141
- package/dist/mongo.js.map +1 -1
- package/dist/repo.js +16 -9
- package/dist/repo.js.map +1 -1
- package/dist/types.js +4 -1
- package/dist/types.js.map +1 -1
- package/package.json +1 -2
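
Almost every change in dist/mongo.js below follows a single mechanical pattern: the file is now emitted as CommonJS instead of an ES module, so default imports go through the __importDefault helper and every bare imported name (log, Base, ReadPreference, TypedEmitter, SEQUENCES_COLLECTION, ...) becomes a property access on a require'd module object (db_js_1.log, base_js_1.Base, mongodb_1.ReadPreference, ...). A minimal sketch of that transform, assuming TypeScript's esModuleInterop-style emit; the hashValue module is illustrative and not part of the package:

    // ESM-style source (illustrative)
    import bcrypt from "bcrypt";
    import { log } from "./db.js";

    export async function hashValue(value) {
        log.debug("hashing value");
        return await bcrypt.hash(value, 10);
    }

    // Equivalent CommonJS emit, matching the pattern added in this diff
    "use strict";
    var __importDefault = (this && this.__importDefault) || function (mod) {
        return (mod && mod.__esModule) ? mod : { "default": mod };
    };
    Object.defineProperty(exports, "__esModule", { value: true });
    exports.hashValue = void 0;
    const bcrypt_1 = __importDefault(require("bcrypt"));  // default import -> .default access
    const db_js_1 = require("./db.js");                   // named import -> namespace object
    async function hashValue(value) {
        db_js_1.log.debug("hashing value");               // was: log.debug(...)
        return await bcrypt_1.default.hash(value, 10);    // was: bcrypt.hash(...)
    }
    exports.hashValue = hashValue;
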
package/dist/mongo.js
CHANGED
@@ -1,10 +1,16 @@
-
-
-
-
-
-
-
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DummyExportToFixTsCompilation = void 0;
+const bcrypt_1 = __importDefault(require("bcrypt"));
+const mongodb_1 = require("mongodb");
+const tiny_typed_emitter_1 = require("tiny-typed-emitter");
+const db_js_1 = require("./db.js");
+const types_js_1 = require("./types.js");
+const lodash_clonedeep_1 = __importDefault(require("lodash.clonedeep"));
+const base_js_1 = require("./base.js");
 const assert = (cond, msg) => {
 if (!cond) {
 console.log("assert failed", cond || msg);
@@ -14,13 +20,13 @@ const assert = (cond, msg) => {
 const saltRounds = 10;
 const TRANSACTION_OPTIONS = {
 defaultTransactionOptions: {
-readPreference: new ReadPreference("primary"),
-readConcern: new ReadConcern("local"),
-writeConcern: new WriteConcern("majority")
+readPreference: new mongodb_1.ReadPreference("primary"),
+readConcern: new mongodb_1.ReadConcern("local"),
+writeConcern: new mongodb_1.WriteConcern("majority")
 }
 };
-
-
+exports.DummyExportToFixTsCompilation = true;
+class Mongo extends db_js_1.Db {
 constructor(db, url) {
 super(db, url);
 this.revisions = false;
@@ -31,21 +37,21 @@ export default class Mongo extends Db {
 this.auditing = false;
 this.auditCollectionName = "dblog";
 this.auditedCollections = this.auditCollections(process.env.AUDIT_COLLECTIONS || []);
-this.emitter = new TypedEmitter();
+this.emitter = new tiny_typed_emitter_1.TypedEmitter();
 this.user = undefined;
 this.audit = undefined;
-log.debug('new Mongo:', this.url, this.db);
+db_js_1.log.debug('new Mongo:', this.url, this.db);
 }
 on(evt, listener) {
-log.debug("on", evt, listener);
+db_js_1.log.debug("on", evt, listener);
 this.emitter.on(evt, listener);
 }
 off(evt, listener) {
-log.debug("off", evt, listener);
+db_js_1.log.debug("off", evt, listener);
 this.emitter.off(evt, listener);
 }
 once(evt, listener) {
-log.debug("off", evt, listener);
+db_js_1.log.debug("off", evt, listener);
 this.emitter.off(evt, listener);
 }
 setUser(username) {
@@ -115,19 +121,19 @@ export default class Mongo extends Db {
 return this.emittingPublishEvents;
 }
 async distinct(collection, field) {
-log.debug('distinct called', collection, field);
+db_js_1.log.debug('distinct called', collection, field);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 return await conn.distinct(field);
 }, false, { operation: "distinct", collection, field });
-log.debug('distinct returns', ret);
+db_js_1.log.debug('distinct returns', ret);
 return ret;
 }
 async count(collection, query = {}, opts = {}) {
-log.debug('distinct called', collection, query, opts);
+db_js_1.log.debug('distinct called', collection, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 return await conn.countDocuments(query, opts);
 }, false, { operation: "count", collection, query, opts });
-log.debug('count returns', ret);
+db_js_1.log.debug('count returns', ret);
 return ret;
 }
 async find(collection, query = {}, opts = {}) {
@@ -137,7 +143,7 @@ export default class Mongo extends Db {
 if (!query._deleted)
 query._deleted = { $exists: false };
 }
-log.debug('find called', collection, query, opts);
+db_js_1.log.debug('find called', collection, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -158,7 +164,7 @@ export default class Mongo extends Db {
 let res = await r.toArray();
 return this._processReturnedObject(res);
 }, false, { operation: "find", collection, query, opts });
-log.debug('find returns', ret);
+db_js_1.log.debug('find returns', ret);
 return ret;
 }
 async findAll(collection, query = {}, opts = {}) {
@@ -183,12 +189,12 @@ export default class Mongo extends Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findAll", collection, query, opts });
-log.debug('findAll returns', ret);
+db_js_1.log.debug('findAll returns', ret);
 return ret;
 }
 async findNewer(collection, timestamp, query = {}, opts = {}) {
 query = this._createQueryForNewer(timestamp, query);
-log.debug('findNewer called', collection, timestamp, query, opts);
+db_js_1.log.debug('findNewer called', collection, timestamp, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -210,11 +216,11 @@ export default class Mongo extends Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findNewer", collection, timestamp, query, opts });
-log.debug('findNewer returns', ret);
+db_js_1.log.debug('findNewer returns', ret);
 return ret;
 }
 async findNewerMany(spec = []) {
-log.debug('findNewerMany called', spec);
+db_js_1.log.debug('findNewerMany called', spec);
 let conn = await this.connect();
 const getOneColl = async (coll) => {
 let query = this._createQueryForNewer(coll.timestamp, coll.query);
@@ -247,7 +253,7 @@ export default class Mongo extends Db {
 _createQueryForNewer(timestamp, query) {
 let ts = (timestamp === 1 || timestamp === "1" || timestamp === "0" || timestamp === 0)
 ? {}
-: { _ts: { $gt: Base.timestamp(timestamp) } };
+: { _ts: { $gt: base_js_1.Base.timestamp(timestamp) } };
 query = {
 ...ts,
 ...(query || {}),
@@ -257,7 +263,7 @@ export default class Mongo extends Db {
 }
 async findAfter(collection, csq, query = {}, opts = {}) {
 query._csq = { $gt: csq };
-log.debug('findAfter called', collection, csq, query, opts);
+db_js_1.log.debug('findAfter called', collection, csq, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -279,11 +285,11 @@ export default class Mongo extends Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findNewer", collection, csq, query, opts });
-log.debug('findNewer returns', ret);
+db_js_1.log.debug('findNewer returns', ret);
 return ret;
 }
 async findAfterMany(spec = []) {
-log.debug('findAfterMany called', spec);
+db_js_1.log.debug('findAfterMany called', spec);
 let conn = await this.connect();
 const getOneColl = async (coll) => {
 let r = conn
@@ -315,10 +321,10 @@ export default class Mongo extends Db {
 return out;
 }
 async findNewerFromDate(collection, date, query = {}, opts = {}) {
-let ts = new Timestamp(0, new Date(date).valueOf() / 1000);
-log.debug('findNewerFromDate called', collection, date, query, opts);
+let ts = new mongodb_1.Timestamp(0, new Date(date).valueOf() / 1000);
+db_js_1.log.debug('findNewerFromDate called', collection, date, query, opts);
 let ret = await Mongo.prototype.findNewer.call(this, collection, ts, query, opts); // prevent calling Repo.findNewer
-log.debug('findNewerFromDate returns', ret);
+db_js_1.log.debug('findNewerFromDate returns', ret);
 return ret;
 }
 async findOne(collection, query, projection) {
@@ -328,9 +334,9 @@ export default class Mongo extends Db {
 if (!query._deleted)
 query._deleted = { $exists: false };
 // if (!query._blocked) query._blocked = { $exists: false }; // intentionally - blocked records are returned
-log.debug('findOne called', collection, query, projection);
+db_js_1.log.debug('findOne called', collection, query, projection);
 let ret = await this.executeTransactionally(collection, async (conn) => await conn.findOne(query, { ...(projection ? { projection } : {}), ...this._sessionOpt() }), false, { operation: "findOne", collection, query, projection });
-log.debug('findOne returns', ret);
+db_js_1.log.debug('findOne returns', ret);
 return this._processReturnedObject(ret);
 }
 async findById(collection, id, projection) {
@@ -342,15 +348,15 @@ export default class Mongo extends Db {
 _id: Mongo._toId(id),
 // _deleted: { $exists: false }
 };
-log.debug('findById called', this.db, collection, id, projection);
-log.trace('findById executing with query', collection, query, projection);
+db_js_1.log.debug('findById called', this.db, collection, id, projection);
+db_js_1.log.trace('findById executing with query', collection, query, projection);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let r = await conn.findOne(query, { ...(projection ? { projection } : {}), ...this._sessionOpt() });
 return r;
 }, false, { operation: "findById", collection, id, projection });
 if (ret === null || ret === void 0 ? void 0 : ret._deleted)
 ret = null;
-log.debug('findById returns', ret);
+db_js_1.log.debug('findById returns', ret);
 return this._processReturnedObject(ret);
 }
 async updateOne(collection, query, update, options = { returnFullObject: false }) {
@@ -365,7 +371,7 @@ export default class Mongo extends Db {
 ...this._sessionOpt()
 };
 update = await this._processUpdateObject(update);
-log.debug('updateOne called', collection, query, update);
+db_js_1.log.debug('updateOne called', collection, query, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -380,7 +386,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('update', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "updateOne", collection, query, update, options });
-log.debug('updateOne returns', obj);
+db_js_1.log.debug('updateOne returns', obj);
 return this._processReturnedObject(await obj);
 }
 async save(collection, update, id = undefined, options = { returnFullObject: false }) {
@@ -394,7 +400,7 @@ export default class Mongo extends Db {
 };
 let _id = Mongo.toId(id || update._id) || Mongo.newid();
 update = await this._processUpdateObject(update);
-log.debug('save called', collection, id, update);
+db_js_1.log.debug('save called', collection, id, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -409,7 +415,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('update', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "save", collection, _id, update, options });
-log.debug('save returns', obj);
+db_js_1.log.debug('save returns', obj);
 return this._processReturnedObject(await obj);
 }
 async update(collection, query, update) {
@@ -417,7 +423,7 @@ export default class Mongo extends Db {
 assert(query);
 assert(update);
 if (this.syncSupport)
-log.warn("update does not increase _csq, avoit it.");
+db_js_1.log.warn("update does not increase _csq, avoit it.");
 if (!Object.keys(update).length)
 return { n: 0, ok: false };
 query = this.replaceIds(query);
@@ -428,7 +434,7 @@ export default class Mongo extends Db {
 ...this._sessionOpt()
 };
 update = await this._processUpdateObject(update);
-log.debug('update called', collection, query, update);
+db_js_1.log.debug('update called', collection, query, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -436,7 +442,7 @@ export default class Mongo extends Db {
 await this._processSequenceField(client, collection, update.$set, seqKeys);
 if (update.$set === undefined || Object.keys(update.$set).length === 0)
 delete update.$set;
-log.debug('update called', collection, query, update);
+db_js_1.log.debug('update called', collection, query, update);
 let res = await conn.updateMany(query, update, opts);
 let resObj = {
 n: res.modifiedCount,
@@ -445,7 +451,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('updateMany', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "update", collection, query, update });
-log.debug('update returns', obj);
+db_js_1.log.debug('update returns', obj);
 return await obj;
 }
 async upsert(collection, query, update, options = { returnFullObject: false }) {
@@ -462,10 +468,10 @@ export default class Mongo extends Db {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-log.debug('upsert called', collection, query, update);
+db_js_1.log.debug('upsert called', collection, query, update);
 update = await this._processUpdateObject(update);
 let seqKeys = this._findSequenceKeys(update.$set);
-log.debug('upsert processed', collection, query, update);
+db_js_1.log.debug('upsert processed', collection, query, update);
 if (Object.keys(query).length === 0)
 query._id = Mongo.newid();
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
@@ -485,18 +491,18 @@ export default class Mongo extends Db {
 ;
 return ret;
 }, !!seqKeys, { operation: "upsert", query, update, options });
-log.debug('upsert returns', ret);
+db_js_1.log.debug('upsert returns', ret);
 return this._processReturnedObject(await ret);
 }
 async insert(collection, insert) {
 assert(collection, "collection can't be null");
 assert(insert, "insert can't be null");
 assert(typeof insert === "object", "insert must be an object");
-log.debug('insert called', collection, insert);
+db_js_1.log.debug('insert called', collection, insert);
 insert = this.replaceIds(insert);
 if (this.revisions) {
 insert._rev = 1;
-insert._ts = Base.timestamp();
+insert._ts = base_js_1.Base.timestamp();
 }
 await this._processHashedKeys(insert);
 let seqKeys = this._findSequenceKeys(insert);
@@ -508,7 +514,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('insert', this.db, collection, fullObj);
 return fullObj;
 }, !!seqKeys, { operation: "insert", collection, insert });
-log.debug('insert returns', ret);
+db_js_1.log.debug('insert returns', ret);
 return this._processReturnedObject(await ret);
 }
 async upsertBatch(collection, batch) {
@@ -516,7 +522,7 @@ export default class Mongo extends Db {
 assert(collection, "collection can't be null");
 assert(batch, "batch can't be null");
 assert(batch instanceof Array, "batch must be an Array");
-log.debug('upsertBatch called', collection, batch);
+db_js_1.log.debug('upsertBatch called', collection, batch);
 batch = this.replaceIds(batch);
 for (let i = 0; i < batch.length; i++)
 await this._processHashedKeys((_a = batch[i]) === null || _a === void 0 ? void 0 : _a.update);
@@ -556,19 +562,19 @@ export default class Mongo extends Db {
 }
 return changes;
 }, false, { operation: "upsertBatch", collection, batch });
-log.debug('upsertBatch returns', ret);
+db_js_1.log.debug('upsertBatch returns', ret);
 return ret;
 }
 async insertMany(collection, insert) {
 assert(collection, "collection can't be null");
 assert(insert, "insert can't be null");
 assert(insert instanceof Array, "insert must be an Array");
-log.debug('insertMany called', collection, insert);
+db_js_1.log.debug('insertMany called', collection, insert);
 insert = this.replaceIds(insert);
 for (let i = 0; i < insert.length; i++)
 await this._processHashedKeys(insert[i]);
 if (this.revisions)
-insert.forEach(ins => { ins._rev = 1; ins._ts = Base.timestamp(); });
+insert.forEach(ins => { ins._rev = 1; ins._ts = base_js_1.Base.timestamp(); });
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 await this._processSequenceFieldForMany(client, collection, insert);
 let obj = await conn.insertMany(insert, this._sessionOpt());
@@ -588,7 +594,7 @@ export default class Mongo extends Db {
 }
 return ret;
 }, false, { operation: "insertMany", collection, insert });
-log.debug('insertMany returns', ret);
+db_js_1.log.debug('insertMany returns', ret);
 return ret;
 }
 async deleteOne(collection, query) {
@@ -600,14 +606,14 @@ export default class Mongo extends Db {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-log.debug('deleteOne called', collection, query);
+db_js_1.log.debug('deleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.findOneAndDelete(query, opts);
 if (obj.value)
 await this._publishAndAudit('delete', this.db, collection, obj.value);
 return obj.value;
 }, false, { operation: "deleteOne", collection, query, softdelete: this.softdelete });
-log.debug('deleteOne returns', ret);
+db_js_1.log.debug('deleteOne returns', ret);
 return ret;
 }
 else {
@@ -616,7 +622,7 @@ export default class Mongo extends Db {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-log.debug('deleteOne called', collection, query);
+db_js_1.log.debug('deleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 let del = {
 $set: { _deleted: new Date() },
@@ -630,7 +636,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('delete', this.db, collection, obj.value);
 return obj.value;
 }, false, { operation: "deleteOne", collection, query, softdelete: this.softdelete });
-log.debug('deleteOne returns', ret);
+db_js_1.log.debug('deleteOne returns', ret);
 return ret;
 }
 }
@@ -641,7 +647,7 @@ export default class Mongo extends Db {
 ...this._sessionOpt()
 };
 query = this.replaceIds(query);
-log.debug('blockOne called', collection, query);
+db_js_1.log.debug('blockOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 query._blocked = { $exists: 0 };
 let update = {
@@ -663,7 +669,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('block', this.db, collection, retObj);
 return retObj;
 }, false, { operation: "blockOne", collection, query });
-log.debug('blockOne returns', ret);
+db_js_1.log.debug('blockOne returns', ret);
 return ret;
 }
 async unblockOne(collection, query) {
@@ -673,7 +679,7 @@ export default class Mongo extends Db {
 ...this._sessionOpt()
 };
 query = this.replaceIds(query);
-log.debug('unblockOne called', collection, query);
+db_js_1.log.debug('unblockOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 query._blocked = { $exists: 1 };
 let update = {
@@ -694,7 +700,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('unblock', this.db, collection, retObj);
 return retObj;
 }, false, { operation: "unblockOne", collection, query });
-log.debug('unblockOne returns', ret);
+db_js_1.log.debug('unblockOne returns', ret);
 return ret;
 }
 async hardDeleteOne(collection, query) {
@@ -705,7 +711,7 @@ export default class Mongo extends Db {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-log.debug('hardDeleteOne called', collection, query);
+db_js_1.log.debug('hardDeleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.findOneAndDelete(query, opts);
 if (obj.value) {
@@ -713,21 +719,21 @@ export default class Mongo extends Db {
 }
 return obj.value;
 }, false, { operation: "hardDeleteOne", collection, query });
-log.debug('hardDeleteOne returns', ret);
+db_js_1.log.debug('hardDeleteOne returns', ret);
 return ret;
 }
 async delete(collection, query) {
 assert(collection);
 assert(query);
 if (this.syncSupport)
-log.warn("delete does not increase _csq, avoit it.");
+db_js_1.log.warn("delete does not increase _csq, avoit it.");
 query = this.replaceIds(query);
 if (!this.softdelete) {
 let opts = {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-log.debug('delete called', collection, query);
+db_js_1.log.debug('delete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.deleteMany(query, opts);
 let resObj = {
@@ -737,7 +743,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "delete", collection, query, softdelete: this.softdelete });
-log.debug('delete returns', ret);
+db_js_1.log.debug('delete returns', ret);
 return ret;
 }
 else {
@@ -747,7 +753,7 @@ export default class Mongo extends Db {
 ...this._sessionOpt()
 };
 let date = new Date();
-log.debug('delete called', collection, query);
+db_js_1.log.debug('delete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.updateMany(query, { $set: { _deleted: date } }, opts);
 let resObj = {
@@ -757,7 +763,7 @@ export default class Mongo extends Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "delete", collection, query, softdelete: this.softdelete });
-log.debug('delete returns', ret);
+db_js_1.log.debug('delete returns', ret);
 return ret;
 }
 }
@@ -765,13 +771,13 @@ export default class Mongo extends Db {
 assert(collection);
 assert(query);
 if (this.syncSupport)
-log.warn("hardDelete does not increase _csq, avoit it.");
+db_js_1.log.warn("hardDelete does not increase _csq, avoit it.");
 query = this.replaceIds(query);
 let opts = {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-log.debug('hardDelete called', collection, query);
+db_js_1.log.debug('hardDelete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.deleteMany(query, opts);
 let resObj = {
@@ -781,12 +787,12 @@ export default class Mongo extends Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "hardDelete", collection, query, softdelete: this.softdelete });
-log.debug('hardDelete returns', ret);
+db_js_1.log.debug('hardDelete returns', ret);
 return ret;
 }
 async testHash(collection, query, field, unhashedValue) {
 let _field;
-log.debug('teshHash called', collection, query, field, unhashedValue);
+db_js_1.log.debug('teshHash called', collection, query, field, unhashedValue);
 if (typeof field === "object") {
 if (Object.keys(field).length === 1)
 [_field, unhashedValue] = Object.entries(field)[0];
@@ -800,19 +806,19 @@ export default class Mongo extends Db {
 let conn = await this.connect();
 let obj = await conn.db(this.db).collection(collection).findOne(query, { projection: { [_field]: 1 }, ...this._sessionOpt() });
 if (!obj || !obj[_field]) {
-log.debug('teshHash returns false', obj);
+db_js_1.log.debug('teshHash returns false', obj);
 return false;
 }
-let res = await
-log.debug('teshHash returns', res);
+let res = await bcrypt_1.default.compare(unhashedValue, obj[_field].hash);
+db_js_1.log.debug('teshHash returns', res);
 return res;
 }
 async aggregate(collection, pipeline, opts = {
-readPreference: ReadPreference.SECONDARY_PREFERRED,
+readPreference: mongodb_1.ReadPreference.SECONDARY_PREFERRED,
 }) {
 assert(collection);
 assert(pipeline instanceof Array);
-log.debug('aggregate called', collection, pipeline);
+db_js_1.log.debug('aggregate called', collection, pipeline);
 pipeline = this.replaceIds(pipeline);
 if (this.session)
 opts.session = this.session;
@@ -820,14 +826,14 @@ export default class Mongo extends Db {
 let res = await conn.aggregate(pipeline, opts).toArray();
 return res;
 }, false, { operation: "aggregate", collection, pipeline, opts });
-log.debug('aggregare returns', ret);
+db_js_1.log.debug('aggregare returns', ret);
 return ret;
 }
 async isUnique(collection, field, value, id) {
 assert(collection);
 assert(field);
 assert(value);
-log.debug('isUnuqie called', collection, field, value, id);
+db_js_1.log.debug('isUnuqie called', collection, field, value, id);
 let _id = id === null || id === void 0 ? void 0 : id.toString();
 let matches = await this.executeTransactionally(collection, async (conn) => {
 let agg = await conn.find({ [field]: value });
@@ -841,13 +847,13 @@ export default class Mongo extends Db {
 return false;
 }
 }
-log.debug('isUnuqie returns', ret);
+db_js_1.log.debug('isUnuqie returns', ret);
 return ret;
 }
 async collectFieldValues(collection, field, inArray = false, opts) {
 assert(collection);
 assert(field);
-log.debug('collectFieldValues called', collection, field);
+db_js_1.log.debug('collectFieldValues called', collection, field);
 let pipeline = [
 { $group: { _id: '$' + field } },
 { $sort: { _id: 1 } }
@@ -863,32 +869,32 @@ export default class Mongo extends Db {
 return res;
 }, false, { operation: "collectFieldValues", collection, field, inArray, pipeline, opts });
 let ret = res === null || res === void 0 ? void 0 : res.map((v) => v._id);
-log.debug('collectFieldValues returns', ret);
+db_js_1.log.debug('collectFieldValues returns', ret);
 return ret;
 }
 async dropCollection(collection) {
 assert(collection);
-log.debug('dropCollection called', this.auditCollections);
+db_js_1.log.debug('dropCollection called', this.auditCollections);
 let client = await this.connect();
 let existing = await client.db(this.db).collections();
 if (existing.map((c) => c.s.name).includes(collection)) {
 await client.db(this.db).dropCollection(collection);
 }
-log.debug('dropCollection returns');
+db_js_1.log.debug('dropCollection returns');
 }
 async resetCollectionSync(collection) {
 assert(collection);
-log.debug('resetCollectionSync called for', collection);
+db_js_1.log.debug('resetCollectionSync called for', collection);
 let client = await this.connect();
 await client.db(this.db)
-.collection(SEQUENCES_COLLECTION)
+.collection(types_js_1.SEQUENCES_COLLECTION)
 .findOneAndDelete({ collection });
-log.debug(`resetCollectionSync for ${collection} returns`);
+db_js_1.log.debug(`resetCollectionSync for ${collection} returns`);
 }
 async dropCollections(collections) {
 assert(collections);
 assert(collections instanceof Array);
-log.debug('dropCollections called', this.auditCollections);
+db_js_1.log.debug('dropCollections called', this.auditCollections);
 let client = await this.connect();
 let existing = await client.db(this.db).collections();
 for await (let collection of collections) {
@@ -896,12 +902,12 @@ export default class Mongo extends Db {
 await client.db(this.db).dropCollection(collection);
 }
 }
-log.debug('dropCollections returns');
+db_js_1.log.debug('dropCollections returns');
 }
 async createCollections(collections) {
 assert(collections);
 assert(collections instanceof Array);
-log.debug('createCollections called', this.auditCollections);
+db_js_1.log.debug('createCollections called', this.auditCollections);
 let client = await this.connect();
 let existing = await this.getCollections();
 for await (let collection of collections) {
@@ -909,21 +915,21 @@ export default class Mongo extends Db {
 await client.db(this.db).createCollection(collection);
 }
 }
-log.debug('createCollections returns');
+db_js_1.log.debug('createCollections returns');
 }
 async createCollection(collection) {
 assert(collection);
-log.debug('createCollection called', collection);
+db_js_1.log.debug('createCollection called', collection);
 let client = await this.connect();
 let existing = await this.getCollections();
 if (!existing.includes(collection)) {
 await client.db(this.db).createCollection(collection);
 }
-log.debug('createCollection returns');
+db_js_1.log.debug('createCollection returns');
 }
 async dbLogPurge(collection, _id) {
 assert(collection);
-log.debug('dblogPurge called', collection, _id);
+db_js_1.log.debug('dblogPurge called', collection, _id);
 let ret = await this.executeTransactionally(collection, async () => {
 let cond = { db: this.db, collection, };
 if (_id !== undefined)
@@ -938,12 +944,12 @@ export default class Mongo extends Db {
 n: ret.deletedCount
 };
 }, false, { operation: "dbLogPurge", collection, _id });
-log.debug('dblogPurge returns', ret);
+db_js_1.log.debug('dblogPurge returns', ret);
 return ret;
 }
 async dbLogGet(collection, _id) {
 assert(collection);
-log.debug('dblogGet called', collection, _id);
+db_js_1.log.debug('dblogGet called', collection, _id);
 let ret = await this.executeTransactionally(collection, async () => {
 let cond = { db: this.db, collection };
 if (_id)
@@ -957,7 +963,7 @@ export default class Mongo extends Db {
 .toArray();
 return ret;
 }, false, { operation: "dbLogGet", collection, _id });
-log.debug('dblogGet returns', ret);
+db_js_1.log.debug('dblogGet returns', ret);
 return ret;
 }
 // HELPER FUNCTIONS
@@ -975,9 +981,9 @@ export default class Mongo extends Db {
 return undefined;
 if (typeof data === "symbol")
 return data.toString();
-if (data instanceof ObjectId)
+if (data instanceof mongodb_1.ObjectId)
 return data;
-if (data instanceof Timestamp)
+if (data instanceof mongodb_1.Timestamp)
 return data;
 if (data instanceof Date)
 return data;
@@ -988,16 +994,16 @@ export default class Mongo extends Db {
 if (data instanceof String)
 return data;
 if (typeof data === "string" && (data === null || data === void 0 ? void 0 : data.match(/^[0-9a-f]{24,24}$/g)))
-return new ObjectId(data);
+return new mongodb_1.ObjectId(data);
 if (typeof data === "string")
 return data;
 if (data instanceof Array) {
 return data.map(d => this.replaceIds(d));
 }
 if (typeof data == 'object' && (data === null || data === void 0 ? void 0 : data.t) && (data === null || data === void 0 ? void 0 : data.i) !== undefined)
-return Base.timestamp(data);
+return base_js_1.Base.timestamp(data);
 if (typeof data == 'object' && (data === null || data === void 0 ? void 0 : data.high) && (data === null || data === void 0 ? void 0 : data.low) !== undefined)
-return Base.timestamp(data);
+return base_js_1.Base.timestamp(data);
 if (typeof data == 'object') {
 for (let key in data) {
 data[key] = this.replaceIds(data[key]);
@@ -1020,14 +1026,14 @@ export default class Mongo extends Db {
 if (this.session)
 try {
 await this.session.endSession();
-log.info("session ended");
+db_js_1.log.info("session ended");
 }
 catch (err) {
-log.error(`Error ending session ${err.message}`);
+db_js_1.log.error(`Error ending session ${err.message}`);
 }
 try {
 await super.close();
-log.info("connection closed");
+db_js_1.log.info("connection closed");
 }
 catch { /** intentionally */ }
 this.session = undefined;
@@ -1047,13 +1053,13 @@ export default class Mongo extends Db {
 let hadSession = !!this.session;
 if (!this.session) {
 this.session = client.startSession();
-log.info("session started");
+db_js_1.log.info("session started");
 }
 let session = this.session;
 await session.withTransaction(async () => await funct(client, session));
 if (!hadSession) {
 session.endSession();
-log.info("session ended");
+db_js_1.log.info("session ended");
 this.session = undefined;
 }
 return;
@@ -1069,24 +1075,24 @@ export default class Mongo extends Db {
 try {
 if (!this.session) {
 this.session = client.startSession(TRANSACTION_OPTIONS);
-log.info("session started");
+db_js_1.log.info("session started");
 }
 if (!await this.inTransaction()) {
 await this.session.startTransaction();
-log.info("transaction started");
+db_js_1.log.info("transaction started");
 }
 }
 catch (err) {
-log.error('startTransaction error', err);
+db_js_1.log.error('startTransaction error', err);
 try {
 if (this.session) {
 await this.session.endSession();
-log.info("session ended");
+db_js_1.log.info("session ended");
 }
 this.session = undefined;
 }
 catch (e) {
-log.error("startTransaction - error in endSession", e.message || e);
+db_js_1.log.error("startTransaction - error in endSession", e.message || e);
 }
 return;
 }
@@ -1099,13 +1105,13 @@ export default class Mongo extends Db {
 return;
 let session = this.session;
 await session.commitTransaction();
-log.info("transaction committed");
+db_js_1.log.info("transaction committed");
 session.endSession();
 this.session = undefined;
-log.info("session ended");
+db_js_1.log.info("session ended");
 }
 catch (err) {
-log.error(`commitTransaction error ${err.message || err}`);
+db_js_1.log.error(`commitTransaction error ${err.message || err}`);
 }
 }
 async abortTransaction() {
@@ -1116,13 +1122,13 @@ export default class Mongo extends Db {
 return;
 let session = this.session;
 await session.abortTransaction();
-log.info("transaction aborted");
+db_js_1.log.info("transaction aborted");
 await session.endSession();
 this.session = undefined;
-log.info("session ended");
+db_js_1.log.info("session ended");
 }
 catch (err) {
-log.error(`abortTransaction error ${err.message || err}`);
+db_js_1.log.error(`abortTransaction error ${err.message || err}`);
 }
 }
 async _try_once(useTransaction, f, collection) {
@@ -1159,9 +1165,9 @@ export default class Mongo extends Db {
 return await this._try_once(useTransaction, f, collection);
 }
 catch (err) {
-log.error(`Mongo command has failed for ${this.db}.${collection} - ${(this.session ? "ROLLBACK - " : "")} ${err.message || err}`);
-log.error(debugObject);
-log.debug(err);
+db_js_1.log.error(`Mongo command has failed for ${this.db}.${collection} - ${(this.session ? "ROLLBACK - " : "")} ${err.message || err}`);
+db_js_1.log.error(debugObject);
+db_js_1.log.debug(err);
 let x = (err || "").toString();
 console.log('x');
 let isRepeatable = x.match(/Topology is closed, please connect/i)
@@ -1170,21 +1176,21 @@ export default class Mongo extends Db {
 || x.match(/Topology closed/);
 if (isRepeatable) {
 try {
-log.error("Trying to reopen connection and repeat as");
+db_js_1.log.error("Trying to reopen connection and repeat as");
 await this.close();
 // a single retry
 await super.connect();
 let ret = await this._try_once(useTransaction, f, collection);
-log.error("OK - Retry succeeded.");
-log.error("");
+db_js_1.log.error("OK - Retry succeeded.");
+db_js_1.log.error("");
 return ret;
 }
 catch (err2) {
 /* intentional */
 if (debugObject)
-log.error(debugObject);
-log.error(`FAIL - Retry failed: ${err2.message || err2}`);
-log.error("");
+db_js_1.log.error(debugObject);
+db_js_1.log.error(`FAIL - Retry failed: ${err2.message || err2}`);
+db_js_1.log.error("");
 }
 }
 throw err;
@@ -1208,7 +1214,7 @@ export default class Mongo extends Db {
 returnDocument: "after",
 };
 let nextSeq = await (conn.db(this.db)
-.collection(SEQUENCES_COLLECTION)
+.collection(types_js_1.SEQUENCES_COLLECTION)
 .findOneAndUpdate({ collection }, {
 $inc: { seq: 1 },
 $currentDate: { last: { $type: "date" }, ts: { $type: "timestamp" } }
@@ -1304,7 +1310,7 @@ export default class Mongo extends Db {
 async _publishAndAudit(operation, db, collection, dataToPublish, noEmit) {
 if (!dataToPublish._id && !["deleteMany", "updateMany"].includes(operation))
 throw new Error(`_publishAndAudit requires _id for ${operation}`);
-let data =
+let data = (0, lodash_clonedeep_1.default)(dataToPublish);
 if (data._id && /[0-9a-f]{24,24}/i.test(data._id.toString()))
 data._id = data._id.toHexString();
 let toPublish = undefined;
@@ -1335,7 +1341,7 @@ export default class Mongo extends Db {
 return toPublish;
 }
 emit(what) {
-log.debug("emitting publish", what);
+db_js_1.log.debug("emitting publish", what);
 this.emitter.emit('publish', what);
 }
 async _writeAuditRecord(collection, operation, data, user = this.user, audit = this.audit) {
@@ -1355,9 +1361,9 @@ export default class Mongo extends Db {
 let auditRecord = {
 db: this.db,
 collection: collection,
-entityid: Base.objectid(data._id),
+entityid: base_js_1.Base.objectid(data._id),
 rev: ((previousAuditRecord === null || previousAuditRecord === void 0 ? void 0 : previousAuditRecord.rev) || 0) + 1,
-ts: Base.timestamp(),
+ts: base_js_1.Base.timestamp(),
 on: new Date(),
 operation: operation,
 changes: dataNoId,
@@ -1366,11 +1372,11 @@ export default class Mongo extends Db {
 auditRecord.user = user;
 if (audit)
 auditRecord.audit = audit;
-log.trace('AUDITING', auditRecord);
+db_js_1.log.trace('AUDITING', auditRecord);
 let ret = await client.db(this.db)
 .collection(this.auditCollectionName)
 .insertOne(auditRecord, this._sessionOpt());
-log.debug('AUDITED', auditRecord, ret.insertedId);
+db_js_1.log.debug('AUDITED', auditRecord, ret.insertedId);
 }
 _sessionOpt() {
 return this.session ? { session: this.session } : {};
@@ -1412,8 +1418,8 @@ export default class Mongo extends Db {
 for await (let key of Object.keys(update)) {
 let shouldBeHashed = /^__hashed_(.+)$/.test(key);
 if (shouldBeHashed) {
-let salt = await
-let hash = await
+let salt = await bcrypt_1.default.genSalt(saltRounds);
+let hash = await bcrypt_1.default.hash(update[key], salt);
 update[key] = { salt, hash };
 }
 }
@@ -1437,5 +1443,6 @@ export default class Mongo extends Db {
 return ret;
 }
 }
+exports.default = Mongo;
 module.exports = Mongo;
 //# sourceMappingURL=mongo.js.map
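
The last hunk adds exports.default = Mongo; just above the pre-existing module.exports = Mongo;. Because the module.exports reassignment runs last, plain require() callers still receive the class directly, while callers compiled with esModuleInterop resolve it through the __importDefault wrapper shown at the top of the file. A rough consumer-side sketch; the import specifier and connection string below are hypothetical, not taken from the package docs:

    // CommonJS consumer: module.exports = Mongo is the final assignment,
    // so require() returns the Mongo class itself
    const Mongo = require("cry-db/dist/mongo.js");
    const store = new Mongo("mydb", "mongodb://localhost:27017");

    // A consumer compiled by tsc to CommonJS would instead emit roughly:
    //   const mongo_js_1 = __importDefault(require("cry-db/dist/mongo.js"));
    //   new mongo_js_1.default("mydb", "mongodb://localhost:27017");
    // __importDefault wraps the non-__esModule value as { default: Mongo },
    // so both styles end up constructing the same class.
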