cry-db 2.1.33 → 2.1.36
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/base.d.ts.map +1 -1
- package/dist/base.js +28 -32
- package/dist/base.js.map +1 -1
- package/dist/db.js +9 -14
- package/dist/db.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +6 -39
- package/dist/index.js.map +1 -1
- package/dist/mongo.d.ts +5 -6
- package/dist/mongo.d.ts.map +1 -1
- package/dist/mongo.js +141 -149
- package/dist/mongo.js.map +1 -1
- package/dist/repo.d.ts +8 -8
- package/dist/repo.d.ts.map +1 -1
- package/dist/repo.js +9 -16
- package/dist/repo.js.map +1 -1
- package/dist/types.d.ts +4 -5
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js +1 -4
- package/dist/types.js.map +1 -1
- package/package.json +4 -2
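The largest change below is package/dist/mongo.js switching from the CommonJS build (require calls, `module.exports = Mongo`) to native ES modules: bcrypt, lodash-es `cloneDeep`, named `mongodb` exports, `TypedEmitter`, and the local `Db`/`log`, `SEQUENCES_COLLECTION`, and `Base` modules are imported directly, and the class becomes a named export. As a hedged sketch only (the import specifier and constructor arguments are assumptions; the diff shows the module's own code, not the package exports map), consuming code would shift roughly like this:

```js
// Hedged sketch, not from the package: specifier and connection details are assumed.
// 2.1.33 exposed the class via `module.exports = Mongo`; 2.1.36 exposes it as a named ESM export.
import { Mongo } from 'cry-db/dist/mongo.js'; // assumed path; previously loaded with require()

const store = new Mongo('mydb', 'mongodb://localhost:27017'); // constructor(db, url) as in the diff
store.on('publish', (what) => console.log('published', what)); // on/off/once wrap the internal TypedEmitter
```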
package/dist/mongo.js
CHANGED
@@ -1,16 +1,10 @@
-
-
-
-};
-
-
-
-const mongodb_1 = require("mongodb");
-const tiny_typed_emitter_1 = require("tiny-typed-emitter");
-const db_js_1 = require("./db.js");
-const types_js_1 = require("./types.js");
-const lodash_clonedeep_1 = __importDefault(require("lodash.clonedeep"));
-const base_js_1 = require("./base.js");
+import * as bcrypt from 'bcrypt';
+import { cloneDeep } from "lodash-es";
+import { ObjectId, ReadConcern, ReadPreference, Timestamp, WriteConcern } from 'mongodb';
+import { TypedEmitter } from "tiny-typed-emitter";
+import { Db, log } from './db.js';
+import { SEQUENCES_COLLECTION } from './types.js';
+import { Base } from "./base.js";
 const assert = (cond, msg) => {
 if (!cond) {
 console.log("assert failed", cond || msg);
@@ -20,13 +14,13 @@ const assert = (cond, msg) => {
 const saltRounds = 10;
 const TRANSACTION_OPTIONS = {
 defaultTransactionOptions: {
-readPreference: new mongodb_1.ReadPreference("primary"),
-readConcern: new mongodb_1.ReadConcern("local"),
-writeConcern: new mongodb_1.WriteConcern("majority")
+readPreference: new ReadPreference("primary"),
+readConcern: new ReadConcern("local"),
+writeConcern: new WriteConcern("majority")
 }
 };
-
-class Mongo extends db_js_1.Db {
+// export const DummyExportToFixTsCompilation = true;
+export class Mongo extends Db {
 constructor(db, url) {
 super(db, url);
 this.revisions = false;
@@ -37,21 +31,21 @@ class Mongo extends db_js_1.Db {
 this.auditing = false;
 this.auditCollectionName = "dblog";
 this.auditedCollections = this.auditCollections(process.env.AUDIT_COLLECTIONS || []);
-this.emitter = new tiny_typed_emitter_1.TypedEmitter();
+this.emitter = new TypedEmitter();
 this.user = undefined;
 this.audit = undefined;
-
+log.debug('new Mongo:', this.url, this.db);
 }
 on(evt, listener) {
-
+log.debug("on", evt, listener);
 this.emitter.on(evt, listener);
 }
 off(evt, listener) {
-
+log.debug("off", evt, listener);
 this.emitter.off(evt, listener);
 }
 once(evt, listener) {
-
+log.debug("off", evt, listener);
 this.emitter.off(evt, listener);
 }
 setUser(username) {
@@ -121,19 +115,19 @@ class Mongo extends db_js_1.Db {
 return this.emittingPublishEvents;
 }
 async distinct(collection, field) {
-
+log.debug('distinct called', collection, field);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 return await conn.distinct(field);
 }, false, { operation: "distinct", collection, field });
-
+log.debug('distinct returns', ret);
 return ret;
 }
 async count(collection, query = {}, opts = {}) {
-
+log.debug('distinct called', collection, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 return await conn.countDocuments(query, opts);
 }, false, { operation: "count", collection, query, opts });
-
+log.debug('count returns', ret);
 return ret;
 }
 async find(collection, query = {}, opts = {}) {
@@ -143,7 +137,7 @@ class Mongo extends db_js_1.Db {
 if (!query._deleted)
 query._deleted = { $exists: false };
 }
-
+log.debug('find called', collection, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -164,7 +158,7 @@ class Mongo extends db_js_1.Db {
 let res = await r.toArray();
 return this._processReturnedObject(res);
 }, false, { operation: "find", collection, query, opts });
-
+log.debug('find returns', ret);
 return ret;
 }
 async findAll(collection, query = {}, opts = {}) {
@@ -189,12 +183,12 @@ class Mongo extends db_js_1.Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findAll", collection, query, opts });
-
+log.debug('findAll returns', ret);
 return ret;
 }
 async findNewer(collection, timestamp, query = {}, opts = {}) {
 query = this._createQueryForNewer(timestamp, query);
-
+log.debug('findNewer called', collection, timestamp, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -216,11 +210,11 @@ class Mongo extends db_js_1.Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findNewer", collection, timestamp, query, opts });
-
+log.debug('findNewer returns', ret);
 return ret;
 }
 async findNewerMany(spec = []) {
-
+log.debug('findNewerMany called', spec);
 let conn = await this.connect();
 const getOneColl = async (coll) => {
 let query = this._createQueryForNewer(coll.timestamp, coll.query);
@@ -253,7 +247,7 @@ class Mongo extends db_js_1.Db {
 _createQueryForNewer(timestamp, query) {
 let ts = (timestamp === 1 || timestamp === "1" || timestamp === "0" || timestamp === 0)
 ? {}
-: { _ts: { $gt: base_js_1.Base.timestamp(timestamp) } };
+: { _ts: { $gt: Base.timestamp(timestamp) } };
 query = {
 ...ts,
 ...(query || {}),
@@ -263,7 +257,7 @@ class Mongo extends db_js_1.Db {
 }
 async findAfter(collection, csq, query = {}, opts = {}) {
 query._csq = { $gt: csq };
-
+log.debug('findAfter called', collection, csq, query, opts);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let optsIn = {};
 if (opts.readPreference)
@@ -285,11 +279,11 @@ class Mongo extends db_js_1.Db {
 r = r.collation(opts.collation);
 return this._processReturnedObject(await r.toArray());
 }, false, { operation: "findNewer", collection, csq, query, opts });
-
+log.debug('findNewer returns', ret);
 return ret;
 }
 async findAfterMany(spec = []) {
-
+log.debug('findAfterMany called', spec);
 let conn = await this.connect();
 const getOneColl = async (coll) => {
 let r = conn
@@ -321,10 +315,10 @@ class Mongo extends db_js_1.Db {
 return out;
 }
 async findNewerFromDate(collection, date, query = {}, opts = {}) {
-let ts = new mongodb_1.Timestamp(0, new Date(date).valueOf() / 1000);
-
+let ts = new Timestamp(0, new Date(date).valueOf() / 1000);
+log.debug('findNewerFromDate called', collection, date, query, opts);
 let ret = await Mongo.prototype.findNewer.call(this, collection, ts, query, opts); // prevent calling Repo.findNewer
-
+log.debug('findNewerFromDate returns', ret);
 return ret;
 }
 async findOne(collection, query, projection) {
@@ -334,9 +328,9 @@ class Mongo extends db_js_1.Db {
 if (!query._deleted)
 query._deleted = { $exists: false };
 // if (!query._blocked) query._blocked = { $exists: false }; // intentionally - blocked records are returned
-
+log.debug('findOne called', collection, query, projection);
 let ret = await this.executeTransactionally(collection, async (conn) => await conn.findOne(query, { ...(projection ? { projection } : {}), ...this._sessionOpt() }), false, { operation: "findOne", collection, query, projection });
-
+log.debug('findOne returns', ret);
 return this._processReturnedObject(ret);
 }
 async findById(collection, id, projection) {
@@ -348,15 +342,15 @@ class Mongo extends db_js_1.Db {
 _id: Mongo._toId(id),
 // _deleted: { $exists: false }
 };
-
-
+log.debug('findById called', this.db, collection, id, projection);
+log.trace('findById executing with query', collection, query, projection);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let r = await conn.findOne(query, { ...(projection ? { projection } : {}), ...this._sessionOpt() });
 return r;
 }, false, { operation: "findById", collection, id, projection });
 if (ret === null || ret === void 0 ? void 0 : ret._deleted)
 ret = null;
-
+log.debug('findById returns', ret);
 return this._processReturnedObject(ret);
 }
 async updateOne(collection, query, update, options = { returnFullObject: false }) {
@@ -371,7 +365,7 @@ class Mongo extends db_js_1.Db {
 ...this._sessionOpt()
 };
 update = await this._processUpdateObject(update);
-
+log.debug('updateOne called', collection, query, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -386,7 +380,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('update', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "updateOne", collection, query, update, options });
-
+log.debug('updateOne returns', obj);
 return this._processReturnedObject(await obj);
 }
 async save(collection, update, id = undefined, options = { returnFullObject: false }) {
@@ -400,7 +394,7 @@ class Mongo extends db_js_1.Db {
 };
 let _id = Mongo.toId(id || update._id) || Mongo.newid();
 update = await this._processUpdateObject(update);
-
+log.debug('save called', collection, id, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -415,7 +409,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('update', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "save", collection, _id, update, options });
-
+log.debug('save returns', obj);
 return this._processReturnedObject(await obj);
 }
 async update(collection, query, update) {
@@ -423,7 +417,7 @@ class Mongo extends db_js_1.Db {
 assert(query);
 assert(update);
 if (this.syncSupport)
-
+log.warn("update does not increase _csq, avoit it.");
 if (!Object.keys(update).length)
 return { n: 0, ok: false };
 query = this.replaceIds(query);
@@ -434,7 +428,7 @@ class Mongo extends db_js_1.Db {
 ...this._sessionOpt()
 };
 update = await this._processUpdateObject(update);
-
+log.debug('update called', collection, query, update);
 let seqKeys = this._findSequenceKeys(update.$set);
 let obj = await this.executeTransactionally(collection, async (conn, client) => {
 update.$set = update.$set || {};
@@ -442,7 +436,7 @@ class Mongo extends db_js_1.Db {
 await this._processSequenceField(client, collection, update.$set, seqKeys);
 if (update.$set === undefined || Object.keys(update.$set).length === 0)
 delete update.$set;
-
+log.debug('update called', collection, query, update);
 let res = await conn.updateMany(query, update, opts);
 let resObj = {
 n: res.modifiedCount,
@@ -451,7 +445,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('updateMany', this.db, collection, resObj);
 return resObj;
 }, !!seqKeys, { operation: "update", collection, query, update });
-
+log.debug('update returns', obj);
 return await obj;
 }
 async upsert(collection, query, update, options = { returnFullObject: false }) {
@@ -468,10 +462,10 @@ class Mongo extends db_js_1.Db {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-
+log.debug('upsert called', collection, query, update);
 update = await this._processUpdateObject(update);
 let seqKeys = this._findSequenceKeys(update.$set);
-
+log.debug('upsert processed', collection, query, update);
 if (Object.keys(query).length === 0)
 query._id = Mongo.newid();
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
@@ -491,18 +485,18 @@ class Mongo extends db_js_1.Db {
 ;
 return ret;
 }, !!seqKeys, { operation: "upsert", query, update, options });
-
+log.debug('upsert returns', ret);
 return this._processReturnedObject(await ret);
 }
 async insert(collection, insert) {
 assert(collection, "collection can't be null");
 assert(insert, "insert can't be null");
 assert(typeof insert === "object", "insert must be an object");
-
+log.debug('insert called', collection, insert);
 insert = this.replaceIds(insert);
 if (this.revisions) {
 insert._rev = 1;
-insert._ts = base_js_1.Base.timestamp();
+insert._ts = Base.timestamp();
 }
 await this._processHashedKeys(insert);
 let seqKeys = this._findSequenceKeys(insert);
@@ -514,7 +508,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('insert', this.db, collection, fullObj);
 return fullObj;
 }, !!seqKeys, { operation: "insert", collection, insert });
-
+log.debug('insert returns', ret);
 return this._processReturnedObject(await ret);
 }
 async upsertBatch(collection, batch) {
@@ -522,7 +516,7 @@ class Mongo extends db_js_1.Db {
 assert(collection, "collection can't be null");
 assert(batch, "batch can't be null");
 assert(batch instanceof Array, "batch must be an Array");
-
+log.debug('upsertBatch called', collection, batch);
 batch = this.replaceIds(batch);
 for (let i = 0; i < batch.length; i++)
 await this._processHashedKeys((_a = batch[i]) === null || _a === void 0 ? void 0 : _a.update);
@@ -562,19 +556,19 @@ class Mongo extends db_js_1.Db {
 }
 return changes;
 }, false, { operation: "upsertBatch", collection, batch });
-
+log.debug('upsertBatch returns', ret);
 return ret;
 }
 async insertMany(collection, insert) {
 assert(collection, "collection can't be null");
 assert(insert, "insert can't be null");
 assert(insert instanceof Array, "insert must be an Array");
-
+log.debug('insertMany called', collection, insert);
 insert = this.replaceIds(insert);
 for (let i = 0; i < insert.length; i++)
 await this._processHashedKeys(insert[i]);
 if (this.revisions)
-insert.forEach(ins => { ins._rev = 1; ins._ts = base_js_1.Base.timestamp(); });
+insert.forEach(ins => { ins._rev = 1; ins._ts = Base.timestamp(); });
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 await this._processSequenceFieldForMany(client, collection, insert);
 let obj = await conn.insertMany(insert, this._sessionOpt());
@@ -594,7 +588,7 @@ class Mongo extends db_js_1.Db {
 }
 return ret;
 }, false, { operation: "insertMany", collection, insert });
-
+log.debug('insertMany returns', ret);
 return ret;
 }
 async deleteOne(collection, query) {
@@ -606,14 +600,14 @@ class Mongo extends db_js_1.Db {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-
+log.debug('deleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.findOneAndDelete(query, opts);
 if (obj.value)
 await this._publishAndAudit('delete', this.db, collection, obj.value);
 return obj.value;
 }, false, { operation: "deleteOne", collection, query, softdelete: this.softdelete });
-
+log.debug('deleteOne returns', ret);
 return ret;
 }
 else {
@@ -622,7 +616,7 @@ class Mongo extends db_js_1.Db {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-
+log.debug('deleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 let del = {
 $set: { _deleted: new Date() },
@@ -636,7 +630,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('delete', this.db, collection, obj.value);
 return obj.value;
 }, false, { operation: "deleteOne", collection, query, softdelete: this.softdelete });
-
+log.debug('deleteOne returns', ret);
 return ret;
 }
 }
@@ -647,7 +641,7 @@ class Mongo extends db_js_1.Db {
 ...this._sessionOpt()
 };
 query = this.replaceIds(query);
-
+log.debug('blockOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 query._blocked = { $exists: 0 };
 let update = {
@@ -669,7 +663,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('block', this.db, collection, retObj);
 return retObj;
 }, false, { operation: "blockOne", collection, query });
-
+log.debug('blockOne returns', ret);
 return ret;
 }
 async unblockOne(collection, query) {
@@ -679,7 +673,7 @@ class Mongo extends db_js_1.Db {
 ...this._sessionOpt()
 };
 query = this.replaceIds(query);
-
+log.debug('unblockOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn, client) => {
 query._blocked = { $exists: 1 };
 let update = {
@@ -700,7 +694,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('unblock', this.db, collection, retObj);
 return retObj;
 }, false, { operation: "unblockOne", collection, query });
-
+log.debug('unblockOne returns', ret);
 return ret;
 }
 async hardDeleteOne(collection, query) {
@@ -711,7 +705,7 @@ class Mongo extends db_js_1.Db {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-
+log.debug('hardDeleteOne called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.findOneAndDelete(query, opts);
 if (obj.value) {
@@ -719,21 +713,21 @@ class Mongo extends db_js_1.Db {
 }
 return obj.value;
 }, false, { operation: "hardDeleteOne", collection, query });
-
+log.debug('hardDeleteOne returns', ret);
 return ret;
 }
 async delete(collection, query) {
 assert(collection);
 assert(query);
 if (this.syncSupport)
-
+log.warn("delete does not increase _csq, avoit it.");
 query = this.replaceIds(query);
 if (!this.softdelete) {
 let opts = {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-
+log.debug('delete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.deleteMany(query, opts);
 let resObj = {
@@ -743,7 +737,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "delete", collection, query, softdelete: this.softdelete });
-
+log.debug('delete returns', ret);
 return ret;
 }
 else {
@@ -753,7 +747,7 @@ class Mongo extends db_js_1.Db {
 ...this._sessionOpt()
 };
 let date = new Date();
-
+log.debug('delete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.updateMany(query, { $set: { _deleted: date } }, opts);
 let resObj = {
@@ -763,7 +757,7 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "delete", collection, query, softdelete: this.softdelete });
-
+log.debug('delete returns', ret);
 return ret;
 }
 }
@@ -771,13 +765,13 @@ class Mongo extends db_js_1.Db {
 assert(collection);
 assert(query);
 if (this.syncSupport)
-
+log.warn("hardDelete does not increase _csq, avoit it.");
 query = this.replaceIds(query);
 let opts = {
 returnDocument: "after",
 ...this._sessionOpt()
 };
-
+log.debug('hardDelete called', collection, query);
 let ret = await this.executeTransactionally(collection, async (conn) => {
 let obj = await conn.deleteMany(query, opts);
 let resObj = {
@@ -787,12 +781,12 @@ class Mongo extends db_js_1.Db {
 await this._publishAndAudit('deleteMany', this.db, collection, resObj);
 return resObj;
 }, false, { operation: "hardDelete", collection, query, softdelete: this.softdelete });
-
+log.debug('hardDelete returns', ret);
 return ret;
 }
 async testHash(collection, query, field, unhashedValue) {
 let _field;
-
+log.debug('teshHash called', collection, query, field, unhashedValue);
 if (typeof field === "object") {
 if (Object.keys(field).length === 1)
 [_field, unhashedValue] = Object.entries(field)[0];
@@ -806,19 +800,19 @@ class Mongo extends db_js_1.Db {
 let conn = await this.connect();
 let obj = await conn.db(this.db).collection(collection).findOne(query, { projection: { [_field]: 1 }, ...this._sessionOpt() });
 if (!obj || !obj[_field]) {
-
+log.debug('teshHash returns false', obj);
 return false;
 }
-let res = await
-
+let res = await bcrypt.compare(unhashedValue, obj[_field].hash);
+log.debug('teshHash returns', res);
 return res;
 }
 async aggregate(collection, pipeline, opts = {
-readPreference: mongodb_1.ReadPreference.SECONDARY_PREFERRED,
+readPreference: ReadPreference.SECONDARY_PREFERRED,
 }) {
 assert(collection);
 assert(pipeline instanceof Array);
-
+log.debug('aggregate called', collection, pipeline);
 pipeline = this.replaceIds(pipeline);
 if (this.session)
 opts.session = this.session;
@@ -826,14 +820,14 @@ class Mongo extends db_js_1.Db {
 let res = await conn.aggregate(pipeline, opts).toArray();
 return res;
 }, false, { operation: "aggregate", collection, pipeline, opts });
-
+log.debug('aggregare returns', ret);
 return ret;
 }
 async isUnique(collection, field, value, id) {
 assert(collection);
 assert(field);
 assert(value);
-
+log.debug('isUnuqie called', collection, field, value, id);
 let _id = id === null || id === void 0 ? void 0 : id.toString();
 let matches = await this.executeTransactionally(collection, async (conn) => {
 let agg = await conn.find({ [field]: value });
@@ -847,13 +841,13 @@ class Mongo extends db_js_1.Db {
 return false;
 }
 }
-
+log.debug('isUnuqie returns', ret);
 return ret;
 }
 async collectFieldValues(collection, field, inArray = false, opts) {
 assert(collection);
 assert(field);
-
+log.debug('collectFieldValues called', collection, field);
 let pipeline = [
 { $group: { _id: '$' + field } },
 { $sort: { _id: 1 } }
@@ -869,32 +863,32 @@ class Mongo extends db_js_1.Db {
 return res;
 }, false, { operation: "collectFieldValues", collection, field, inArray, pipeline, opts });
 let ret = res === null || res === void 0 ? void 0 : res.map((v) => v._id);
-
+log.debug('collectFieldValues returns', ret);
 return ret;
 }
 async dropCollection(collection) {
 assert(collection);
-
+log.debug('dropCollection called', this.auditCollections);
 let client = await this.connect();
 let existing = await client.db(this.db).collections();
 if (existing.map((c) => c.s.name).includes(collection)) {
 await client.db(this.db).dropCollection(collection);
 }
-
+log.debug('dropCollection returns');
 }
 async resetCollectionSync(collection) {
 assert(collection);
-
+log.debug('resetCollectionSync called for', collection);
 let client = await this.connect();
 await client.db(this.db)
-.collection(types_js_1.SEQUENCES_COLLECTION)
+.collection(SEQUENCES_COLLECTION)
 .findOneAndDelete({ collection });
-
+log.debug(`resetCollectionSync for ${collection} returns`);
 }
 async dropCollections(collections) {
 assert(collections);
 assert(collections instanceof Array);
-
+log.debug('dropCollections called', this.auditCollections);
 let client = await this.connect();
 let existing = await client.db(this.db).collections();
 for await (let collection of collections) {
@@ -902,12 +896,12 @@ class Mongo extends db_js_1.Db {
 await client.db(this.db).dropCollection(collection);
 }
 }
-
+log.debug('dropCollections returns');
 }
 async createCollections(collections) {
 assert(collections);
 assert(collections instanceof Array);
-
+log.debug('createCollections called', this.auditCollections);
 let client = await this.connect();
 let existing = await this.getCollections();
 for await (let collection of collections) {
@@ -915,21 +909,21 @@ class Mongo extends db_js_1.Db {
 await client.db(this.db).createCollection(collection);
 }
 }
-
+log.debug('createCollections returns');
 }
 async createCollection(collection) {
 assert(collection);
-
+log.debug('createCollection called', collection);
 let client = await this.connect();
 let existing = await this.getCollections();
 if (!existing.includes(collection)) {
 await client.db(this.db).createCollection(collection);
 }
-
+log.debug('createCollection returns');
 }
 async dbLogPurge(collection, _id) {
 assert(collection);
-
+log.debug('dblogPurge called', collection, _id);
 let ret = await this.executeTransactionally(collection, async () => {
 let cond = { db: this.db, collection, };
 if (_id !== undefined)
@@ -944,12 +938,12 @@ class Mongo extends db_js_1.Db {
 n: ret.deletedCount
 };
 }, false, { operation: "dbLogPurge", collection, _id });
-
+log.debug('dblogPurge returns', ret);
 return ret;
 }
 async dbLogGet(collection, _id) {
 assert(collection);
-
+log.debug('dblogGet called', collection, _id);
 let ret = await this.executeTransactionally(collection, async () => {
 let cond = { db: this.db, collection };
 if (_id)
@@ -963,7 +957,7 @@ class Mongo extends db_js_1.Db {
 .toArray();
 return ret;
 }, false, { operation: "dbLogGet", collection, _id });
-
+log.debug('dblogGet returns', ret);
 return ret;
 }
 // HELPER FUNCTIONS
@@ -981,9 +975,9 @@ class Mongo extends db_js_1.Db {
 return undefined;
 if (typeof data === "symbol")
 return data.toString();
-if (data instanceof mongodb_1.ObjectId)
+if (data instanceof ObjectId)
 return data;
-if (data instanceof mongodb_1.Timestamp)
+if (data instanceof Timestamp)
 return data;
 if (data instanceof Date)
 return data;
@@ -994,16 +988,16 @@ class Mongo extends db_js_1.Db {
 if (data instanceof String)
 return data;
 if (typeof data === "string" && (data === null || data === void 0 ? void 0 : data.match(/^[0-9a-f]{24,24}$/g)))
-return new mongodb_1.ObjectId(data);
+return new ObjectId(data);
 if (typeof data === "string")
 return data;
 if (data instanceof Array) {
 return data.map(d => this.replaceIds(d));
 }
 if (typeof data == 'object' && (data === null || data === void 0 ? void 0 : data.t) && (data === null || data === void 0 ? void 0 : data.i) !== undefined)
-return base_js_1.Base.timestamp(data);
+return Base.timestamp(data);
 if (typeof data == 'object' && (data === null || data === void 0 ? void 0 : data.high) && (data === null || data === void 0 ? void 0 : data.low) !== undefined)
-return base_js_1.Base.timestamp(data);
+return Base.timestamp(data);
 if (typeof data == 'object') {
 for (let key in data) {
 data[key] = this.replaceIds(data[key]);
@@ -1026,14 +1020,14 @@ class Mongo extends db_js_1.Db {
 if (this.session)
 try {
 await this.session.endSession();
-
+log.info("session ended");
 }
 catch (err) {
-
+log.error(`Error ending session ${err.message}`);
 }
 try {
 await super.close();
-
+log.info("connection closed");
 }
 catch { /** intentionally */ }
 this.session = undefined;
@@ -1053,13 +1047,13 @@ class Mongo extends db_js_1.Db {
 let hadSession = !!this.session;
 if (!this.session) {
 this.session = client.startSession();
-
+log.info("session started");
 }
 let session = this.session;
 await session.withTransaction(async () => await funct(client, session));
 if (!hadSession) {
 session.endSession();
-
+log.info("session ended");
 this.session = undefined;
 }
 return;
@@ -1075,24 +1069,24 @@ class Mongo extends db_js_1.Db {
 try {
 if (!this.session) {
 this.session = client.startSession(TRANSACTION_OPTIONS);
-
+log.info("session started");
 }
 if (!await this.inTransaction()) {
 await this.session.startTransaction();
-
+log.info("transaction started");
 }
 }
 catch (err) {
-
+log.error('startTransaction error', err);
 try {
 if (this.session) {
 await this.session.endSession();
-
+log.info("session ended");
 }
 this.session = undefined;
 }
 catch (e) {
-
+log.error("startTransaction - error in endSession", e.message || e);
 }
 return;
 }
@@ -1105,13 +1099,13 @@ class Mongo extends db_js_1.Db {
 return;
 let session = this.session;
 await session.commitTransaction();
-
+log.info("transaction committed");
 session.endSession();
 this.session = undefined;
-
+log.info("session ended");
 }
 catch (err) {
-
+log.error(`commitTransaction error ${err.message || err}`);
 }
 }
 async abortTransaction() {
@@ -1122,13 +1116,13 @@ class Mongo extends db_js_1.Db {
 return;
 let session = this.session;
 await session.abortTransaction();
-
+log.info("transaction aborted");
 await session.endSession();
 this.session = undefined;
-
+log.info("session ended");
 }
 catch (err) {
-
+log.error(`abortTransaction error ${err.message || err}`);
 }
 }
 async _try_once(useTransaction, f, collection) {
@@ -1165,9 +1159,9 @@ class Mongo extends db_js_1.Db {
 return await this._try_once(useTransaction, f, collection);
 }
 catch (err) {
-
-
-
+log.error(`Mongo command has failed for ${this.db}.${collection} - ${(this.session ? "ROLLBACK - " : "")} ${err.message || err}`);
+log.error(debugObject);
+log.debug(err);
 let x = (err || "").toString();
 console.log('x');
 let isRepeatable = x.match(/Topology is closed, please connect/i)
@@ -1176,21 +1170,21 @@ class Mongo extends db_js_1.Db {
 || x.match(/Topology closed/);
 if (isRepeatable) {
 try {
-
+log.error("Trying to reopen connection and repeat as");
 await this.close();
 // a single retry
 await super.connect();
 let ret = await this._try_once(useTransaction, f, collection);
-
-
+log.error("OK - Retry succeeded.");
+log.error("");
 return ret;
 }
 catch (err2) {
 /* intentional */
 if (debugObject)
-
-
-
+log.error(debugObject);
+log.error(`FAIL - Retry failed: ${err2.message || err2}`);
+log.error("");
 }
 }
 throw err;
@@ -1214,7 +1208,7 @@ class Mongo extends db_js_1.Db {
 returnDocument: "after",
 };
 let nextSeq = await (conn.db(this.db)
-.collection(types_js_1.SEQUENCES_COLLECTION)
+.collection(SEQUENCES_COLLECTION)
 .findOneAndUpdate({ collection }, {
 $inc: { seq: 1 },
 $currentDate: { last: { $type: "date" }, ts: { $type: "timestamp" } }
@@ -1310,7 +1304,7 @@ class Mongo extends db_js_1.Db {
 async _publishAndAudit(operation, db, collection, dataToPublish, noEmit) {
 if (!dataToPublish._id && !["deleteMany", "updateMany"].includes(operation))
 throw new Error(`_publishAndAudit requires _id for ${operation}`);
-let data = (0, lodash_clonedeep_1.default)(dataToPublish);
+let data = cloneDeep(dataToPublish);
 if (data._id && /[0-9a-f]{24,24}/i.test(data._id.toString()))
 data._id = data._id.toHexString();
 let toPublish = undefined;
@@ -1341,7 +1335,7 @@ class Mongo extends db_js_1.Db {
 return toPublish;
 }
 emit(what) {
-
+log.debug("emitting publish", what);
 this.emitter.emit('publish', what);
 }
 async _writeAuditRecord(collection, operation, data, user = this.user, audit = this.audit) {
@@ -1361,9 +1355,9 @@ class Mongo extends db_js_1.Db {
 let auditRecord = {
 db: this.db,
 collection: collection,
-entityid: base_js_1.Base.objectid(data._id),
+entityid: Base.objectid(data._id),
 rev: ((previousAuditRecord === null || previousAuditRecord === void 0 ? void 0 : previousAuditRecord.rev) || 0) + 1,
-ts: base_js_1.Base.timestamp(),
+ts: Base.timestamp(),
 on: new Date(),
 operation: operation,
 changes: dataNoId,
@@ -1372,11 +1366,11 @@ class Mongo extends db_js_1.Db {
 auditRecord.user = user;
 if (audit)
 auditRecord.audit = audit;
-
+log.trace('AUDITING', auditRecord);
 let ret = await client.db(this.db)
 .collection(this.auditCollectionName)
 .insertOne(auditRecord, this._sessionOpt());
-
+log.debug('AUDITED', auditRecord, ret.insertedId);
 }
 _sessionOpt() {
 return this.session ? { session: this.session } : {};
@@ -1418,8 +1412,8 @@ class Mongo extends db_js_1.Db {
 for await (let key of Object.keys(update)) {
 let shouldBeHashed = /^__hashed_(.+)$/.test(key);
 if (shouldBeHashed) {
-let salt = await
-let hash = await
+let salt = await bcrypt.genSalt(saltRounds);
+let hash = await bcrypt.hash(update[key], salt);
 update[key] = { salt, hash };
 }
 }
@@ -1443,6 +1437,4 @@ class Mongo extends db_js_1.Db {
 return ret;
 }
 }
-exports.default = Mongo;
-module.exports = Mongo;
 //# sourceMappingURL=mongo.js.map
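For context on the bcrypt calls in the new code above (`genSalt`/`hash` for `__hashed_*` keys, `compare` in `testHash`), here is a minimal standalone sketch of that hash-and-verify pattern; the function and variable names are hypothetical and only illustrate the shape the diff shows being stored:

```js
// Illustrative only - mirrors the __hashed_* convention visible in the diff,
// where a hashed field is stored as { salt, hash } and later checked with bcrypt.compare.
import * as bcrypt from 'bcrypt';

const saltRounds = 10; // same constant the module defines

async function hashValue(plain) {
    const salt = await bcrypt.genSalt(saltRounds);
    const hash = await bcrypt.hash(plain, salt);
    return { salt, hash }; // shape stored in place of a __hashed_<name> key
}

async function verifyValue(candidate, stored) {
    return bcrypt.compare(candidate, stored.hash); // resolves true when candidate matches
}
```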