cry-db 2.3.4 → 2.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/base.d.mts +1 -1
- package/dist/base.d.mts.map +1 -1
- package/dist/base.mjs +6 -4
- package/dist/base.mjs.map +1 -1
- package/dist/db.mjs.map +1 -1
- package/dist/mongo.d.mts +3 -1
- package/dist/mongo.d.mts.map +1 -1
- package/dist/mongo.mjs +128 -140
- package/dist/mongo.mjs.map +1 -1
- package/dist/repo.d.mts.map +1 -1
- package/dist/repo.mjs +1 -1
- package/dist/repo.mjs.map +1 -1
- package/dist/types.d.mts +1 -0
- package/dist/types.d.mts.map +1 -1
- package/dist/types.mjs.map +1 -1
- package/package.json +2 -2
package/dist/mongo.mjs
CHANGED
@@ -152,24 +152,8 @@ export class Mongo extends Db {
  }
  log.debug('find called', collection, query, opts);
  let ret = await this.executeTransactionally(collection, async (conn) => {
- let
-
- optsIn.readPreference = opts.readPreference;
- if (this.session)
- optsIn.session = this.session;
- let r = conn.find(query, optsIn);
- if (opts.project)
- r = r.project(opts.project);
- if (opts.sort)
- r = r.sort(opts.sort);
- if (opts.skip)
- r = r.skip(opts.skip);
- if (opts.limit)
- r = r.limit(opts.limit);
- if (opts.collation)
- r = r.collation(opts.collation);
- let res = await r.toArray();
- return this._processReturnedObject(res);
+ let r = this._applyQueryOpts(conn.find(query, this._buildFindOptions(opts)), opts);
+ return this._processReturnedObject(await r.toArray());
  }, false, { operation: "find", collection, query, opts });
  log.debug('find returns', ret);
  return ret;

@@ -178,22 +162,7 @@ export class Mongo extends Db {
  assert(collection);
  query = this.replaceIds(query);
  let ret = await this.executeTransactionally(collection, async (conn) => {
- let
- if (opts.readPreference)
- optsIn.readPreference = opts.readPreference;
- if (this.session)
- optsIn.session = this.session;
- let r = conn.find(query, optsIn);
- if (opts.project)
- r = r.project(opts.project);
- if (opts.sort)
- r = r.sort(opts.sort);
- if (opts.skip)
- r = r.skip(opts.skip);
- if (opts.limit)
- r = r.limit(opts.limit);
- if (opts.collation)
- r = r.collation(opts.collation);
+ let r = this._applyQueryOpts(conn.find(query, this._buildFindOptions(opts)), opts);
  return this._processReturnedObject(await r.toArray());
  }, false, { operation: "findAll", collection, query, opts });
  log.debug('findAll returns', ret);

@@ -203,24 +172,7 @@ export class Mongo extends Db {
  query = this._createQueryForNewer(timestamp, query);
  log.debug('findNewer called', collection, timestamp, query, opts);
  let ret = await this.executeTransactionally(collection, async (conn) => {
- let
- if (opts.readPreference)
- optsIn.readPreference = opts.readPreference;
- if (this.session)
- optsIn.session = this.session;
- let r = conn
- .find(query, optsIn)
- .sort({ _ts: 1 });
- if (opts.project)
- r = r.project(opts.project);
- if (opts.sort)
- r = r.sort(opts.sort);
- if (opts.skip)
- r = r.skip(opts.skip);
- if (opts.limit)
- r = r.limit(opts.limit);
- if (opts.collation)
- r = r.collation(opts.collation);
+ let r = this._applyQueryOpts(conn.find(query, this._buildFindOptions(opts)).sort({ _ts: 1 }), opts);
  return this._processReturnedObject(await r.toArray());
  }, false, { operation: "findNewer", collection, timestamp, query, opts });
  log.debug('findNewer returns', ret);
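The three read paths above (find, findAll, findNewer) now share two private helpers, _buildFindOptions and _applyQueryOpts, whose definitions appear in the final hunk of this file. The caller-facing opts keys are unchanged; a hypothetical usage sketch (collection and field names invented for illustration):

    // opts.readPreference feeds _buildFindOptions; project/sort/skip/limit/collation
    // are chained onto the cursor by _applyQueryOpts, just as the removed inline code did.
    const docs = await mongo.find('orders',
        { status: 'open' },
        { project: { total: 1 }, sort: { _ts: -1 }, limit: 20 });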
@@ -229,30 +181,15 @@ export class Mongo extends Db {
  async findNewerMany(spec = []) {
  var _a;
  log.debug('findNewerMany called', spec);
+ const dbName = this.db; // Capture db at operation start
  let conn = await this.connect();
  const getOneColl = async (coll) => {
  let query = this._createQueryForNewer(coll.timestamp, coll.query || {});
  if (process.env.MONGO_DEBUG_FINDNEWERMANY) {
  log.debug("findNewerMany <-", coll.collection, coll.timestamp, coll.query, " -> ", JSON.stringify(query));
  }
-
-
- .collection(coll.collection)
- .find(query, {
- // readPreference: ReadPreference.SECONDARY_PREFERRED
- })
- .sort({ _ts: 1 });
- let opts = coll.opts || {};
- if (opts.project)
- r = r.project(opts.project);
- if (opts.sort)
- r = r.sort(opts.sort);
- if (opts.skip)
- r = r.skip(opts.skip);
- if (opts.limit)
- r = r.limit(opts.limit);
- if (opts.collation)
- r = r.collation(opts.collation);
+ const opts = coll.opts || {};
+ let r = this._applyQueryOpts(conn.db(dbName).collection(coll.collection).find(query, {}).sort({ _ts: 1 }), opts);
  let data = await r.toArray();
  if (process.env.MONGO_DEBUG_FINDNEWERMANY) {
  log.debug("findNewerMany ->", coll.collection, JSON.stringify(data, null, 2));
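From this hunk on, most operations copy this.db into a local dbName before their first await ("Capture db at operation start") and thread that value through connections, publish payloads, and audit records. The apparent intent is that an operation keeps using the database that was selected when it began, even if this.db is reassigned while the call is still in flight. A sketch of that behavior with invented names and constructor options:

    // Hypothetical illustration of the capture semantics (options and names invented):
    const mongo = new Mongo({ url: 'mongodb://localhost:27017', db: 'tenant_a' });
    const pending = mongo.findNewerMany([{ collection: 'orders', timestamp: 0 }]); // dbName = 'tenant_a'
    mongo.db = 'tenant_b';   // however the application switches databases, shown here as a plain property write
    await pending;           // still resolves against tenant_a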
@@ -281,11 +218,12 @@ export class Mongo extends Db {
  assert(collections);
  assert(collections instanceof Array);
  log.debug('latestTimestamps called', collections);
+ const dbName = this.db; // Capture db at operation start
  let conn = await this.connect();
  const getOne = async (collection) => {
  var _a;
  let cursor = conn
- .db(
+ .db(dbName)
  .collection(collection)
  .find({}, this._sessionOpt())
  .project({ _ts: 1 })

@@ -308,9 +246,10 @@ export class Mongo extends Db {
  var _a;
  assert(collection);
  log.debug('latestTimestamp called', collection);
+ const dbName = this.db; // Capture db at operation start
  let conn = await this.connect();
  let cursor = conn
- .db(
+ .db(dbName)
  .collection(collection)
  .find({}, this._sessionOpt())
  .project({ _ts: 1 })

@@ -405,11 +344,12 @@ export class Mongo extends Db {
  assert(id);
  if (!id)
  return null;
+ const dbName = this.db; // Capture db at operation start
  let query = {
  _id: Mongo._toId(id),
  // _deleted: { $exists: false }
  };
- log.debug('findById called',
+ log.debug('findById called', dbName, collection, id, projection);
  log.trace('findById executing with query', collection, query, projection);
  let ret = await this.executeTransactionally(collection, async (conn) => {
  let r = await conn.findOne(query, { ...(projection ? { projection } : {}), ...this._sessionOpt() });

@@ -424,6 +364,7 @@ export class Mongo extends Db {
  assert(collection);
  assert(query);
  assert(update);
+ const dbName = this.db; // Capture db at operation start
  query = this.replaceIds(query);
  update = this.replaceIds(update);
  let opts = {

@@ -437,14 +378,14 @@ export class Mongo extends Db {
  let obj = await this.executeTransactionally(collection, async (conn, client) => {
  update.$set = update.$set || {};
  if (seqKeys)
- await this._processSequenceField(client, collection, update.$set, seqKeys);
+ await this._processSequenceField(client, dbName, collection, update.$set, seqKeys);
  if (update.$set === undefined || Object.keys(update.$set).length === 0)
  delete update.$set;
  let res = await conn.findOneAndUpdate(query, update, opts);
  if (!res)
  return null;
  let resObj = this._removeUnchanged(res, update, !!(options === null || options === void 0 ? void 0 : options.returnFullObject));
- await this._publishAndAudit('update',
+ await this._publishAndAudit('update', dbName, collection, resObj);
  return resObj;
  }, !!seqKeys, { operation: "updateOne", collection, query, update, options });
  log.debug('updateOne returns', obj);

@@ -453,6 +394,7 @@ export class Mongo extends Db {
  async save(collection, update, id = undefined, options = { returnFullObject: false }) {
  assert(collection);
  assert(update);
+ const dbName = this.db; // Capture db at operation start
  update = this.replaceIds(update);
  let opts = {
  upsert: true,

@@ -466,14 +408,14 @@ export class Mongo extends Db {
  let obj = await this.executeTransactionally(collection, async (conn, client) => {
  update.$set = update.$set || {};
  if (seqKeys)
- await this._processSequenceField(client, collection, update.$set, seqKeys);
+ await this._processSequenceField(client, dbName, collection, update.$set, seqKeys);
  if (update.$set === undefined || Object.keys(update.$set).length === 0)
  delete update.$set;
  let res = await conn.findOneAndUpdate({ _id }, update, opts);
  if (!res)
  return null;
  let resObj = this._removeUnchanged(res, update, !!(options === null || options === void 0 ? void 0 : options.returnFullObject));
- await this._publishAndAudit('update',
+ await this._publishAndAudit('update', dbName, collection, resObj);
  return resObj;
  }, !!seqKeys, { operation: "save", collection, _id, update, options });
  log.debug('save returns', obj);

@@ -486,6 +428,7 @@ export class Mongo extends Db {
  // if (this.syncSupport) log.warn("update does not increase _csq, avoit it.")
  if (!Object.keys(update).length)
  return { n: 0, ok: false };
+ const dbName = this.db; // Capture db at operation start
  query = this.replaceIds(query);
  update = this.replaceIds(update);
  let opts = {

@@ -499,7 +442,7 @@ export class Mongo extends Db {
  let obj = await this.executeTransactionally(collection, async (conn, client) => {
  update.$set = update.$set || {};
  if (seqKeys)
- await this._processSequenceField(client, collection, update.$set, seqKeys);
+ await this._processSequenceField(client, dbName, collection, update.$set, seqKeys);
  if (update.$set === undefined || Object.keys(update.$set).length === 0)
  delete update.$set;
  log.debug('update called', collection, query, update);

@@ -508,7 +451,7 @@ export class Mongo extends Db {
  n: res.modifiedCount,
  ok: !!res.acknowledged
  };
- await this._publishAndAudit('updateMany',
+ await this._publishAndAudit('updateMany', dbName, collection, resObj);
  return resObj;
  }, !!seqKeys, { operation: "update", collection, query, update });
  log.debug('update returns', obj);

@@ -520,6 +463,7 @@ export class Mongo extends Db {
  assert(update);
  assert(typeof update === 'object', 'update must be an object');
  // if (!Object.keys(update).length) return null;
+ const dbName = this.db; // Capture db at operation start
  query = this.replaceIds(query);
  update = this.replaceIds(update);
  let opts = {
@@ -537,15 +481,18 @@ export class Mongo extends Db {
  let ret = await this.executeTransactionally(collection, async (conn, client) => {
  update.$set = update.$set || {};
  if (seqKeys)
- await this._processSequenceField(client, collection, update.$set, seqKeys);
+ await this._processSequenceField(client, dbName, collection, update.$set, seqKeys);
  if (update.$set === undefined || Object.keys(update.$set).length === 0)
  delete update.$set;
  let ret = await conn.findOneAndUpdate(query, update, opts);
  if (ret) {
-
-
-
-
+ // Detect if this was an insert or update by checking _rev
+ const isInsert = this.revisions && ret._rev === 1;
+ let oper = isInsert ? "insert" : "update";
+ // For inserts, use full record to capture query fields; for updates, only changed fields
+ let retObj = isInsert ? ret : this._removeUnchanged(ret, update, !!(options === null || options === void 0 ? void 0 : options.returnFullObject));
+ await this._publishAndAudit(oper, dbName, collection, retObj);
+ return this._removeUnchanged(ret, update, !!(options === null || options === void 0 ? void 0 : options.returnFullObject));
  }
  ;
  return ret;
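This upsert path now classifies the result before publishing: when revisions are enabled, a document that comes back with _rev === 1 is treated as a fresh insert (insert() sets _rev = 1, as a later hunk shows) and is published in full, while anything else is published as an update with only the changed fields. A minimal sketch of that decision, restated outside the method (not the package's code):

    // Sketch of the insert-vs-update classification used above:
    function classifyUpsertResult(doc, revisionsEnabled) {
        // a document created by the upsert starts its life at revision 1
        const isInsert = revisionsEnabled && doc._rev === 1;
        return isInsert ? 'insert' : 'update';
    }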
@@ -558,6 +505,7 @@ export class Mongo extends Db {
  assert(insert, "insert can't be null");
  assert(typeof insert === "object", "insert must be an object");
  log.debug('insert called', collection, insert);
+ const dbName = this.db; // Capture db at operation start
  insert = this.replaceIds(insert);
  if (this.revisions) {
  insert._rev = 1;

@@ -567,10 +515,10 @@ export class Mongo extends Db {
  let seqKeys = this._findSequenceKeys(insert);
  let ret = await this.executeTransactionally(collection, async (conn, client) => {
  if (insert)
- insert = await this._processSequenceField(client, collection, insert, seqKeys);
+ insert = await this._processSequenceField(client, dbName, collection, insert, seqKeys);
  let obj = await conn.insertOne(insert, this._sessionOpt());
  let fullObj = { _id: obj.insertedId, ...insert };
- await this._publishAndAudit('insert',
+ await this._publishAndAudit('insert', dbName, collection, fullObj);
  return fullObj;
  }, !!seqKeys, { operation: "insert", collection, insert });
  log.debug('insert returns', ret);

@@ -582,12 +530,13 @@ export class Mongo extends Db {
  assert(batch, "batch can't be null");
  assert(batch instanceof Array, "batch must be an Array");
  log.debug('upsertBatch called', collection, batch);
+ const dbName = this.db; // Capture db at operation start
  batch = this.replaceIds(batch);
  for (let i = 0; i < batch.length; i++)
  await this._processHashedKeys((_a = batch[i]) === null || _a === void 0 ? void 0 : _a.update);
  let ret = await this.executeTransactionally(collection, async (conn, client) => {
  var _a;
- await this._processSequenceFieldForMany(client, collection, batch.map(b => b.update));
+ await this._processSequenceFieldForMany(client, dbName, collection, batch.map(b => b.update));
  let batchData = [];
  let changes = [];
  for await (let part of batch) {

@@ -631,10 +580,10 @@ export class Mongo extends Db {
  }
  if (this.emittingPublishEvents || this.auditing) {
  await this.emit("publish", {
- channel: `db/${
+ channel: `db/${dbName}/${collection}`,
  payload: {
  operation: "batch",
- db:
+ db: dbName,
  collection,
  data: batchData
  }

@@ -642,10 +591,10 @@ export class Mongo extends Db {
  }
  if (this.emittingPublishRevEvents) {
  await this.emit("publishRev", {
- channel: `dbrev/${
+ channel: `dbrev/${dbName}/${collection}`,
  payload: {
  operation: "batch",
- db:
+ db: dbName,
  collection,
  data: batchData.map(item => ({
  operation: item.operation,

@@ -665,13 +614,14 @@ export class Mongo extends Db {
  assert(insert, "insert can't be null");
  assert(insert instanceof Array, "insert must be an Array");
  log.debug('insertMany called', collection, insert);
+ const dbName = this.db; // Capture db at operation start
  insert = this.replaceIds(insert);
  for (let i = 0; i < insert.length; i++)
  await this._processHashedKeys(insert[i]);
  if (this.revisions)
  insert.forEach(ins => { ins._rev = 1; ins._ts = Base.timestamp(); });
  let ret = await this.executeTransactionally(collection, async (conn, client) => {
- await this._processSequenceFieldForMany(client, collection, insert);
+ await this._processSequenceFieldForMany(client, dbName, collection, insert);
  let obj = await conn.insertMany(insert, this._sessionOpt());
  let ret = [];
  for (let ns of Object.keys(obj.insertedIds)) {

@@ -685,7 +635,7 @@ export class Mongo extends Db {
  if (this.emittingPublishEvents || this.emittingPublishRevEvents || this.auditing) {
  for await (let rec of ret) {
  if (rec)
- await this._publishAndAudit('insert',
+ await this._publishAndAudit('insert', dbName, collection, rec);
  }
  }
  return ret;

@@ -696,6 +646,7 @@ export class Mongo extends Db {
  async deleteOne(collection, query) {
  assert(collection);
  assert(query);
+ const dbName = this.db; // Capture db at operation start
  query = this.replaceIds(query);
  if (!this.softdelete) {
  const opts = this._sessionOpt();

@@ -703,7 +654,7 @@ export class Mongo extends Db {
  let ret = await this.executeTransactionally(collection, async (conn) => {
  let obj = await conn.findOneAndDelete(query, opts);
  if (obj)
- await this._publishAndAudit('delete',
+ await this._publishAndAudit('delete', dbName, collection, obj);
  return obj;
  }, false, { operation: "deleteOne", collection, query, softdelete: this.softdelete });
  log.debug('deleteOne returns', ret);

@@ -725,7 +676,7 @@ export class Mongo extends Db {
  // if (this.syncSupport) del.$set._csq = await this._getNextCollectionUpdateSeqNo(collection, client)
  let obj = await conn.findOneAndUpdate(query, del, opts);
  if (obj)
- await this._publishAndAudit('delete',
+ await this._publishAndAudit('delete', dbName, collection, obj);
  return obj;
  }, false, { operation: "deleteOne", collection, query, softdelete: this.softdelete });
  log.debug('deleteOne returns', ret);

@@ -738,6 +689,7 @@ export class Mongo extends Db {
  returnDocument: "after",
  ...this._sessionOpt()
  };
+ const dbName = this.db; // Capture db at operation start
  query = this.replaceIds(query);
  log.debug('blockOne called', collection, query);
  let ret = await this.executeTransactionally(collection, async (conn) => {

@@ -753,7 +705,7 @@ export class Mongo extends Db {
  return {
  ok: false
  };
- await this._publishAndAudit('block',
+ await this._publishAndAudit('block', dbName, collection, obj);
  return obj;
  }, false, { operation: "blockOne", collection, query });
  log.debug('blockOne returns', ret);

@@ -765,6 +717,7 @@ export class Mongo extends Db {
  returnDocument: "after",
  ...this._sessionOpt()
  };
+ const dbName = this.db; // Capture db at operation start
  query = this.replaceIds(query);
  log.debug('unblockOne called', collection, query);
  let ret = await this.executeTransactionally(collection, async (conn) => {

@@ -780,7 +733,7 @@ export class Mongo extends Db {
  return {
  ok: false
  };
- await this._publishAndAudit('unblock',
+ await this._publishAndAudit('unblock', dbName, collection, obj);
  return obj;
  }, false, { operation: "unblockOne", collection, query });
  log.debug('unblockOne returns', ret);

@@ -789,6 +742,7 @@ export class Mongo extends Db {
  async hardDeleteOne(collection, query) {
  assert(collection);
  assert(query);
+ const dbName = this.db; // Capture db at operation start
  if (typeof query === "string" || typeof query === "number" || query instanceof ObjectId) {
  query = { _id: query };
  }

@@ -801,7 +755,7 @@ export class Mongo extends Db {
  let ret = await this.executeTransactionally(collection, async (conn) => {
  let obj = await conn.findOneAndDelete(query, opts);
  if (obj) {
- await this._publishAndAudit('delete',
+ await this._publishAndAudit('delete', dbName, collection, obj);
  }
  return obj;
  }, false, { operation: "hardDeleteOne", collection, query });

@@ -812,6 +766,7 @@ export class Mongo extends Db {
  assert(collection);
  assert(query);
  // if (this.syncSupport) log.warn("delete does not increase _csq, avoit it.")
+ const dbName = this.db; // Capture db at operation start
  query = this.replaceIds(query);
  if (!this.softdelete) {
  const opts = this._sessionOpt();

@@ -822,7 +777,7 @@ export class Mongo extends Db {
  n: obj.deletedCount,
  ok: !!obj.acknowledged
  };
- await this._publishAndAudit('deleteMany',
+ await this._publishAndAudit('deleteMany', dbName, collection, resObj);
  return resObj;
  }, false, { operation: "delete", collection, query, softdelete: this.softdelete });
  log.debug('delete returns', ret);

@@ -847,7 +802,7 @@ export class Mongo extends Db {
  n: obj.modifiedCount,
  ok: !!obj.acknowledged
  };
- await this._publishAndAudit('deleteMany',
+ await this._publishAndAudit('deleteMany', dbName, collection, resObj);
  return resObj;
  }, false, { operation: "delete", collection, query, softdelete: this.softdelete });
  log.debug('delete returns', ret);

@@ -858,6 +813,7 @@ export class Mongo extends Db {
  assert(collection);
  assert(query);
  // if (this.syncSupport) log.warn("hardDelete does not increase _csq, avoit it.")
+ const dbName = this.db; // Capture db at operation start
  query = this.replaceIds(query);
  const opts = this._sessionOpt();
  log.debug('hardDelete called', collection, query);

@@ -867,7 +823,7 @@ export class Mongo extends Db {
  n: obj.deletedCount,
  ok: !!obj.acknowledged
  };
- await this._publishAndAudit('deleteMany',
+ await this._publishAndAudit('deleteMany', dbName, collection, resObj);
  return resObj;
  }, false, { operation: "hardDelete", collection, query, softdelete: this.softdelete });
  log.debug('hardDelete returns', ret);

@@ -886,8 +842,9 @@ export class Mongo extends Db {
  _field = field;
  if (!/^__hashed__/.test(_field))
  _field = "__hashed__" + _field;
+ const dbName = this.db; // Capture db at operation start
  let conn = await this.connect();
- let obj = await conn.db(
+ let obj = await conn.db(dbName).collection(collection).findOne(query, { projection: { [_field]: 1 }, ...this._sessionOpt() });
  if (!obj || !obj[_field]) {
  log.debug('testHash returns false', obj);
  return false;

@@ -963,18 +920,20 @@ export class Mongo extends Db {
  async dropCollection(collection) {
  assert(collection);
  log.debug('dropCollection called', this.auditCollections);
+ const dbName = this.db; // Capture db at operation start
  let client = await this.connect();
- let existing = await client.db(
+ let existing = await client.db(dbName).collections();
  if (existing.map((c) => c.collectionName).includes(collection)) {
- await client.db(
+ await client.db(dbName).dropCollection(collection);
  }
  log.debug('dropCollection returns');
  }
  async resetCollectionSync(collection) {
  assert(collection);
  log.debug('resetCollectionSync called for', collection);
+ const dbName = this.db; // Capture db at operation start
  let client = await this.connect();
- await client.db(
+ await client.db(dbName)
  .collection(SEQUENCES_COLLECTION)
  .findOneAndDelete({ collection });
  log.debug(`resetCollectionSync for ${collection} returns`);

@@ -983,11 +942,12 @@ export class Mongo extends Db {
  assert(collections);
  assert(collections instanceof Array);
  log.debug('dropCollections called', this.auditCollections);
+ const dbName = this.db; // Capture db at operation start
  let client = await this.connect();
- let existing = await client.db(
+ let existing = await client.db(dbName).collections();
  for await (let collection of collections) {
  if (existing.map((c) => c.collectionName).includes(collection)) {
- await client.db(
+ await client.db(dbName).dropCollection(collection);
  }
  }
  log.debug('dropCollections returns');

@@ -996,11 +956,12 @@ export class Mongo extends Db {
  assert(collections);
  assert(collections instanceof Array);
  log.debug('createCollections called', this.auditCollections);
+ const dbName = this.db; // Capture db at operation start
  let client = await this.connect();
  let existing = await this.getCollections();
  for await (let collection of collections) {
  if (!existing.includes(collection)) {
- await client.db(
+ await client.db(dbName).createCollection(collection);
  }
  }
  log.debug('createCollections returns');

@@ -1008,23 +969,25 @@ export class Mongo extends Db {
  async createCollection(collection) {
  assert(collection);
  log.debug('createCollection called', collection);
+ const dbName = this.db; // Capture db at operation start
  let client = await this.connect();
  let existing = await this.getCollections();
  if (!existing.includes(collection)) {
- await client.db(
+ await client.db(dbName).createCollection(collection);
  }
  log.debug('createCollection returns');
  }
  async dbLogPurge(collection, _id) {
  assert(collection);
  log.debug('dblogPurge called', collection, _id);
+ const dbName = this.db; // Capture db at operation start
  let ret = await this.executeTransactionally(collection, async () => {
- let cond = { db:
+ let cond = { db: dbName, collection, };
  if (_id !== undefined)
  cond._id = Mongo._toId(_id);
  let client = await this.connect();
  let ret = await client
- .db(
+ .db(dbName)
  .collection(this.auditCollectionName)
  .deleteMany(cond, this._sessionOpt());
  return {

@@ -1038,13 +1001,14 @@ export class Mongo extends Db {
  async dbLogGet(collection, _id) {
  assert(collection);
  log.debug('dblogGet called', collection, _id);
+ const dbName = this.db; // Capture db at operation start
  let ret = await this.executeTransactionally(collection, async () => {
- let cond = { db:
+ let cond = { db: dbName, collection };
  if (_id)
  cond.entityid = Mongo._toId(_id);
  let client = await this.connect();
  let ret = await client
- .db(
+ .db(dbName)
  .collection(this.auditCollectionName)
  .find(cond, this._sessionOpt())
  .sort({ _id: -1 })

@@ -1110,7 +1074,7 @@ export class Mongo extends Db {
  // this.session = undefined;
  return this.client;
  }
- async close() {
+ async close(force = true) {
  // 1. Handle active transactions
  if (this.session) {
  try {
@@ -1136,10 +1100,12 @@ export class Mongo extends Db {
  }
  // 3. Close parent connection
  try {
- await super.close();
+ await super.close(force);
  log.info("connection closed");
  }
- catch {
+ catch (err) {
+ log.debug(`close parent connection failed: ${err.message || err}`);
+ }
  // 4. Clean up state
  this.session = undefined;
  }
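close() now takes a force flag (default true) and forwards it to super.close(force); failures while closing the parent connection are logged at debug level instead of being swallowed without a trace. Assuming the flag is ultimately passed through to the MongoDB driver's MongoClient.close(force), callers can pick the behavior they want:

    // Hypothetical usage sketch:
    await mongo.close();        // force defaults to true
    await mongo.close(false);   // opt out of forcing, if the parent forwards the flag to the driver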
@@ -1236,7 +1202,7 @@ export class Mongo extends Db {
  log.error(`abortTransaction error ${err.message || err}`);
  }
  }
- async _try_once(useTransaction, f, collection) {
+ async _try_once(useTransaction, f, collection, dbName) {
  try {
  let conn = await this.connect();
  if (useTransaction)

@@ -1246,8 +1212,8 @@ export class Mongo extends Db {
  * prewarm Mongo to avoid mongo bug
  * "Cannot run getMore on cursor ... which was created in session ..., without an lsid
  */
- await (conn.db(
- let ret = await f(conn.db(
+ // await (conn.db(dbName).collection(collection).findOne({}))
+ let ret = await f(conn.db(dbName).collection(collection), conn);
  if (useTransaction)
  await this.commitTransaction();
  return ret;

@@ -1257,20 +1223,23 @@ export class Mongo extends Db {
  if (useTransaction)
  await this.abortTransaction();
  }
- catch {
+ catch (abortErr) {
+ log.debug(`abort transaction failed: ${abortErr.message || abortErr}`);
+ }
  throw err;
  }
  }
  async executeTransactionally(collection, f, useTransaction = false, debugObject) {
+ const dbName = this.db; // Capture db at operation start
  try {
  // NOTE - add this.syncSuppoer to use transactions for _csq increases
  // Adds A LOT of performance penalty - soft.js takes 20x longer!!! (5.2s vs 250ms)
  //let useTransaction = /* this.syncSupport || */ this._shouldAuditCollection(this.db, collection);
  //if (useTransaction && !await this.inTransaction()) useTransaction=true;
- return await this._try_once(useTransaction, f, collection);
+ return await this._try_once(useTransaction, f, collection, dbName);
  }
  catch (err) {
- log.error(`Mongo command has failed for ${
+ log.error(`Mongo command has failed for ${dbName}.${collection} - ${(this.session ? "ROLLBACK - " : "")} ${err.message || err}`);
  log.error(debugObject);
  log.debug(err);
  let x = ((err === null || err === void 0 ? void 0 : err.message) || (err === null || err === void 0 ? void 0 : err.toString()) || "").toString();

@@ -1291,7 +1260,7 @@ export class Mongo extends Db {
  await this.close();
  // a single retry
  await super.connect();
- let ret = await this._try_once(useTransaction, f, collection);
+ let ret = await this._try_once(useTransaction, f, collection, dbName);
  log.error("OK - Retry succeeded.");
  log.error("");
  return ret;

@@ -1340,13 +1309,13 @@ export class Mongo extends Db {
  let seqKeys = Object.keys(object).filter(key => object[key] === 'SEQ_NEXT' || object[key] === 'SEQ_LAST');
  return ((seqKeys === null || seqKeys === void 0 ? void 0 : seqKeys.length) > 0 || this.syncSupport) ? { seqKeys } : undefined;
  }
- async _processSequenceField(client, collection, insert, seqKeys) {
+ async _processSequenceField(client, dbName, collection, insert, seqKeys) {
  assert(this.client);
  // if (this.syncSupport) {
  // insert._csq = (await this._getNextCollectionUpdateSeqNo(collection, client));
  // }
  for await (let seqKey of (seqKeys === null || seqKeys === void 0 ? void 0 : seqKeys.seqKeys) || []) {
- let last = await this._findLastSequenceForKey(client.db(
+ let last = await this._findLastSequenceForKey(client.db(dbName).collection(collection), seqKey);
  if (last === undefined) {
  await this.createCollection(collection);
  last = 0;

@@ -1356,7 +1325,7 @@ export class Mongo extends Db {
  }
  return insert;
  }
- async _processSequenceFieldForMany(connection, collection, inserts) {
+ async _processSequenceFieldForMany(connection, dbName, collection, inserts) {
  assert(this.client);
  assert(connection);
  if (!(inserts === null || inserts === void 0 ? void 0 : inserts.length))

@@ -1372,12 +1341,12 @@ export class Mongo extends Db {
  if (!seqKeys.length)
  return inserts;
  for await (let seqKey of seqKeys) {
- let last = await this._findLastSequenceForKey(connection.db(
+ let last = await this._findLastSequenceForKey(connection.db(dbName).collection(collection), seqKey);
  if (last === undefined) {
  try {
  await this.createCollection(collection);
  }
- catch { /*
+ catch { /* collection may already exist */ }
  last = 0;
  }
  for (let insert of inserts) {
@@ -1417,12 +1386,8 @@ export class Mongo extends Db {
  _shouldAuditCollection(db, col, audited = this.auditedCollections) {
  if (!this.auditing)
  return false;
-
-
- if (new RegExp(m, "i").test(r))
- return true;
- }
- return false;
+ const fullName = ((db ? db + "." : "") + (col || "")).toLowerCase();
+ return audited.some(m => fullName.includes(m.toLowerCase()));
  }
  async _publishAndAudit(operation, db, collection, dataToPublish, noEmit) {
  if (!dataToPublish._id && !["deleteMany", "updateMany"].includes(operation))
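The audit filter is simplified: instead of testing each configured entry with new RegExp(m, "i"), it now builds a lowercase "db.collection" string and checks whether any configured entry is a case-insensitive substring of it, so entries that relied on regular-expression syntax appear to match literally now. A sketch of the new rule with invented values:

    // Hypothetical illustration of the substring match:
    const audited = ['orders', 'audit.log'];
    const fullName = ('shop' + '.' + 'orders').toLowerCase();                   // "shop.orders"
    const shouldAudit = audited.some(m => fullName.includes(m.toLowerCase()));  // true: "orders" is a substring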
@@ -1453,9 +1418,9 @@ export class Mongo extends Db {
  toPublishAll.push(toPublish);
  }
  }
- if (this._shouldAuditCollection(
+ if (this._shouldAuditCollection(db, collection)) {
  if (['insert', 'update', 'delete', 'block', 'unblock'].includes(operation))
- await this._writeAuditRecord(collection, operation, data);
+ await this._writeAuditRecord(db, collection, operation, data);
  }
  return toPublishAll;
  }

@@ -1482,6 +1447,7 @@ export class Mongo extends Db {
  operation,
  _id: data._id,
  _ts: data._ts,
+ _rev: data._rev,
  enquireLastTs: !data._ts,
  }
  };

@@ -1496,11 +1462,11 @@ export class Mongo extends Db {
  if (event === "publishRev")
  this.emitter.emit(event, what);
  }
- async _writeAuditRecord(collection, operation, data, user = this.user, audit = this.audit) {
+ async _writeAuditRecord(db, collection, operation, data, user = this.user, audit = this.audit) {
  if (!this.auditing)
  return;
  let client = await this.connect();
- let previousAuditRecords = await (await client.db(
+ let previousAuditRecords = await (await client.db(db).collection(this.auditCollectionName).aggregate([
  { $match: { entityid: Mongo._toId(data._id) } },
  { $sort: { rev: -1 } },
  { $limit: 1 }

@@ -1511,7 +1477,7 @@ export class Mongo extends Db {
  let dataNoId = { ...data };
  delete dataNoId._id;
  let auditRecord = {
- db:
+ db: db,
  collection: collection,
  entityid: Base.objectid(data._id),
  rev: ((previousAuditRecord === null || previousAuditRecord === void 0 ? void 0 : previousAuditRecord.rev) || 0) + 1,

@@ -1525,7 +1491,7 @@ export class Mongo extends Db {
  if (audit)
  auditRecord.audit = audit;
  log.trace('AUDITING', auditRecord);
- let ret = await client.db(
+ let ret = await client.db(db)
  .collection(this.auditCollectionName)
  .insertOne(auditRecord, this._sessionOpt());
  log.debug('AUDITED', auditRecord, ret.insertedId);
@@ -1533,6 +1499,28 @@ export class Mongo extends Db {
|
|
|
1533
1499
|
_sessionOpt() {
|
|
1534
1500
|
return this.session ? { session: this.session } : {};
|
|
1535
1501
|
}
|
|
1502
|
+
_buildFindOptions(opts) {
|
|
1503
|
+
const findOpts = {};
|
|
1504
|
+
if (opts.readPreference)
|
|
1505
|
+
findOpts.readPreference = opts.readPreference;
|
|
1506
|
+
if (this.session)
|
|
1507
|
+
findOpts.session = this.session;
|
|
1508
|
+
return findOpts;
|
|
1509
|
+
}
|
|
1510
|
+
_applyQueryOpts(cursor, opts) {
|
|
1511
|
+
let r = cursor;
|
|
1512
|
+
if (opts.project)
|
|
1513
|
+
r = r.project(opts.project);
|
|
1514
|
+
if (opts.sort)
|
|
1515
|
+
r = r.sort(opts.sort);
|
|
1516
|
+
if (opts.skip)
|
|
1517
|
+
r = r.skip(opts.skip);
|
|
1518
|
+
if (opts.limit)
|
|
1519
|
+
r = r.limit(opts.limit);
|
|
1520
|
+
if (opts.collation)
|
|
1521
|
+
r = r.collation(opts.collation);
|
|
1522
|
+
return r;
|
|
1523
|
+
}
|
|
1536
1524
|
async _processUpdateObject(update) {
|
|
1537
1525
|
await this._processHashedKeys(update);
|
|
1538
1526
|
for (let k in update) {
|