@omegup/msync 0.1.21 → 0.1.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/index.d.ts +4 -8
  2. package/index.esm.js +143 -67
  3. package/index.js +142 -67
  4. package/lib/accumulators/index.d.ts +10 -0
  5. package/lib/aggregate/$merge.d.ts +28 -0
  6. package/lib/aggregate/$upsert.d.ts +15 -0
  7. package/lib/aggregate/group/$group-merge.d.ts +12 -0
  8. package/lib/aggregate/group/index.d.ts +2 -0
  9. package/lib/aggregate/group/utils/sub-group.d.ts +6 -0
  10. package/lib/aggregate/group/utils/sub-merge.d.ts +20 -0
  11. package/lib/aggregate/index.d.ts +9 -0
  12. package/lib/aggregate/lookup/$lookup-delta.d.ts +10 -0
  13. package/lib/aggregate/lookup/$lookup-raw.d.ts +9 -0
  14. package/lib/aggregate/lookup/index.d.ts +12 -0
  15. package/lib/aggregate/match/$match-delta.d.ts +3 -0
  16. package/lib/aggregate/match/$match.d.ts +3 -0
  17. package/lib/aggregate/match/index.d.ts +2 -0
  18. package/lib/aggregate/mongo-stages.d.ts +13 -0
  19. package/lib/aggregate/out.d.ts +37 -0
  20. package/lib/aggregate/prefix.d.ts +33 -0
  21. package/lib/aggregate/raws.d.ts +17 -0
  22. package/lib/aggregate/set/$replace-with-each.d.ts +13 -0
  23. package/lib/aggregate/set/$set-delta.d.ts +5 -0
  24. package/lib/aggregate/set/$set.d.ts +6 -0
  25. package/lib/aggregate/set/index.d.ts +1 -0
  26. package/lib/aggregate/unwind/$unwind-delta.d.ts +5 -0
  27. package/lib/aggregate/unwind/index.d.ts +6 -0
  28. package/lib/boot/boot-simpl.d.ts +14 -0
  29. package/lib/boot/boot-utils.d.ts +27 -0
  30. package/lib/boot/boot.d.ts +16 -0
  31. package/lib/boot/first-stages.d.ts +12 -0
  32. package/lib/boot/index.d.ts +3 -0
  33. package/lib/boot/single.d.ts +11 -0
  34. package/lib/expression/arith.d.ts +32 -0
  35. package/lib/expression/array.d.ts +30 -0
  36. package/lib/expression/concat.d.ts +34 -0
  37. package/lib/expression/date.d.ts +17 -0
  38. package/lib/expression/expr-base.d.ts +5 -0
  39. package/lib/expression/index.d.ts +7 -0
  40. package/lib/expression/logic.d.ts +23 -0
  41. package/lib/expression/range.d.ts +11 -0
  42. package/lib/expression/val.d.ts +25 -0
  43. package/lib/field/field.d.ts +25 -0
  44. package/lib/field/index.d.ts +1 -0
  45. package/lib/machine.d.ts +9 -0
  46. package/lib/predicate/$eq.d.ts +24 -0
  47. package/lib/predicate/$expr.d.ts +3 -0
  48. package/lib/predicate/$in.d.ts +2 -0
  49. package/lib/predicate/index.d.ts +3 -0
  50. package/lib/predicate/utils.d.ts +5 -0
  51. package/lib/query/index.d.ts +1 -0
  52. package/lib/query/logic.d.ts +11 -0
  53. package/lib/stream/aggregate.d.ts +7 -0
  54. package/lib/types/accumulator.d.ts +30 -0
  55. package/lib/types/aggregate.d.ts +9 -0
  56. package/lib/types/expr.d.ts +27 -0
  57. package/lib/types/extern.d.ts +3 -0
  58. package/lib/types/index.d.ts +8 -0
  59. package/lib/types/lookup.d.ts +19 -0
  60. package/lib/types/machine.d.ts +17 -0
  61. package/lib/types/predicate.d.ts +11 -0
  62. package/lib/types/query.d.ts +12 -0
  63. package/lib/types/stream.d.ts +71 -0
  64. package/lib/update/index.d.ts +1 -0
  65. package/lib/update/updater.d.ts +19 -0
  66. package/lib/utils/before.d.ts +3 -0
  67. package/lib/utils/coll.d.ts +5 -0
  68. package/lib/utils/db-indexes.d.ts +4 -0
  69. package/lib/utils/guard.d.ts +19 -0
  70. package/lib/utils/index.d.ts +4 -0
  71. package/lib/utils/json.d.ts +9 -0
  72. package/lib/utils/log.d.ts +1 -0
  73. package/lib/utils/map-object.d.ts +41 -0
  74. package/lib/utils/merge/combiners.d.ts +6 -0
  75. package/lib/utils/merge/index.d.ts +2 -0
  76. package/lib/utils/merge/merge.d.ts +10 -0
  77. package/lib/utils/merge/next-winner.d.ts +3 -0
  78. package/lib/utils/merge/types.d.ts +19 -0
  79. package/lib/utils/merge/utils.d.ts +4 -0
  80. package/lib/utils/tear-down.d.ts +2 -0
  81. package/lib/watch.d.ts +34 -0
  82. package/package.json +1 -1
  83. package/test/mongodb.d.ts +9 -0
  84. package/test/uri.d.ts +1 -0
  85. package/types/class.d.ts +1 -0
  86. package/types/extern.d.ts +3 -0
  87. package/types/global.d.ts +50 -0
  88. package/types/hkt.d.ts +56 -0
  89. package/types/index.d.ts +6 -0
  90. package/types/json.d.ts +54 -0
  91. package/types/mongo.d.ts +24 -0
  92. package/types/view.d.ts +35 -0
package/index.d.ts CHANGED
@@ -1,4 +1,4 @@
1
- import { Timestamp, Filter, UpdateFilter, BSON, Db, Collection, IndexSpecification, CreateIndexesOptions, MongoClient as MongoClient$1, OptionalUnlessRequiredId } from 'mongodb';
1
+ import { Timestamp, Filter, UpdateFilter, BSON, MaxKey, Db, Collection, IndexSpecification, CreateIndexesOptions, MongoClient as MongoClient$1, OptionalUnlessRequiredId } from 'mongodb';
2
2
  export { Collection, Timestamp } from 'mongodb';
3
3
 
4
4
  type HasJob = {
@@ -220,7 +220,7 @@ declare const Type: unique symbol;
220
220
 
221
221
  type U = undefined;
222
222
  type N = null | U;
223
- type jsonPrim = number | null | string | boolean | Timestamp | Date;
223
+ type jsonPrim = number | null | string | boolean | Timestamp | MaxKey | Date;
224
224
  type notObj = jsonPrim | U;
225
225
  type notArr = notObj | O;
226
226
  type jsonItem = unknown;
@@ -697,7 +697,7 @@ declare const wrap: <Result>(root: Machine<Result>) => Machine<Result>;
697
697
  declare const $eq: <T extends unknown>(operand: rawItem & T) => Predicate<T>;
698
698
  declare const $ne: <T extends unknown>(operand: rawItem & T) => Predicate<T>;
699
699
  type Numeric = number | Timestamp | Date;
700
- declare const comp: <D2 extends Numeric = Numeric>(op: "$lt" | "$gt" | "$lte" | "$gte") => <T extends Numeric>(operand: rawItem & D2) => Predicate<D2>;
700
+ declare const comp: <D2 extends Numeric = Numeric>(op: "$lt" | "$lte" | "$gt" | "$gte") => <T extends Numeric>(operand: rawItem & D2) => Predicate<D2>;
701
701
  declare const $gt: <T extends Numeric>(operand: rawItem & Numeric) => Predicate<Numeric>;
702
702
  declare const $gtTs: <T extends Numeric>(operand: rawItem & Timestamp) => Predicate<Timestamp>;
703
703
  declare const $gteTs: <T extends Numeric>(operand: rawItem & Timestamp) => Predicate<Timestamp>;
@@ -730,12 +730,8 @@ declare const $and: Combiner;
730
730
  declare const $nor: Combiner;
731
731
  declare const $or: Combiner;
732
732
 
733
- declare const setF: (f: ({ input }: {
734
- input: any;
735
- }) => Promise<void>) => void;
736
-
737
733
  declare const enablePreAndPostImages: <T extends doc>(coll: Collection<T>) => Promise<Document>;
738
734
  declare const prepare: (testName?: string) => Promise<MongoClient$1>;
739
735
  declare const makeCol: <T extends ID>(docs: readonly OptionalUnlessRequiredId<T>[], database: Db, name?: string) => Promise<Collection<T>>;
740
736
 
741
- export { $accumulator, $and, $countDict, $entries, $eq, $exists, $expr, $getField, $group, $groupId, $groupMerge, $group_, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $insert, $insertPart, $insertX, $keys, $let, $lookup, $lt, $lte, $map, $map0, $map1, $match, $matchDelta, $merge, $merge2, $mergeId, $mergePart, $merge_, $ne, $nin, $nor, $or, $outerLookup, $pushDict, $rand, $reduce, $replaceWith, $set, $simpleInsert, $simpleMerge, $simpleMergePart, $sum, $type, $unwind, $unwindDelta, type Accumulators, type Arr, type AsLiteral, type Delta, type DeltaAccumulator, type DeltaAccumulators, type ExactKeys, Expr, type ExprHKT, type Exprs, type ExprsExact, type ExprsExactHKT, type ExprsPart, Field, type ID, type Loose, Machine, type Merge, type MergeArgs, type MergeInto, type MergeMapOArgs, type Model, type MongoTypeNames, type N, type NoRaw, type NullToOBJ, type O, type OPick, type OPickD, type Patch, type RONoRaw, type RORec, type RawStages, type Rec, type Replace, type SnapshotStreamExecutionResult, type StrKey, type Strict, type TS, Type, type WriteonlyCollection, add, and, anyElementTrue, array, ceil, comp, concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, type doc, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first, firstSure, floor, from, func, getWhenMatched, getWhenMatchedForMerge, gt, gte, inArray, isArray, ite, type jsonPrim, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, not, type notArr, notNull, now, or, pair, prepare, rand, range, regex, root, set, setF, setField, single, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
737
+ export { $accumulator, $and, $countDict, $entries, $eq, $exists, $expr, $getField, $group, $groupId, $groupMerge, $group_, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $insert, $insertPart, $insertX, $keys, $let, $lookup, $lt, $lte, $map, $map0, $map1, $match, $matchDelta, $merge, $merge2, $mergeId, $mergePart, $merge_, $ne, $nin, $nor, $or, $outerLookup, $pushDict, $rand, $reduce, $replaceWith, $set, $simpleInsert, $simpleMerge, $simpleMergePart, $sum, $type, $unwind, $unwindDelta, type Accumulators, type Arr, type AsLiteral, type Delta, type DeltaAccumulator, type DeltaAccumulators, type ExactKeys, Expr, type ExprHKT, type Exprs, type ExprsExact, type ExprsExactHKT, type ExprsPart, Field, type ID, type Loose, Machine, type Merge, type MergeArgs, type MergeInto, type MergeMapOArgs, type Model, type MongoTypeNames, type N, type NoRaw, type NullToOBJ, type O, type OPick, type OPickD, type Patch, type RONoRaw, type RORec, type RawStages, type Rec, type Replace, type SnapshotStreamExecutionResult, type StrKey, type Strict, type TS, Type, type WriteonlyCollection, add, and, anyElementTrue, array, ceil, comp, concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, type doc, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first, firstSure, floor, from, func, getWhenMatched, getWhenMatchedForMerge, gt, gte, inArray, isArray, ite, type jsonPrim, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, not, type notArr, notNull, now, or, pair, prepare, rand, range, regex, root, set, setField, single, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
package/index.esm.js CHANGED
@@ -1,7 +1,7 @@
1
+ import { MaxKey, MongoClient, UUID } from 'mongodb';
1
2
  import crypto$1 from 'crypto';
2
3
  import { canonicalize } from 'json-canonicalize';
3
4
  import { SynchronousPromise } from 'synchronous-promise';
4
- import { MongoClient, UUID } from 'mongodb';
5
5
  import { writeFile } from 'fs/promises';
6
6
 
7
7
  const asExprRaw = (raw) => ({ get: () => raw });
@@ -82,7 +82,7 @@ const val = (val) => asExpr({
82
82
  : val),
83
83
  });
84
84
  const current = asExpr({
85
- raw: () => asExprRaw('$$CLUSTER_TIME'),
85
+ raw: () => asExprRaw(new MaxKey()),
86
86
  });
87
87
  const $let = (vars, inExpr) => asExpr({
88
88
  raw: f => asExprRaw({
@@ -1304,16 +1304,18 @@ const nextWinner = (previousWinner, previousWinnerNextFrame, sources, interrupt)
1304
1304
  };
1305
1305
 
1306
1306
  const mergeIterators = (params) => {
1307
- const { sources, interrupt, select = race } = params;
1307
+ const { sources, interrupt, select = race, hooks } = params;
1308
1308
  const reiterate = (winner) => {
1309
1309
  const { frame, key } = winner;
1310
1310
  return {
1311
1311
  cont: () => {
1312
1312
  const result = frame.cont();
1313
+ hooks?.start?.(frame, result);
1313
1314
  return mergeIterators({
1314
1315
  sources: patch(sources, key, result),
1315
1316
  interrupt,
1316
1317
  select: sources => nextWinner(winner, result.next, sources, interrupt),
1318
+ hooks,
1317
1319
  });
1318
1320
  },
1319
1321
  data: frame.data,
@@ -1321,7 +1323,7 @@ const mergeIterators = (params) => {
1321
1323
  };
1322
1324
  };
1323
1325
  return {
1324
- stop: () => mergeIterators({ sources: restart(sources), interrupt }),
1326
+ stop: () => mergeIterators({ sources: restart(sources), interrupt, select, hooks }),
1325
1327
  next: select(sources).then(reiterate),
1326
1328
  clear: async () => {
1327
1329
  for (const key in sources) {
@@ -1331,56 +1333,37 @@ const mergeIterators = (params) => {
1331
1333
  };
1332
1334
  };
1333
1335
 
1334
- const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
1335
- const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
1336
- const json = (a) => replace(JSON.stringify(a));
1337
- const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
1338
-
1339
1336
  const state = { steady: false, f: (_) => Promise.resolve() };
1340
1337
  let timeout = null;
1341
- const setF = (f) => {
1342
- state.f = f;
1343
- };
1344
- const aggregate = (db, streamName, input, snapshot = true, start = Date.now()) => input(({ coll, input }) => {
1345
- const req = {
1346
- aggregate: coll.collectionName,
1347
- pipeline: input,
1348
- cursor: {},
1349
- ...(snapshot && { readConcern: { level: 'snapshot' } }),
1350
- };
1351
- if (timeout !== null) {
1352
- clearTimeout(timeout);
1353
- timeout = null;
1354
- }
1355
- log('exec', streamName, req);
1356
- const start2 = Date.now();
1357
- return db.then(d => d.command(req)).then(result => {
1358
- log('prepare', streamName, Date.now() - start);
1359
- log('prepare2', streamName, start2 - start);
1360
- const r = result;
1361
- log('execed', streamName, (replace) => replace(JSON.stringify(req).replaceAll('"$$CLUSTER_TIME"', JSON.stringify(r.cursor.atClusterTime))), result, 'took', Date.now() - start);
1362
- if (!state.steady) {
1363
- if (timeout !== null)
1364
- throw new Error('timeout should be null');
1365
- timeout = setTimeout(() => {
1366
- state.steady = true;
1367
- console.log('steady');
1368
- }, 10000);
1369
- }
1370
- return r;
1371
- }, err => {
1372
- log('err', req, err);
1373
- throw new Error(err);
1374
- });
1375
- });
1376
-
1377
1338
  const firstWorksMerge = (iters) => {
1378
1339
  const iterator = () => {
1379
1340
  const results = iters.map(iter => iter());
1380
1341
  const sources = { ...results };
1381
1342
  return mergeIterators({
1382
1343
  sources,
1383
- interrupt: key => state.steady
1344
+ interrupt: key => false,
1345
+ hooks: {
1346
+ start: (frame, result) => {
1347
+ if (!frame.info.job)
1348
+ return;
1349
+ if (timeout !== null) {
1350
+ clearTimeout(timeout);
1351
+ timeout = null;
1352
+ }
1353
+ result.next.then(() => {
1354
+ if (!frame.info.job)
1355
+ return;
1356
+ if (!state.steady) {
1357
+ if (timeout !== null)
1358
+ clearTimeout(timeout);
1359
+ timeout = setTimeout(() => {
1360
+ state.steady = true;
1361
+ console.log('steady');
1362
+ }, 2000);
1363
+ }
1364
+ });
1365
+ },
1366
+ },
1384
1367
  });
1385
1368
  };
1386
1369
  return iterator;
@@ -1510,10 +1493,7 @@ const $insertX = (out, expr, map, ext, extExpr) => {
1510
1493
  teardown: c => c({
1511
1494
  collection: out,
1512
1495
  method: 'updateMany',
1513
- params: [
1514
- filter,
1515
- [{ $set: { deletedAt: '$$NOW', touchedAt: '$$CLUSTER_TIME' } }],
1516
- ],
1496
+ params: [filter, [{ $set: { deletedAt: '$$NOW', touchedAt: '$$CLUSTER_TIME' } }]],
1517
1497
  }),
1518
1498
  raw: () => {
1519
1499
  const replacer = map(mergeObjects(expr, field(mergeExpr(extExpr, {
@@ -1544,6 +1524,32 @@ const $insertPart = (out, ext) => {
1544
1524
  const $insert = (out) => $insertPart(out, {});
1545
1525
  const assertNotNull = (expr) => expr;
1546
1526
 
1527
+ const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
1528
+ const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
1529
+ const json = (a) => replace(JSON.stringify(a));
1530
+ const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
1531
+
1532
+ const aggregate = (db, streamName, input, snapshot = true, start = Date.now()) => input(({ coll, input }) => {
1533
+ const req = {
1534
+ aggregate: coll.collectionName,
1535
+ pipeline: input,
1536
+ cursor: {},
1537
+ ...(snapshot && { readConcern: { level: 'snapshot' } }),
1538
+ };
1539
+ log('exec', streamName, req);
1540
+ const start2 = Date.now();
1541
+ return db.then(d => d.command(req)).then(result => {
1542
+ log('prepare', streamName, Date.now() - start);
1543
+ log('prepare2', streamName, start2 - start);
1544
+ const r = result;
1545
+ log('execed', streamName, (replace) => replace(JSON.stringify(req).replaceAll('"$$CLUSTER_TIME"', JSON.stringify(r.cursor.atClusterTime))), result, 'took', Date.now() - start);
1546
+ return r;
1547
+ }, err => {
1548
+ log('err', req, err);
1549
+ throw new Error(err);
1550
+ });
1551
+ });
1552
+
1547
1553
  const addTeardown = (it, tr) => {
1548
1554
  if (!tr)
1549
1555
  return it;
@@ -1571,7 +1577,7 @@ async function getLastCommittedTs(adminDb) {
1571
1577
  const st = await adminDb.command({ replSetGetStatus: 1 });
1572
1578
  return st?.optimes?.lastCommittedOpTime?.ts ?? null;
1573
1579
  }
1574
- async function waitUntilStablePast(db, oplogTs, { pollMs = 0, timeoutMs = 10_000, } = {}) {
1580
+ async function waitUntilStablePast(db, oplogTs, { pollMs = 0, timeoutMs = 10_000 } = {}) {
1575
1581
  const adminDb = db.client.db('admin');
1576
1582
  const deadline = Date.now() + timeoutMs;
1577
1583
  while (true) {
@@ -1579,7 +1585,7 @@ async function waitUntilStablePast(db, oplogTs, { pollMs = 0, timeoutMs = 10_000
1579
1585
  if (stable && stable.comp(oplogTs) >= 0)
1580
1586
  return;
1581
1587
  if (Date.now() > deadline) {
1582
- throw new Error("Timed out waiting for stable timestamp to reach oplog event time");
1588
+ throw new Error('Timed out waiting for stable timestamp to reach oplog event time');
1583
1589
  }
1584
1590
  await sleep(pollMs);
1585
1591
  }
@@ -1601,29 +1607,37 @@ async function* tailOplog(db, opts) {
1601
1607
  try {
1602
1608
  for await (const doc of cursor) {
1603
1609
  lastTs = doc.ts;
1604
- if (doc.op === 'i') {
1605
- yield { ns: doc.ns, fields: new Set(Object.keys(doc.o)), doc };
1610
+ if (doc.op === 'i' || '_id' in doc.o) {
1611
+ const fields = new Set(Object.keys(doc.o));
1612
+ fields.delete('_id');
1613
+ yield { fields, doc, changeTouched: doc.o['touchedAt'] instanceof MaxKey };
1606
1614
  }
1607
1615
  else {
1616
+ let changeTouched = false;
1608
1617
  if (doc.o['$v'] !== 2) {
1609
- throw new Error(`Expected update with $v: 2, got ${JSON.stringify(doc.o)}`);
1618
+ throw new Error(`Expected update with $v: 2, got ${JSON.stringify(doc)}`);
1610
1619
  }
1611
1620
  const updatedFields = [];
1612
1621
  const diff = doc.o['diff'];
1613
1622
  for (const updateOp in diff) {
1614
1623
  if (['u', 'i', 'd'].includes(updateOp)) {
1615
1624
  updatedFields.push(...Object.keys(diff[updateOp]));
1625
+ if (diff[updateOp]['touchedAt'] instanceof MaxKey) {
1626
+ changeTouched = true;
1627
+ }
1616
1628
  }
1617
1629
  else if (updateOp.startsWith('s')) {
1618
1630
  updatedFields.push(updateOp.slice(1));
1619
1631
  }
1620
1632
  }
1621
- yield { ns: doc.ns, fields: new Set(updatedFields), doc };
1633
+ yield { fields: new Set(updatedFields), doc, changeTouched };
1622
1634
  }
1623
1635
  }
1624
1636
  }
1625
1637
  catch (e) {
1626
- log('oplog loop error', e);
1638
+ log('oplog loop error, notifying watchers and reopening');
1639
+ console.error(e);
1640
+ yield null;
1627
1641
  }
1628
1642
  finally {
1629
1643
  log('oplog loop ended');
@@ -1634,15 +1648,76 @@ async function* tailOplog(db, opts) {
1634
1648
  }
1635
1649
  const watchers = new Map();
1636
1650
  let running = false;
1651
+ const makePromise = () => {
1652
+ let resolve = () => { };
1653
+ let promise = new Promise(r => (resolve = r));
1654
+ return { promise, resolve };
1655
+ };
1637
1656
  const loop = async (db) => {
1638
1657
  log('starting oplog loop');
1639
- for await (const { ns, fields, doc } of tailOplog(db, {})) {
1640
- const m = watchers.get(ns);
1641
- if (!m)
1658
+ let notify = makePromise();
1659
+ let batch = [];
1660
+ const last = db.collection('__last');
1661
+ const run = async () => {
1662
+ for await (const event of tailOplog(db, {
1663
+ since: (await last.findOne({ _id: 'oplog' }))?.ts,
1664
+ })) {
1665
+ if (event?.fields.size === 0)
1666
+ continue;
1667
+ batch = event && batch ? [...batch, event] : null;
1668
+ notify.resolve();
1669
+ }
1670
+ };
1671
+ run();
1672
+ const iter = async function* () {
1673
+ while (true) {
1674
+ await notify.promise;
1675
+ const b = batch;
1676
+ if (b?.length) {
1677
+ last
1678
+ .updateOne({ _id: 'oplog' }, { $set: { ts: b[b.length - 1].doc.ts } }, { upsert: true })
1679
+ .catch(() => { });
1680
+ }
1681
+ batch = [];
1682
+ notify = makePromise();
1683
+ yield b;
1684
+ }
1685
+ };
1686
+ for await (const events of iter()) {
1687
+ if (!events) {
1688
+ log('notifying watchers of oplog loop restart');
1689
+ for (const m of watchers.values()) {
1690
+ for (const { cb } of m.values()) {
1691
+ cb(null);
1692
+ }
1693
+ }
1642
1694
  continue;
1643
- for (const { cb, keys } of m.values()) {
1644
- if (!keys || keys.some(k => fields.has(k))) {
1645
- cb(doc);
1695
+ }
1696
+ const groups = Object.groupBy(events.filter(e => e.changeTouched), ev => ev.doc.ns);
1697
+ for (const [ns, evs] of Object.entries(groups)) {
1698
+ if (!evs)
1699
+ continue;
1700
+ const [dbName, collName] = ns.split('.');
1701
+ if (dbName !== db.databaseName)
1702
+ continue;
1703
+ const coll = db.collection(collName);
1704
+ coll
1705
+ .bulkWrite(evs.map((e) => ({
1706
+ updateOne: {
1707
+ filter: { _id: e.doc.o['_id'] ?? e.doc.o2?._id },
1708
+ update: { $set: { touchedAt: e.doc.ts } },
1709
+ },
1710
+ })))
1711
+ .catch(() => { });
1712
+ }
1713
+ for (const { fields, doc } of events) {
1714
+ const m = watchers.get(doc.ns);
1715
+ if (!m)
1716
+ continue;
1717
+ for (const { cb, keys } of m.values()) {
1718
+ if (!keys || keys.some(k => fields.has(k))) {
1719
+ cb(doc);
1720
+ }
1646
1721
  }
1647
1722
  }
1648
1723
  }
@@ -1668,7 +1743,7 @@ const makeWatchStream = ({ collection, projection: p, hardMatch: m }, streamName
1668
1743
  const projection = { ...(p ? mapExactToObject(p, v => v) : {}), deletedAt: 1 };
1669
1744
  let resolve = (_) => { };
1670
1745
  const promise = new Promise(r => (resolve = r));
1671
- const close = register(collection, p ? Object.keys(projection) : null, (doc) => {
1746
+ const close = register(collection, p ? Object.keys(projection) : null, doc => {
1672
1747
  log(streamName, 'change detected', doc);
1673
1748
  resolve(doc);
1674
1749
  close();
@@ -1677,9 +1752,10 @@ const makeWatchStream = ({ collection, projection: p, hardMatch: m }, streamName
1677
1752
  tryNext: async () => {
1678
1753
  const doc = await promise;
1679
1754
  const start = Date.now();
1680
- await waitUntilStablePast(collection.s.db, doc.ts);
1755
+ if (doc)
1756
+ await waitUntilStablePast(collection.s.db, doc.ts);
1681
1757
  log(streamName, 'stable past took', Date.now() - start);
1682
- return doc;
1758
+ return doc ?? {};
1683
1759
  },
1684
1760
  close: async () => close(),
1685
1761
  };
@@ -2119,4 +2195,4 @@ const executes = (view, input, needs) => {
2119
2195
  };
2120
2196
  const single = (view, needs = {}) => pipe(input => executes(view, input, needs), emptyDelta(), concatDelta, emptyDelta);
2121
2197
 
2122
- export { $accumulator, $and, $countDict, $entries, $eq, $exists, $expr, $getField, $group, $groupId, $groupMerge, $group_, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $insert, $insertPart, $insertX, $keys, $let, $lookup, $lt, $lte, $map, $map0, $map1, $match, $matchDelta, $merge, $merge2, $mergeId, $mergePart, $merge_, $ne, $nin, $nor, $or, $outerLookup, $pushDict, $rand, $reduce, $replaceWith, $set, $simpleInsert, $simpleMerge, $simpleMergePart, $sum, $type, $unwind, $unwindDelta, Field, Machine, add, and, anyElementTrue, array, ceil, comp, concat$1 as concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first$1 as first, firstSure, floor, from, func, getWhenMatched, getWhenMatchedForMerge, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, not, notNull, now, or, pair, prepare, rand, range, regex, root, set, setF, setField, single, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
2198
+ export { $accumulator, $and, $countDict, $entries, $eq, $exists, $expr, $getField, $group, $groupId, $groupMerge, $group_, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $insert, $insertPart, $insertX, $keys, $let, $lookup, $lt, $lte, $map, $map0, $map1, $match, $matchDelta, $merge, $merge2, $mergeId, $mergePart, $merge_, $ne, $nin, $nor, $or, $outerLookup, $pushDict, $rand, $reduce, $replaceWith, $set, $simpleInsert, $simpleMerge, $simpleMergePart, $sum, $type, $unwind, $unwindDelta, Field, Machine, add, and, anyElementTrue, array, ceil, comp, concat$1 as concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first$1 as first, firstSure, floor, from, func, getWhenMatched, getWhenMatchedForMerge, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, not, notNull, now, or, pair, prepare, rand, range, regex, root, set, setField, single, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };