@omegup/msync 0.1.22 → 0.1.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/index.d.ts +3 -2
  2. package/index.esm.js +114 -73
  3. package/index.js +113 -72
  4. package/package.json +1 -1
  5. package/lib/accumulators/index.d.ts +0 -10
  6. package/lib/aggregate/$merge.d.ts +0 -28
  7. package/lib/aggregate/$upsert.d.ts +0 -15
  8. package/lib/aggregate/group/$group-merge.d.ts +0 -12
  9. package/lib/aggregate/group/index.d.ts +0 -2
  10. package/lib/aggregate/group/utils/sub-group.d.ts +0 -6
  11. package/lib/aggregate/group/utils/sub-merge.d.ts +0 -20
  12. package/lib/aggregate/index.d.ts +0 -9
  13. package/lib/aggregate/lookup/$lookup-delta.d.ts +0 -10
  14. package/lib/aggregate/lookup/$lookup-raw.d.ts +0 -9
  15. package/lib/aggregate/lookup/index.d.ts +0 -12
  16. package/lib/aggregate/match/$match-delta.d.ts +0 -3
  17. package/lib/aggregate/match/$match.d.ts +0 -3
  18. package/lib/aggregate/match/index.d.ts +0 -2
  19. package/lib/aggregate/mongo-stages.d.ts +0 -13
  20. package/lib/aggregate/out.d.ts +0 -37
  21. package/lib/aggregate/prefix.d.ts +0 -33
  22. package/lib/aggregate/raws.d.ts +0 -17
  23. package/lib/aggregate/set/$replace-with-each.d.ts +0 -13
  24. package/lib/aggregate/set/$set-delta.d.ts +0 -5
  25. package/lib/aggregate/set/$set.d.ts +0 -6
  26. package/lib/aggregate/set/index.d.ts +0 -1
  27. package/lib/aggregate/unwind/$unwind-delta.d.ts +0 -5
  28. package/lib/aggregate/unwind/index.d.ts +0 -6
  29. package/lib/boot/boot-simpl.d.ts +0 -14
  30. package/lib/boot/boot-utils.d.ts +0 -27
  31. package/lib/boot/boot.d.ts +0 -16
  32. package/lib/boot/first-stages.d.ts +0 -12
  33. package/lib/boot/index.d.ts +0 -3
  34. package/lib/boot/single.d.ts +0 -11
  35. package/lib/expression/arith.d.ts +0 -32
  36. package/lib/expression/array.d.ts +0 -30
  37. package/lib/expression/concat.d.ts +0 -34
  38. package/lib/expression/date.d.ts +0 -17
  39. package/lib/expression/expr-base.d.ts +0 -5
  40. package/lib/expression/index.d.ts +0 -7
  41. package/lib/expression/logic.d.ts +0 -23
  42. package/lib/expression/range.d.ts +0 -11
  43. package/lib/expression/val.d.ts +0 -25
  44. package/lib/field/field.d.ts +0 -25
  45. package/lib/field/index.d.ts +0 -1
  46. package/lib/machine.d.ts +0 -9
  47. package/lib/predicate/$eq.d.ts +0 -24
  48. package/lib/predicate/$expr.d.ts +0 -3
  49. package/lib/predicate/$in.d.ts +0 -2
  50. package/lib/predicate/index.d.ts +0 -3
  51. package/lib/predicate/utils.d.ts +0 -5
  52. package/lib/query/index.d.ts +0 -1
  53. package/lib/query/logic.d.ts +0 -11
  54. package/lib/stream/aggregate.d.ts +0 -7
  55. package/lib/types/accumulator.d.ts +0 -30
  56. package/lib/types/aggregate.d.ts +0 -9
  57. package/lib/types/expr.d.ts +0 -27
  58. package/lib/types/extern.d.ts +0 -3
  59. package/lib/types/index.d.ts +0 -8
  60. package/lib/types/lookup.d.ts +0 -19
  61. package/lib/types/machine.d.ts +0 -17
  62. package/lib/types/predicate.d.ts +0 -11
  63. package/lib/types/query.d.ts +0 -12
  64. package/lib/types/stream.d.ts +0 -71
  65. package/lib/update/index.d.ts +0 -1
  66. package/lib/update/updater.d.ts +0 -19
  67. package/lib/utils/before.d.ts +0 -3
  68. package/lib/utils/coll.d.ts +0 -5
  69. package/lib/utils/db-indexes.d.ts +0 -4
  70. package/lib/utils/guard.d.ts +0 -19
  71. package/lib/utils/index.d.ts +0 -4
  72. package/lib/utils/json.d.ts +0 -9
  73. package/lib/utils/log.d.ts +0 -1
  74. package/lib/utils/map-object.d.ts +0 -41
  75. package/lib/utils/merge/combiners.d.ts +0 -6
  76. package/lib/utils/merge/index.d.ts +0 -2
  77. package/lib/utils/merge/merge.d.ts +0 -10
  78. package/lib/utils/merge/next-winner.d.ts +0 -3
  79. package/lib/utils/merge/types.d.ts +0 -19
  80. package/lib/utils/merge/utils.d.ts +0 -4
  81. package/lib/utils/tear-down.d.ts +0 -2
  82. package/lib/watch.d.ts +0 -34
  83. package/test/mongodb.d.ts +0 -9
  84. package/test/uri.d.ts +0 -1
  85. package/types/class.d.ts +0 -1
  86. package/types/extern.d.ts +0 -3
  87. package/types/global.d.ts +0 -50
  88. package/types/hkt.d.ts +0 -56
  89. package/types/index.d.ts +0 -6
  90. package/types/json.d.ts +0 -54
  91. package/types/mongo.d.ts +0 -24
  92. package/types/view.d.ts +0 -35
package/index.d.ts CHANGED
@@ -94,7 +94,7 @@ type StreamRunnerParam<in V, out Result> = {
94
94
  raw: (first: boolean) => RawStages<unknown, V, Result>;
95
95
  teardown: <R>(consume: <W, M extends keyof Actions<W>>(x: TeardownRecord<W, M>) => R) => R;
96
96
  };
97
- type StreamRunner<out V> = <Result>(input: StreamRunnerParam<V, Result>) => Runner<readonly Result[], HasJob>;
97
+ type StreamRunner<out V> = <Result>(input: StreamRunnerParam<V, Result>, setup?: () => Promise<void>) => Runner<readonly Result[], HasJob>;
98
98
  type SimpleStreamExecutionResult<out Q, out V extends Q> = {
99
99
  readonly out: StreamRunner<V>;
100
100
  };
@@ -536,7 +536,8 @@ declare const log: (...args: unknown[]) => void;
536
536
 
537
537
  declare const createIndex: (collection: {
538
538
  readonly createIndex: Collection["createIndex"];
539
- }, indexSpec: IndexSpecification, options?: CreateIndexesOptions) => Promise<void>;
539
+ collectionName: string;
540
+ }, indexSpec: IndexSpecification, op?: CreateIndexesOptions) => Promise<void>;
540
541
 
541
542
  declare const noop: () => void;
542
543
  declare const map1: <K extends string, Im>(k: AsLiteral<K>, to: Im) => { readonly [P in K]: [P, Im]; } & {
package/index.esm.js CHANGED
@@ -1,4 +1,4 @@
1
- import { MaxKey, MongoClient, UUID } from 'mongodb';
1
+ import { MaxKey, UUID, MongoClient } from 'mongodb';
2
2
  import crypto$1 from 'crypto';
3
3
  import { canonicalize } from 'json-canonicalize';
4
4
  import { SynchronousPromise } from 'synchronous-promise';
@@ -1251,19 +1251,48 @@ const $lookupRaw = ({ field1, field2 }, { coll, exec, input }, k2, k, includeNul
1251
1251
 
1252
1252
  const asBefore = (f) => f(() => root().of('before'));
1253
1253
 
1254
- const createIndex = async (collection, indexSpec, options) => {
1254
+ const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
1255
+ const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
1256
+ const json = (a) => replace(JSON.stringify(a));
1257
+ const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
1258
+
1259
+ const indexMap = new Map();
1260
+ const createIndex = async (collection, indexSpec, op) => {
1261
+ const { name, ...options } = op ?? {};
1262
+ const map = indexMap.get(collection.collectionName) ?? new Map();
1263
+ indexMap.set(collection.collectionName, map);
1264
+ const indexKey = `${JSON.stringify(indexSpec)}-${JSON.stringify(options)}`;
1265
+ if (map.has(indexKey)) {
1266
+ await map.get(indexKey);
1267
+ return;
1268
+ }
1269
+ const promise = createIndexWithRetry(collection, indexSpec, op);
1270
+ map.set(indexKey, promise);
1271
+ await promise;
1272
+ };
1273
+ const createIndexWithRetry = async (collection, indexSpec, options) => {
1274
+ const log = () => { };
1275
+ log('Creating index', { collection: collection.collectionName, indexSpec, options });
1255
1276
  while (true) {
1256
1277
  try {
1257
1278
  await collection.createIndex(indexSpec, options);
1279
+ log('Index created', { collection: collection.collectionName, indexSpec, options });
1258
1280
  }
1259
1281
  catch (e) {
1260
1282
  if ([85, 276].includes(e.code)) {
1283
+ log('Index created with different name', e.code, { collection: collection.collectionName, indexSpec, options });
1261
1284
  break;
1262
1285
  }
1263
1286
  if (e.code == 12587) {
1264
1287
  await new Promise(res => setTimeout(res, 300));
1265
1288
  continue;
1266
1289
  }
1290
+ log('Error creating index', {
1291
+ collection: collection.collectionName,
1292
+ indexSpec,
1293
+ options,
1294
+ error: e,
1295
+ });
1267
1296
  console.error('Error creating index', e);
1268
1297
  throw e;
1269
1298
  }
@@ -1333,7 +1362,7 @@ const mergeIterators = (params) => {
1333
1362
  };
1334
1363
  };
1335
1364
 
1336
- const state = { steady: false, f: (_) => Promise.resolve() };
1365
+ const state = { steady: false };
1337
1366
  let timeout = null;
1338
1367
  const firstWorksMerge = (iters) => {
1339
1368
  const iterator = () => {
@@ -1341,7 +1370,7 @@ const firstWorksMerge = (iters) => {
1341
1370
  const sources = { ...results };
1342
1371
  return mergeIterators({
1343
1372
  sources,
1344
- interrupt: key => false,
1373
+ interrupt: key => state.steady,
1345
1374
  hooks: {
1346
1375
  start: (frame, result) => {
1347
1376
  if (!frame.info.job)
@@ -1409,8 +1438,6 @@ const runCont = async (it, cb) => {
1409
1438
 
1410
1439
  const merge = ({ lsource: L, rsource: R, }) => mergeIterators({ sources: { L, R } });
1411
1440
  const join = ({ lField, rField, left, right, as }, leftSnapshot, rightSnapshot, stagesUntilNextLookup, outerLeft) => {
1412
- createIndex(leftSnapshot.coll, { [`before.${lField.str()}`]: 1 }).catch(e => e.code == 86 || Promise.reject(e));
1413
- createIndex(rightSnapshot.coll, { [`before.${rField.str()}`]: 1 }).catch(e => e.code == 86 || Promise.reject(e));
1414
1441
  const rightJoinField = { field1: lField, field2: rField };
1415
1442
  const joinId = 'left';
1416
1443
  const joinR_Snapshot = asBefore($lookupRaw(rightJoinField, rightSnapshot, as, joinId, outerLeft));
@@ -1430,6 +1457,15 @@ const join = ({ lField, rField, left, right, as }, leftSnapshot, rightSnapshot,
1430
1457
  const getRunner = (f, stages, final) => f.out({
1431
1458
  raw: first => concatStages(stages, final.raw(first)),
1432
1459
  teardown: final.teardown,
1460
+ }, async () => {
1461
+ log('Creating indexes for lookup left', leftSnapshot.coll.collectionName, {
1462
+ [`before.${lField.str()}`]: 1,
1463
+ });
1464
+ await createIndex(leftSnapshot.coll, { [`before.${lField.str()}`]: 1 }, { name: 'left_' + new UUID().toString('base64') });
1465
+ log('Creating indexes for lookup right', rightSnapshot.coll.collectionName, {
1466
+ [`before.${rField.str()}`]: 1,
1467
+ });
1468
+ await createIndex(rightSnapshot.coll, { [`before.${rField.str()}`]: 1 }, { name: 'right_' + new UUID().toString('base64') });
1433
1469
  });
1434
1470
  const lRunner = getRunner(left, lRunnerInput, finalInput);
1435
1471
  const rRunner = getRunner(right, rRunnerInput, finalInput);
@@ -1524,11 +1560,6 @@ const $insertPart = (out, ext) => {
1524
1560
  const $insert = (out) => $insertPart(out, {});
1525
1561
  const assertNotNull = (expr) => expr;
1526
1562
 
1527
- const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
1528
- const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
1529
- const json = (a) => replace(JSON.stringify(a));
1530
- const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
1531
-
1532
1563
  const aggregate = (db, streamName, input, snapshot = true, start = Date.now()) => input(({ coll, input }) => {
1533
1564
  const req = {
1534
1565
  aggregate: coll.collectionName,
@@ -1657,11 +1688,8 @@ const loop = async (db) => {
1657
1688
  log('starting oplog loop');
1658
1689
  let notify = makePromise();
1659
1690
  let batch = [];
1660
- const last = db.collection('__last');
1661
1691
  const run = async () => {
1662
- for await (const event of tailOplog(db, {
1663
- since: (await last.findOne({ _id: 'oplog' }))?.ts,
1664
- })) {
1692
+ for await (const event of tailOplog(db, {})) {
1665
1693
  if (event?.fields.size === 0)
1666
1694
  continue;
1667
1695
  batch = event && batch ? [...batch, event] : null;
@@ -1673,11 +1701,6 @@ const loop = async (db) => {
1673
1701
  while (true) {
1674
1702
  await notify.promise;
1675
1703
  const b = batch;
1676
- if (b?.length) {
1677
- last
1678
- .updateOne({ _id: 'oplog' }, { $set: { ts: b[b.length - 1].doc.ts } }, { upsert: true })
1679
- .catch(() => { });
1680
- }
1681
1704
  batch = [];
1682
1705
  notify = makePromise();
1683
1706
  yield b;
@@ -1739,7 +1762,12 @@ const register = (coll, keys, cb) => {
1739
1762
  watchers.delete(ns);
1740
1763
  };
1741
1764
  };
1742
- const makeWatchStream = ({ collection, projection: p, hardMatch: m }, streamName) => {
1765
+ let maxKeysRemoved = null;
1766
+ const makeWatchStream = async ({ collection, projection: p, hardMatch: m }, streamName) => {
1767
+ const { db } = collection.s;
1768
+ await (maxKeysRemoved ??= Promise.all((await db.listCollections({}, { nameOnly: true }).toArray()).map(x => void db
1769
+ .collection(x.name)
1770
+ .updateMany({ touchedAt: new MaxKey() }, [{ $set: { touchedAt: '$$CLUSTER_TIME' } }]))).then(() => { }));
1743
1771
  const projection = { ...(p ? mapExactToObject(p, v => v) : {}), deletedAt: 1 };
1744
1772
  let resolve = (_) => { };
1745
1773
  const promise = new Promise(r => (resolve = r));
@@ -1849,44 +1877,17 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1849
1877
  streamNames[streamName] = hash;
1850
1878
  else if (streamNames[streamName] != hash)
1851
1879
  throw new Error(`streamName ${streamName} already used`);
1852
- db.command({
1853
- collMod: coll,
1854
- changeStreamPreAndPostImages: { enabled: true },
1855
- });
1856
- createIndex(collection, { touchedAt: 1 }, hardMatch
1857
- ? {
1858
- partialFilterExpression: hardMatch.raw(root()),
1859
- name: 'touchedAt_hard_' + new UUID().toString('base64'),
1860
- }
1861
- : {}).catch(e => e.code == 86 || Promise.reject(e));
1862
1880
  const last = db.collection('__last');
1863
1881
  const snapshotCollection = db.collection(coll + '_' + streamName + '_snapshot');
1864
- createIndex(snapshotCollection, { before: 1 }, {
1865
- partialFilterExpression: { before: null },
1866
- name: 'before_' + new UUID().toString('base64'),
1867
- });
1868
- createIndex(snapshotCollection, { updated: 1 }, {
1869
- partialFilterExpression: { updated: true },
1870
- name: 'updated_' + new UUID().toString('base64'),
1871
- });
1872
- createIndex(snapshotCollection, { updated: 1, after: 1, before: 1 }, {
1873
- partialFilterExpression: { updated: true, after: null, before: null },
1874
- name: 'updated_nulls_' + new UUID().toString('base64'),
1875
- });
1876
- createIndex(snapshotCollection, { updated: 1, after: 1 }, {
1877
- partialFilterExpression: { updated: true, after: null },
1878
- name: 'updated_no_after_' + new UUID().toString('base64'),
1879
- });
1880
- createIndex(snapshotCollection, { updated: 1 }, {
1881
- partialFilterExpression: { updated: true, after: null, before: null },
1882
- name: 'updated_nulls_' + new UUID().toString('base64'),
1883
- });
1884
1882
  const job = {};
1885
- const run = (finalInput) => {
1886
- const clear = async () => Promise.all([
1887
- snapshotCollection.drop().catch(noop).catch(noop),
1888
- last.deleteOne({ _id: streamName }),
1889
- ]);
1883
+ const run = (finalInput, setup) => {
1884
+ const dropSnapshot = async () => {
1885
+ await snapshotCollection.drop().catch(noop);
1886
+ log('snapshot collection dropped', streamName, `db['${snapshotCollection.collectionName}'].drop()`);
1887
+ log('with', [...(indexMap.get(snapshotCollection.collectionName)?.keys() ?? [])], 'indexes in map before deletion');
1888
+ indexMap.delete(snapshotCollection.collectionName);
1889
+ };
1890
+ const clear = async () => Promise.all([dropSnapshot(), last.deleteOne({ _id: streamName })]);
1890
1891
  const withStop = (next, tr) => {
1891
1892
  return addTeardown(() => ({ stop, next: next(), clear }), tr);
1892
1893
  };
@@ -1911,7 +1912,9 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1911
1912
  const step0 = () => SynchronousPromise.resolve(next(step1, 'empty new collection'));
1912
1913
  const stop = withStop(step0);
1913
1914
  const step1 = async () => {
1915
+ log('reset collection', streamName, `db['${snapshotCollection.collectionName}'].updateMany( updated: true }, { $set: { updated: false, after: null } })`);
1914
1916
  await snapshotCollection.updateMany({ updated: true }, { $set: { updated: false, after: null } });
1917
+ log('reset collection done', streamName);
1915
1918
  return next(step2, 'get last update');
1916
1919
  };
1917
1920
  const step2 = () => Promise.all([
@@ -1934,13 +1937,45 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1934
1937
  };
1935
1938
  const [action, out] = actions[method](collection, params);
1936
1939
  log('teardown', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
1937
- await Promise.all([snapshotCollection.drop().catch(noop), action]);
1940
+ await Promise.all([dropSnapshot(), action]);
1938
1941
  log('teardown done', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
1939
1942
  };
1940
1943
  if (!same) {
1941
1944
  log('not same, new data', streamName, data);
1942
1945
  await handleTeardown(exists ?? { data });
1943
1946
  }
1947
+ log('creating indexes');
1948
+ await createIndex(snapshotCollection, { before: 1 }, {
1949
+ partialFilterExpression: { before: null },
1950
+ name: 'before_' + new UUID().toString('base64'),
1951
+ });
1952
+ await createIndex(snapshotCollection, { updated: 1 }, {
1953
+ partialFilterExpression: { updated: true },
1954
+ name: 'updated_' + new UUID().toString('base64'),
1955
+ });
1956
+ await createIndex(snapshotCollection, { updated: 1, after: 1, before: 1 }, {
1957
+ partialFilterExpression: { updated: true, after: null, before: null },
1958
+ name: 'updated_nulls_' + new UUID().toString('base64'),
1959
+ });
1960
+ await createIndex(snapshotCollection, { updated: 1, after: 1 }, {
1961
+ partialFilterExpression: { updated: true, after: null },
1962
+ name: 'updated_no_after_' + new UUID().toString('base64'),
1963
+ });
1964
+ await createIndex(snapshotCollection, { updated: 1 }, {
1965
+ partialFilterExpression: { updated: true, after: null, before: null },
1966
+ name: 'updated_nulls_' + new UUID().toString('base64'),
1967
+ });
1968
+ await db.command({
1969
+ collMod: coll,
1970
+ changeStreamPreAndPostImages: { enabled: true },
1971
+ });
1972
+ await createIndex(collection, { touchedAt: 1 }, hardMatch
1973
+ ? {
1974
+ partialFilterExpression: hardMatch.raw(root()),
1975
+ name: 'touchedAt_hard_' + new UUID().toString('base64'),
1976
+ }
1977
+ : {});
1978
+ await setup?.();
1944
1979
  await after?.();
1945
1980
  return nextData([])(async () => {
1946
1981
  await new Promise(resolve => setTimeout(resolve, 1000));
@@ -1980,7 +2015,8 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1980
2015
  const first = ts === undefined;
1981
2016
  const stages = finalInput.raw(first);
1982
2017
  await last.updateOne({ _id: streamName }, { $set: { job: 1 } }, { upsert: true });
1983
- const stream = makeStream();
2018
+ const stream = await makeStream();
2019
+ const nextRes = stream.tryNext();
1984
2020
  const aggResult = await aggregate(pdb, streamName, c => c({
1985
2021
  coll: snapshotCollection,
1986
2022
  input: link()
@@ -1994,7 +2030,6 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1994
2030
  .with(input.delta)
1995
2031
  .with(stages).stages,
1996
2032
  }), false, start);
1997
- const nextRes = stream.tryNext();
1998
2033
  stages.at(-1).$merge.into.coll;
1999
2034
  return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream, nextRes, first }), 'remove handled deleted updated', () => stream.close());
2000
2035
  };
@@ -2073,14 +2108,6 @@ const executes$1 = (view, input, streamName, needs) => {
2073
2108
  : pre;
2074
2109
  const job = {};
2075
2110
  const db = collection.s.db, coll = collection.collectionName;
2076
- db.command({
2077
- collMod: coll,
2078
- changeStreamPreAndPostImages: { enabled: true },
2079
- });
2080
- createIndex(collection, { touchedAt: 1 }, {
2081
- partialFilterExpression: { deletedAt: { $eq: null } },
2082
- name: 'touchedAt_' + new UUID().toString('base64'),
2083
- });
2084
2111
  const last = db.collection('__last');
2085
2112
  const projectInput = projection &&
2086
2113
  $project_(spread(projection, {
@@ -2117,10 +2144,24 @@ const executes$1 = (view, input, streamName, needs) => {
2117
2144
  };
2118
2145
  const step0 = () => SynchronousPromise.resolve(next(step1, 'get last update'));
2119
2146
  const stop = withStop(step0);
2120
- const step1 = () => Promise.all([
2121
- last.findOne({ _id: streamName, data }),
2122
- last.findOne({ _id: streamName }),
2123
- ]).then(ts => next(step2_5(ts), 'handle teardown'));
2147
+ const step1 = async () => {
2148
+ log('creating indexes');
2149
+ await db.command({
2150
+ collMod: coll,
2151
+ changeStreamPreAndPostImages: { enabled: true },
2152
+ });
2153
+ await createIndex(collection, { touchedAt: 1 }, {
2154
+ partialFilterExpression: { deletedAt: { $eq: null } },
2155
+ name: 'touchedAt_' + new UUID().toString('base64'),
2156
+ });
2157
+ log('stream started', { streamName, data });
2158
+ const ts = await Promise.all([
2159
+ last.findOne({ _id: streamName, data }),
2160
+ last.findOne({ _id: streamName }),
2161
+ ]);
2162
+ log('got last update', { streamName, ts });
2163
+ return next(step2_5(ts), 'handle teardown');
2164
+ };
2124
2165
  const step2_5 = ([same, exists]) => async () => {
2125
2166
  const handleTeardown = async (last) => {
2126
2167
  if (!last.data)
@@ -2150,12 +2191,12 @@ const executes$1 = (view, input, streamName, needs) => {
2150
2191
  const makeStream = () => makeWatchStream(view, streamName);
2151
2192
  const step4 = (lastTS) => async () => {
2152
2193
  const raw = stages(lastTS).with(finalInput.raw(lastTS === null)).stages;
2153
- const stream = makeStream();
2194
+ const stream = await makeStream();
2195
+ const nextRes = stream.tryNext();
2154
2196
  const aggResult = await aggregate(pdb, streamName, c => c({
2155
2197
  coll: collection,
2156
2198
  input: raw,
2157
2199
  }));
2158
- const nextRes = stream.tryNext();
2159
2200
  return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream, nextRes }), 'update __last', () => stream.close());
2160
2201
  };
2161
2202
  const step7 = (l) => async () => {