@omegup/msync 0.1.22 → 0.1.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/index.d.ts +3 -2
  2. package/index.esm.js +114 -73
  3. package/index.js +113 -72
  4. package/package.json +1 -1
  5. package/lib/accumulators/index.d.ts +0 -10
  6. package/lib/aggregate/$merge.d.ts +0 -28
  7. package/lib/aggregate/$upsert.d.ts +0 -15
  8. package/lib/aggregate/group/$group-merge.d.ts +0 -12
  9. package/lib/aggregate/group/index.d.ts +0 -2
  10. package/lib/aggregate/group/utils/sub-group.d.ts +0 -6
  11. package/lib/aggregate/group/utils/sub-merge.d.ts +0 -20
  12. package/lib/aggregate/index.d.ts +0 -9
  13. package/lib/aggregate/lookup/$lookup-delta.d.ts +0 -10
  14. package/lib/aggregate/lookup/$lookup-raw.d.ts +0 -9
  15. package/lib/aggregate/lookup/index.d.ts +0 -12
  16. package/lib/aggregate/match/$match-delta.d.ts +0 -3
  17. package/lib/aggregate/match/$match.d.ts +0 -3
  18. package/lib/aggregate/match/index.d.ts +0 -2
  19. package/lib/aggregate/mongo-stages.d.ts +0 -13
  20. package/lib/aggregate/out.d.ts +0 -37
  21. package/lib/aggregate/prefix.d.ts +0 -33
  22. package/lib/aggregate/raws.d.ts +0 -17
  23. package/lib/aggregate/set/$replace-with-each.d.ts +0 -13
  24. package/lib/aggregate/set/$set-delta.d.ts +0 -5
  25. package/lib/aggregate/set/$set.d.ts +0 -6
  26. package/lib/aggregate/set/index.d.ts +0 -1
  27. package/lib/aggregate/unwind/$unwind-delta.d.ts +0 -5
  28. package/lib/aggregate/unwind/index.d.ts +0 -6
  29. package/lib/boot/boot-simpl.d.ts +0 -14
  30. package/lib/boot/boot-utils.d.ts +0 -27
  31. package/lib/boot/boot.d.ts +0 -16
  32. package/lib/boot/first-stages.d.ts +0 -12
  33. package/lib/boot/index.d.ts +0 -3
  34. package/lib/boot/single.d.ts +0 -11
  35. package/lib/expression/arith.d.ts +0 -32
  36. package/lib/expression/array.d.ts +0 -30
  37. package/lib/expression/concat.d.ts +0 -34
  38. package/lib/expression/date.d.ts +0 -17
  39. package/lib/expression/expr-base.d.ts +0 -5
  40. package/lib/expression/index.d.ts +0 -7
  41. package/lib/expression/logic.d.ts +0 -23
  42. package/lib/expression/range.d.ts +0 -11
  43. package/lib/expression/val.d.ts +0 -25
  44. package/lib/field/field.d.ts +0 -25
  45. package/lib/field/index.d.ts +0 -1
  46. package/lib/machine.d.ts +0 -9
  47. package/lib/predicate/$eq.d.ts +0 -24
  48. package/lib/predicate/$expr.d.ts +0 -3
  49. package/lib/predicate/$in.d.ts +0 -2
  50. package/lib/predicate/index.d.ts +0 -3
  51. package/lib/predicate/utils.d.ts +0 -5
  52. package/lib/query/index.d.ts +0 -1
  53. package/lib/query/logic.d.ts +0 -11
  54. package/lib/stream/aggregate.d.ts +0 -7
  55. package/lib/types/accumulator.d.ts +0 -30
  56. package/lib/types/aggregate.d.ts +0 -9
  57. package/lib/types/expr.d.ts +0 -27
  58. package/lib/types/extern.d.ts +0 -3
  59. package/lib/types/index.d.ts +0 -8
  60. package/lib/types/lookup.d.ts +0 -19
  61. package/lib/types/machine.d.ts +0 -17
  62. package/lib/types/predicate.d.ts +0 -11
  63. package/lib/types/query.d.ts +0 -12
  64. package/lib/types/stream.d.ts +0 -71
  65. package/lib/update/index.d.ts +0 -1
  66. package/lib/update/updater.d.ts +0 -19
  67. package/lib/utils/before.d.ts +0 -3
  68. package/lib/utils/coll.d.ts +0 -5
  69. package/lib/utils/db-indexes.d.ts +0 -4
  70. package/lib/utils/guard.d.ts +0 -19
  71. package/lib/utils/index.d.ts +0 -4
  72. package/lib/utils/json.d.ts +0 -9
  73. package/lib/utils/log.d.ts +0 -1
  74. package/lib/utils/map-object.d.ts +0 -41
  75. package/lib/utils/merge/combiners.d.ts +0 -6
  76. package/lib/utils/merge/index.d.ts +0 -2
  77. package/lib/utils/merge/merge.d.ts +0 -10
  78. package/lib/utils/merge/next-winner.d.ts +0 -3
  79. package/lib/utils/merge/types.d.ts +0 -19
  80. package/lib/utils/merge/utils.d.ts +0 -4
  81. package/lib/utils/tear-down.d.ts +0 -2
  82. package/lib/watch.d.ts +0 -34
  83. package/test/mongodb.d.ts +0 -9
  84. package/test/uri.d.ts +0 -1
  85. package/types/class.d.ts +0 -1
  86. package/types/extern.d.ts +0 -3
  87. package/types/global.d.ts +0 -50
  88. package/types/hkt.d.ts +0 -56
  89. package/types/index.d.ts +0 -6
  90. package/types/json.d.ts +0 -54
  91. package/types/mongo.d.ts +0 -24
  92. package/types/view.d.ts +0 -35
package/index.js CHANGED
@@ -1253,19 +1253,48 @@ const $lookupRaw = ({ field1, field2 }, { coll, exec, input }, k2, k, includeNul
1253
1253
 
1254
1254
  const asBefore = (f) => f(() => root().of('before'));
1255
1255
 
1256
- const createIndex = async (collection, indexSpec, options) => {
1256
+ const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
1257
+ const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
1258
+ const json = (a) => replace(JSON.stringify(a));
1259
+ const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
1260
+
1261
+ const indexMap = new Map();
1262
+ const createIndex = async (collection, indexSpec, op) => {
1263
+ const { name, ...options } = op ?? {};
1264
+ const map = indexMap.get(collection.collectionName) ?? new Map();
1265
+ indexMap.set(collection.collectionName, map);
1266
+ const indexKey = `${JSON.stringify(indexSpec)}-${JSON.stringify(options)}`;
1267
+ if (map.has(indexKey)) {
1268
+ await map.get(indexKey);
1269
+ return;
1270
+ }
1271
+ const promise = createIndexWithRetry(collection, indexSpec, op);
1272
+ map.set(indexKey, promise);
1273
+ await promise;
1274
+ };
1275
+ const createIndexWithRetry = async (collection, indexSpec, options) => {
1276
+ const log = () => { };
1277
+ log('Creating index', { collection: collection.collectionName, indexSpec, options });
1257
1278
  while (true) {
1258
1279
  try {
1259
1280
  await collection.createIndex(indexSpec, options);
1281
+ log('Index created', { collection: collection.collectionName, indexSpec, options });
1260
1282
  }
1261
1283
  catch (e) {
1262
1284
  if ([85, 276].includes(e.code)) {
1285
+ log('Index created with different name', e.code, { collection: collection.collectionName, indexSpec, options });
1263
1286
  break;
1264
1287
  }
1265
1288
  if (e.code == 12587) {
1266
1289
  await new Promise(res => setTimeout(res, 300));
1267
1290
  continue;
1268
1291
  }
1292
+ log('Error creating index', {
1293
+ collection: collection.collectionName,
1294
+ indexSpec,
1295
+ options,
1296
+ error: e,
1297
+ });
1269
1298
  console.error('Error creating index', e);
1270
1299
  throw e;
1271
1300
  }
@@ -1335,7 +1364,7 @@ const mergeIterators = (params) => {
1335
1364
  };
1336
1365
  };
1337
1366
 
1338
- const state = { steady: false, f: (_) => Promise.resolve() };
1367
+ const state = { steady: false };
1339
1368
  let timeout = null;
1340
1369
  const firstWorksMerge = (iters) => {
1341
1370
  const iterator = () => {
@@ -1343,7 +1372,7 @@ const firstWorksMerge = (iters) => {
1343
1372
  const sources = { ...results };
1344
1373
  return mergeIterators({
1345
1374
  sources,
1346
- interrupt: key => false,
1375
+ interrupt: key => state.steady,
1347
1376
  hooks: {
1348
1377
  start: (frame, result) => {
1349
1378
  if (!frame.info.job)
@@ -1411,8 +1440,6 @@ const runCont = async (it, cb) => {
1411
1440
 
1412
1441
  const merge = ({ lsource: L, rsource: R, }) => mergeIterators({ sources: { L, R } });
1413
1442
  const join = ({ lField, rField, left, right, as }, leftSnapshot, rightSnapshot, stagesUntilNextLookup, outerLeft) => {
1414
- createIndex(leftSnapshot.coll, { [`before.${lField.str()}`]: 1 }).catch(e => e.code == 86 || Promise.reject(e));
1415
- createIndex(rightSnapshot.coll, { [`before.${rField.str()}`]: 1 }).catch(e => e.code == 86 || Promise.reject(e));
1416
1443
  const rightJoinField = { field1: lField, field2: rField };
1417
1444
  const joinId = 'left';
1418
1445
  const joinR_Snapshot = asBefore($lookupRaw(rightJoinField, rightSnapshot, as, joinId, outerLeft));
@@ -1432,6 +1459,15 @@ const join = ({ lField, rField, left, right, as }, leftSnapshot, rightSnapshot,
1432
1459
  const getRunner = (f, stages, final) => f.out({
1433
1460
  raw: first => concatStages(stages, final.raw(first)),
1434
1461
  teardown: final.teardown,
1462
+ }, async () => {
1463
+ log('Creating indexes for lookup left', leftSnapshot.coll.collectionName, {
1464
+ [`before.${lField.str()}`]: 1,
1465
+ });
1466
+ await createIndex(leftSnapshot.coll, { [`before.${lField.str()}`]: 1 }, { name: 'left_' + new mongodb.UUID().toString('base64') });
1467
+ log('Creating indexes for lookup right', rightSnapshot.coll.collectionName, {
1468
+ [`before.${rField.str()}`]: 1,
1469
+ });
1470
+ await createIndex(rightSnapshot.coll, { [`before.${rField.str()}`]: 1 }, { name: 'right_' + new mongodb.UUID().toString('base64') });
1435
1471
  });
1436
1472
  const lRunner = getRunner(left, lRunnerInput, finalInput);
1437
1473
  const rRunner = getRunner(right, rRunnerInput, finalInput);
@@ -1526,11 +1562,6 @@ const $insertPart = (out, ext) => {
1526
1562
  const $insert = (out) => $insertPart(out, {});
1527
1563
  const assertNotNull = (expr) => expr;
1528
1564
 
1529
- const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
1530
- const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
1531
- const json = (a) => replace(JSON.stringify(a));
1532
- const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
1533
-
1534
1565
  const aggregate = (db, streamName, input, snapshot = true, start = Date.now()) => input(({ coll, input }) => {
1535
1566
  const req = {
1536
1567
  aggregate: coll.collectionName,
@@ -1659,11 +1690,8 @@ const loop = async (db) => {
1659
1690
  log('starting oplog loop');
1660
1691
  let notify = makePromise();
1661
1692
  let batch = [];
1662
- const last = db.collection('__last');
1663
1693
  const run = async () => {
1664
- for await (const event of tailOplog(db, {
1665
- since: (await last.findOne({ _id: 'oplog' }))?.ts,
1666
- })) {
1694
+ for await (const event of tailOplog(db, {})) {
1667
1695
  if (event?.fields.size === 0)
1668
1696
  continue;
1669
1697
  batch = event && batch ? [...batch, event] : null;
@@ -1675,11 +1703,6 @@ const loop = async (db) => {
1675
1703
  while (true) {
1676
1704
  await notify.promise;
1677
1705
  const b = batch;
1678
- if (b?.length) {
1679
- last
1680
- .updateOne({ _id: 'oplog' }, { $set: { ts: b[b.length - 1].doc.ts } }, { upsert: true })
1681
- .catch(() => { });
1682
- }
1683
1706
  batch = [];
1684
1707
  notify = makePromise();
1685
1708
  yield b;
@@ -1741,7 +1764,12 @@ const register = (coll, keys, cb) => {
1741
1764
  watchers.delete(ns);
1742
1765
  };
1743
1766
  };
1744
- const makeWatchStream = ({ collection, projection: p, hardMatch: m }, streamName) => {
1767
+ let maxKeysRemoved = null;
1768
+ const makeWatchStream = async ({ collection, projection: p, hardMatch: m }, streamName) => {
1769
+ const { db } = collection.s;
1770
+ await (maxKeysRemoved ??= Promise.all((await db.listCollections({}, { nameOnly: true }).toArray()).map(x => void db
1771
+ .collection(x.name)
1772
+ .updateMany({ touchedAt: new mongodb.MaxKey() }, [{ $set: { touchedAt: '$$CLUSTER_TIME' } }]))).then(() => { }));
1745
1773
  const projection = { ...(p ? mapExactToObject(p, v => v) : {}), deletedAt: 1 };
1746
1774
  let resolve = (_) => { };
1747
1775
  const promise = new Promise(r => (resolve = r));
@@ -1851,44 +1879,17 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1851
1879
  streamNames[streamName] = hash;
1852
1880
  else if (streamNames[streamName] != hash)
1853
1881
  throw new Error(`streamName ${streamName} already used`);
1854
- db.command({
1855
- collMod: coll,
1856
- changeStreamPreAndPostImages: { enabled: true },
1857
- });
1858
- createIndex(collection, { touchedAt: 1 }, hardMatch
1859
- ? {
1860
- partialFilterExpression: hardMatch.raw(root()),
1861
- name: 'touchedAt_hard_' + new mongodb.UUID().toString('base64'),
1862
- }
1863
- : {}).catch(e => e.code == 86 || Promise.reject(e));
1864
1882
  const last = db.collection('__last');
1865
1883
  const snapshotCollection = db.collection(coll + '_' + streamName + '_snapshot');
1866
- createIndex(snapshotCollection, { before: 1 }, {
1867
- partialFilterExpression: { before: null },
1868
- name: 'before_' + new mongodb.UUID().toString('base64'),
1869
- });
1870
- createIndex(snapshotCollection, { updated: 1 }, {
1871
- partialFilterExpression: { updated: true },
1872
- name: 'updated_' + new mongodb.UUID().toString('base64'),
1873
- });
1874
- createIndex(snapshotCollection, { updated: 1, after: 1, before: 1 }, {
1875
- partialFilterExpression: { updated: true, after: null, before: null },
1876
- name: 'updated_nulls_' + new mongodb.UUID().toString('base64'),
1877
- });
1878
- createIndex(snapshotCollection, { updated: 1, after: 1 }, {
1879
- partialFilterExpression: { updated: true, after: null },
1880
- name: 'updated_no_after_' + new mongodb.UUID().toString('base64'),
1881
- });
1882
- createIndex(snapshotCollection, { updated: 1 }, {
1883
- partialFilterExpression: { updated: true, after: null, before: null },
1884
- name: 'updated_nulls_' + new mongodb.UUID().toString('base64'),
1885
- });
1886
1884
  const job = {};
1887
- const run = (finalInput) => {
1888
- const clear = async () => Promise.all([
1889
- snapshotCollection.drop().catch(noop).catch(noop),
1890
- last.deleteOne({ _id: streamName }),
1891
- ]);
1885
+ const run = (finalInput, setup) => {
1886
+ const dropSnapshot = async () => {
1887
+ await snapshotCollection.drop().catch(noop);
1888
+ log('snapshot collection dropped', streamName, `db['${snapshotCollection.collectionName}'].drop()`);
1889
+ log('with', [...(indexMap.get(snapshotCollection.collectionName)?.keys() ?? [])], 'indexes in map before deletion');
1890
+ indexMap.delete(snapshotCollection.collectionName);
1891
+ };
1892
+ const clear = async () => Promise.all([dropSnapshot(), last.deleteOne({ _id: streamName })]);
1892
1893
  const withStop = (next, tr) => {
1893
1894
  return addTeardown(() => ({ stop, next: next(), clear }), tr);
1894
1895
  };
@@ -1913,7 +1914,9 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1913
1914
  const step0 = () => synchronousPromise.SynchronousPromise.resolve(next(step1, 'empty new collection'));
1914
1915
  const stop = withStop(step0);
1915
1916
  const step1 = async () => {
1917
+ log('reset collection', streamName, `db['${snapshotCollection.collectionName}'].updateMany( updated: true }, { $set: { updated: false, after: null } })`);
1916
1918
  await snapshotCollection.updateMany({ updated: true }, { $set: { updated: false, after: null } });
1919
+ log('reset collection done', streamName);
1917
1920
  return next(step2, 'get last update');
1918
1921
  };
1919
1922
  const step2 = () => Promise.all([
@@ -1936,13 +1939,45 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1936
1939
  };
1937
1940
  const [action, out] = actions[method](collection, params);
1938
1941
  log('teardown', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
1939
- await Promise.all([snapshotCollection.drop().catch(noop), action]);
1942
+ await Promise.all([dropSnapshot(), action]);
1940
1943
  log('teardown done', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
1941
1944
  };
1942
1945
  if (!same) {
1943
1946
  log('not same, new data', streamName, data);
1944
1947
  await handleTeardown(exists ?? { data });
1945
1948
  }
1949
+ log('creating indexes');
1950
+ await createIndex(snapshotCollection, { before: 1 }, {
1951
+ partialFilterExpression: { before: null },
1952
+ name: 'before_' + new mongodb.UUID().toString('base64'),
1953
+ });
1954
+ await createIndex(snapshotCollection, { updated: 1 }, {
1955
+ partialFilterExpression: { updated: true },
1956
+ name: 'updated_' + new mongodb.UUID().toString('base64'),
1957
+ });
1958
+ await createIndex(snapshotCollection, { updated: 1, after: 1, before: 1 }, {
1959
+ partialFilterExpression: { updated: true, after: null, before: null },
1960
+ name: 'updated_nulls_' + new mongodb.UUID().toString('base64'),
1961
+ });
1962
+ await createIndex(snapshotCollection, { updated: 1, after: 1 }, {
1963
+ partialFilterExpression: { updated: true, after: null },
1964
+ name: 'updated_no_after_' + new mongodb.UUID().toString('base64'),
1965
+ });
1966
+ await createIndex(snapshotCollection, { updated: 1 }, {
1967
+ partialFilterExpression: { updated: true, after: null, before: null },
1968
+ name: 'updated_nulls_' + new mongodb.UUID().toString('base64'),
1969
+ });
1970
+ await db.command({
1971
+ collMod: coll,
1972
+ changeStreamPreAndPostImages: { enabled: true },
1973
+ });
1974
+ await createIndex(collection, { touchedAt: 1 }, hardMatch
1975
+ ? {
1976
+ partialFilterExpression: hardMatch.raw(root()),
1977
+ name: 'touchedAt_hard_' + new mongodb.UUID().toString('base64'),
1978
+ }
1979
+ : {});
1980
+ await setup?.();
1946
1981
  await after?.();
1947
1982
  return nextData([])(async () => {
1948
1983
  await new Promise(resolve => setTimeout(resolve, 1000));
@@ -1982,7 +2017,8 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1982
2017
  const first = ts === undefined;
1983
2018
  const stages = finalInput.raw(first);
1984
2019
  await last.updateOne({ _id: streamName }, { $set: { job: 1 } }, { upsert: true });
1985
- const stream = makeStream();
2020
+ const stream = await makeStream();
2021
+ const nextRes = stream.tryNext();
1986
2022
  const aggResult = await aggregate(pdb, streamName, c => c({
1987
2023
  coll: snapshotCollection,
1988
2024
  input: link()
@@ -1996,7 +2032,6 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1996
2032
  .with(input.delta)
1997
2033
  .with(stages).stages,
1998
2034
  }), false, start);
1999
- const nextRes = stream.tryNext();
2000
2035
  stages.at(-1).$merge.into.coll;
2001
2036
  return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream, nextRes, first }), 'remove handled deleted updated', () => stream.close());
2002
2037
  };
@@ -2075,14 +2110,6 @@ const executes$1 = (view, input, streamName, needs) => {
2075
2110
  : pre;
2076
2111
  const job = {};
2077
2112
  const db = collection.s.db, coll = collection.collectionName;
2078
- db.command({
2079
- collMod: coll,
2080
- changeStreamPreAndPostImages: { enabled: true },
2081
- });
2082
- createIndex(collection, { touchedAt: 1 }, {
2083
- partialFilterExpression: { deletedAt: { $eq: null } },
2084
- name: 'touchedAt_' + new mongodb.UUID().toString('base64'),
2085
- });
2086
2113
  const last = db.collection('__last');
2087
2114
  const projectInput = projection &&
2088
2115
  $project_(spread(projection, {
@@ -2119,10 +2146,24 @@ const executes$1 = (view, input, streamName, needs) => {
2119
2146
  };
2120
2147
  const step0 = () => synchronousPromise.SynchronousPromise.resolve(next(step1, 'get last update'));
2121
2148
  const stop = withStop(step0);
2122
- const step1 = () => Promise.all([
2123
- last.findOne({ _id: streamName, data }),
2124
- last.findOne({ _id: streamName }),
2125
- ]).then(ts => next(step2_5(ts), 'handle teardown'));
2149
+ const step1 = async () => {
2150
+ log('creating indexes');
2151
+ await db.command({
2152
+ collMod: coll,
2153
+ changeStreamPreAndPostImages: { enabled: true },
2154
+ });
2155
+ await createIndex(collection, { touchedAt: 1 }, {
2156
+ partialFilterExpression: { deletedAt: { $eq: null } },
2157
+ name: 'touchedAt_' + new mongodb.UUID().toString('base64'),
2158
+ });
2159
+ log('stream started', { streamName, data });
2160
+ const ts = await Promise.all([
2161
+ last.findOne({ _id: streamName, data }),
2162
+ last.findOne({ _id: streamName }),
2163
+ ]);
2164
+ log('got last update', { streamName, ts });
2165
+ return next(step2_5(ts), 'handle teardown');
2166
+ };
2126
2167
  const step2_5 = ([same, exists]) => async () => {
2127
2168
  const handleTeardown = async (last) => {
2128
2169
  if (!last.data)
@@ -2152,12 +2193,12 @@ const executes$1 = (view, input, streamName, needs) => {
2152
2193
  const makeStream = () => makeWatchStream(view, streamName);
2153
2194
  const step4 = (lastTS) => async () => {
2154
2195
  const raw = stages(lastTS).with(finalInput.raw(lastTS === null)).stages;
2155
- const stream = makeStream();
2196
+ const stream = await makeStream();
2197
+ const nextRes = stream.tryNext();
2156
2198
  const aggResult = await aggregate(pdb, streamName, c => c({
2157
2199
  coll: collection,
2158
2200
  input: raw,
2159
2201
  }));
2160
- const nextRes = stream.tryNext();
2161
2202
  return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream, nextRes }), 'update __last', () => stream.close());
2162
2203
  };
2163
2204
  const step7 = (l) => async () => {
package/package.json CHANGED
@@ -3,7 +3,7 @@
3
3
  "module": "index.esm.js",
4
4
  "typings": "index.d.ts",
5
5
  "name": "@omegup/msync",
6
- "version": "0.1.22",
6
+ "version": "0.1.24",
7
7
  "dependencies": {
8
8
  "dayjs": "^1.11.9",
9
9
  "dotenv": "^16.3.1",
@@ -1,10 +0,0 @@
1
- import type { AppMap, Arr, N, O, Rec } from '../../types';
2
- import type { ExprHKT } from '../expression/concat';
3
- import { type NoRaw } from '../expression/val';
4
- import type { DeltaAccumulator, Expr, Part } from '../types';
5
- export declare const $sum: <D extends O, C = unknown>(expr: Expr<number | N, D, C>) => DeltaAccumulator<D, number, C>;
6
- export declare const $accumulator: <D, T, Ctx, A extends readonly unknown[]>(init: () => NoRaw<T>, accumulateArgs: AppMap<ExprHKT<Part<D>, Ctx>, A>, accumulate: (a: NoRaw<T>, ...args: NoRaw<A>) => NoRaw<T>, merge: (...args: NoRaw<[T | N, T]>) => NoRaw<T>) => DeltaAccumulator<D, T, Ctx>;
7
- export declare const $countDict: <D extends O, C = unknown>(expr: Expr<string, D, C>) => DeltaAccumulator<D, Rec<string, number>, C>;
8
- export declare const $pushDict: <D extends O, V, C = unknown>(key: Expr<string, D, C>, value: Expr<V, D, C>) => DeltaAccumulator<D, Rec<string, Rec<"0" | "1", Arr<V>>>, C>;
9
- export declare const $keys: <D extends O, C = unknown>(expr: Expr<Rec<string, number>, D, C>) => Expr<Arr<string>, D, C>;
10
- export declare const $entries: <D extends O, V, C = unknown>(expr: Expr<Rec<string, Rec<"1" | "0", Arr<V>>>, D, C>) => Expr<Arr<Rec<"k", string> & Rec<"v", V>>, D, C>;
@@ -1,28 +0,0 @@
1
- import type { IdHKT, OPick, RWCollection, Timestamp } from '../../types';
2
- import type { doc, ID, N, O, rawItem, Rec, Replace, RORec, StrKey } from '../../types/json';
3
- import { type ExprsExact } from '../expression/concat';
4
- import type { Delta, Expr, IsDeleted, Model, RawStages, StreamRunnerParam, TS } from '../types';
5
- import { type Exact } from '../utils/map-object';
6
- type OutInputE<T, E, A = T | null> = ID & Rec<'after', A> & E;
7
- type Allowed<K extends string> = Exclude<K, keyof (TS & ID)>;
8
- export type Patch<V, KK extends StrKey<V> = StrKey<V>> = ((OPick<V, Allowed<KK>> & ID) | (Rec<Allowed<KK>, N> & ID)) & TS;
9
- type TakeDoc<V, E = ID, KK extends StrKey<V> = StrKey<V>> = OPick<V, Allowed<KK>> & E;
10
- type ND = {
11
- readonly deletedAt?: null;
12
- };
13
- type SafeE<E> = Omit<E, `$${string}` | keyof ID>;
14
- export declare const getWhenMatchedForMerge: <Out extends Model, P extends Model, K extends keyof IsDeleted>(whenNotMatched: "discard" | "fail" | "insert") => RawStages<O, Out, Out | Replace<Out, P>, RORec<"new", Replace<P, RORec<K, Timestamp>>>>;
15
- export declare const getWhenMatched: <Out extends Model, P extends Model, K extends keyof IsDeleted>(whenNotMatched: "discard" | "fail" | "insert") => RawStages<O, Rec<"old" | "merged", Out | Replace<Out, P>>, Out | Replace<Out, P>>;
16
- type MergeCollection<V extends O, Out extends Model> = {
17
- coll: RWCollection<Out | Replace<Out, Patch<V>> | Replace<Patch<V>, IsDeleted>, Out>;
18
- whenNotMatched: 'discard';
19
- } | {
20
- coll: RWCollection<Out | Replace<Out, Patch<V>>, Out>;
21
- whenNotMatched: 'fail';
22
- };
23
- export declare const $mergeId: <V extends O>() => <SourcePart extends doc, Out extends Model, E = unknown, EEE extends RORec<string, rawItem> = {}>(out: MergeCollection<V, Out>, keys: ExprsExact<TakeDoc<V, unknown>, SourcePart>, id: Expr<string, OutInputE<TakeDoc<V>, E, null>>, ext: Exact<Omit<SafeE<EEE>, keyof (ND & TS)>, IdHKT>) => StreamRunnerParam<OutInputE<SourcePart, E>, "out">;
24
- export declare const $simpleMergePart: <V extends O>() => <Source extends doc, Out extends Model, EEE extends RORec<string, rawItem>>(out: MergeCollection<V, Out>, keys: ExprsExact<TakeDoc<V, unknown>, Source>, ext: Exact<Omit<SafeE<EEE>, keyof (ND & TS)>, IdHKT>) => StreamRunnerParam<Source, "out">;
25
- export declare const $simpleMerge: <V extends O>() => <Source extends doc, Out extends Model>(out: RWCollection<Out | Replace<Out, Patch<V>> | Replace<Patch<V>, IsDeleted>, Out>, keys: ExprsExact<TakeDoc<V, unknown>, Source>, whenNotMatched?: "fail" | "discard") => StreamRunnerParam<Source, "out">;
26
- export declare const $mergePart: <V extends O>() => <Out extends Model, SourcePart extends doc, EEE extends RORec<string, rawItem>>(out: RWCollection<Out | Replace<Out, Patch<V>>, Out>, keys: ExprsExact<TakeDoc<V, unknown>, SourcePart>, ext: Exact<Omit<SafeE<EEE>, keyof (ND & TS)>, IdHKT>) => StreamRunnerParam<Delta<SourcePart>, "out">;
27
- export declare const $merge: <V extends O>() => <Out extends Model, SourcePart extends doc>(out: RWCollection<Out | Replace<Out, Patch<V>>, Out>, keys: ExprsExact<TakeDoc<V, unknown>, SourcePart>) => StreamRunnerParam<Delta<SourcePart>, "out">;
28
- export {};
@@ -1,15 +0,0 @@
1
- import type { IdHKT, RWCollection } from '../../types';
2
- import type { ID, O, RORec, doc, rawItem } from '../../types/json';
3
- import { type ExprsExact } from '../expression/concat';
4
- import type { Del, Delta, Expr, StreamRunnerParam, TS } from '../types';
5
- import { type Exact } from '../utils/map-object';
6
- type ND = {
7
- readonly deletedAt?: null;
8
- };
9
- type SafeE<E> = Omit<E, `$${string}` | keyof ID>;
10
- export type Merge<T extends doc, E> = Omit<SafeE<E>, keyof (ND & TS)> & ((T & ND & TS) | Del);
11
- export declare const $insertX: <T extends doc, D extends O, EEE extends RORec<string, rawItem>>(out: RWCollection<Merge<T, EEE>>, expr: Expr<T, D>, map: (x: Expr<T & ND & TS & Omit<SafeE<EEE>, keyof (ND & TS)>, D>) => Expr<Merge<T, EEE>, D>, ext: Exact<Omit<SafeE<EEE>, keyof (ND & TS)>, IdHKT>, extExpr: ExprsExact<Omit<SafeE<EEE>, keyof (ND & TS)>, unknown>) => StreamRunnerParam<D, "out">;
12
- export declare const $simpleInsert: <T extends doc>(out: RWCollection<Merge<T, {}>>) => StreamRunnerParam<T, "out">;
13
- export declare const $insertPart: <T extends doc, EEE extends RORec<string, rawItem>>(out: RWCollection<Merge<T, EEE>>, ext: Exact<Omit<SafeE<EEE>, keyof (ND & TS)>, IdHKT>) => StreamRunnerParam<Delta<T>, "out">;
14
- export declare const $insert: <T extends doc>(out: RWCollection<Merge<T, {}>>) => StreamRunnerParam<Delta<T>, "out">;
15
- export {};
@@ -1,12 +0,0 @@
1
- import type { RWCollection, WriteonlyCollection } from '../../../types';
2
- import type { AsLiteral, ID, O, Rec, doc, notArr } from '../../../types/json';
3
- import { type ExprsExact } from '../../expression/concat';
4
- import type { Delta, DeltaAccumulators, Expr, StreamRunnerParam, TS } from '../../types';
5
- import type { MergeInto } from '../out';
6
- import { type Extra, type IdAndTsKeys, type Loose, type MergedInput, type Strict, type V_Grp } from './utils/sub-merge';
7
- type Denied<GID = never> = keyof (TS & ID) | GID;
8
- type GI<GG> = Exclude<GG, keyof TS>;
9
- export declare const $groupMerge: <T extends O, Grp extends notArr, V extends O, GG extends string, EE = {}, Out extends Loose<Grp, V, GG> = Loose<Grp, V, GG>>(id: Expr<Grp, T>, args: DeltaAccumulators<T, O & Omit<V, Denied<GI<GG>>>>, out: MergeInto<Strict<Grp, V, GG, EE>, Out, WriteonlyCollection<MergedInput<Out, V, Grp, GG, EE>>>, gid: AsLiteral<GI<GG>>, extra: ExprsExact<Extra<EE, V, GG>, V_Grp<V, GG, Grp>>, idPrefix?: string) => StreamRunnerParam<Delta<T>, "out">;
10
- export declare const $groupId: <T extends O, V extends O, EE = {}, Out extends Loose<string, V, "_id"> = Loose<string, V, "_id">>(id: Expr<string, T>, args: DeltaAccumulators<T, O & Omit<V, Denied>>, out: RWCollection<MergedInput<Out, V, string, "_id", EE>, Out>, extra: ExprsExact<Omit<EE, IdAndTsKeys | keyof Omit<V, IdAndTsKeys>>, doc & Omit<V, IdAndTsKeys>>) => StreamRunnerParam<Delta<T>, "out">;
11
- export declare const $group: <T extends O, Grp extends notArr, V extends O, EE = {}, Out extends Loose<Grp, V, "_grp"> = Loose<Grp, V, "_grp">>(id: Expr<Grp, T>, args: DeltaAccumulators<T, O & Omit<V, Denied<"_grp">>>, out: RWCollection<MergedInput<Out, V, Grp, "_grp", EE> | Strict<Grp, V, "_grp", EE>, Out>, extra: ExprsExact<Omit<EE, IdAndTsKeys | "_grp" | Exclude<keyof V, IdAndTsKeys | "_grp">>, Rec<"_grp", Grp> & Omit<V, IdAndTsKeys | "_grp">>, idPrefix?: string) => StreamRunnerParam<Delta<T>, "out">;
12
- export {};
@@ -1,2 +0,0 @@
1
- export { $groupMerge, $groupId, $group } from './$group-merge';
2
- export type { Loose, Strict } from './utils/sub-merge';
@@ -1,6 +0,0 @@
1
- import type { O, RORec, Rec } from '../../../../types';
2
- import { type ExprsExact } from '../../../expression/concat';
3
- import type { Delta, Expr, RawStages } from '../../../types';
4
- import type { DeltaAccumulators } from '../../../types/accumulator';
5
- export type WithItem<V, Grp> = Rec<'_id', Grp> & Rec<'item', O<V>>;
6
- export declare const subGroup: <T extends O, Grp, V extends O, GID extends string>(id: Expr<Grp, T>, args: DeltaAccumulators<T, V>, addGrp: <D extends Rec<"_id", Grp>>(src: ExprsExact<V, D>) => ExprsExact<RORec<GID, Grp> & V, D>) => RawStages<unknown, Delta<T>, Rec<GID, Grp> & V>;
@@ -1,20 +0,0 @@
1
- import type { AsLiteral, ID, O, Rec, Replace, WriteonlyCollection, notArr } from '../../../../types';
2
- import { type ExprsExact } from '../../../expression/concat';
3
- import type { Model, RawStages, TS } from '../../../types';
4
- import type { DeltaAccumulators } from '../../../types/accumulator';
5
- import { type MergeInto } from '../../out';
6
- type GI<GG> = Exclude<GG, keyof TS>;
7
- export type IdAndTsKeys = keyof (TS & ID);
8
- type V<VV, GG extends string> = Omit<VV, IdAndTsKeys | GI<GG>>;
9
- export type Prepare<Grp, GG extends string> = TS & ID & Rec<GI<GG>, Grp>;
10
- type Par<T> = {
11
- [P in keyof T]?: T[P] | null;
12
- };
13
- export type Loose<Grp, VV, GG extends string> = Prepare<Grp, GG> & Par<V<VV, GG>>;
14
- export type Strict<Grp, VV, GG extends string, EE> = Prepare<Grp, GG> & V<VV, GG> & Omit<EE, IdAndTsKeys | GI<GG> | keyof V<VV, GG>>;
15
- export type V_Grp<VV, GG extends string, Grp> = Rec<GI<GG>, Grp> & V<VV, GG>;
16
- export type Extra<EE, VV, GG extends string> = Omit<EE, IdAndTsKeys | GI<GG> | keyof V<VV, GG>>;
17
- type OrReplace<T, V> = T | Replace<T, V>;
18
- export type MergedInput<Out, VV, Grp, GG extends string, EE> = OrReplace<Replace<Replace<Out, V<VV, GG>>, Extra<EE, VV, GG>> & Model, TS & ID & V_Grp<VV, GG, Grp> & Extra<EE, VV, GG>>;
19
- export declare const subMerge: <T extends O, Grp extends notArr, VV extends O, GG extends string, EE = {}, Out extends Loose<Grp, VV, GG> = Loose<Grp, VV, GG>>(args: DeltaAccumulators<T, V<VV, GG>>, out: MergeInto<Strict<Grp, VV, GG, EE>, Out, WriteonlyCollection<MergedInput<Out, VV, Grp, GG, EE>>>, gid: AsLiteral<GI<GG>>, extra: ExprsExact<Extra<EE, VV, GG>, V_Grp<VV, GG, Grp>>, idPrefix: string, first: boolean) => RawStages<unknown, V_Grp<VV, GG, Grp>, "out">;
20
- export {};
@@ -1,9 +0,0 @@
1
- export * from './group';
2
- export * from './lookup';
3
- export * from './match';
4
- export * from './set';
5
- export * from './unwind';
6
- export * from './$merge';
7
- export * from './$upsert';
8
- export * from './out';
9
- export { $group_ } from './mongo-stages';
@@ -1,10 +0,0 @@
1
- import type { AsLiteral, ID, N, O, Rec, doc, rawItem } from '../../../types';
2
- import { Field } from '../../field';
3
- import type { BA, Before, Delta, RawStages, TStages, UBefore } from '../../types';
4
- type s = string;
5
- type Both<K1 extends s, LE, KK2 extends s, RE, N1 extends null = never, N2 extends null = never> = Delta<Rec<K1, LE | N1> & Rec<Exclude<KK2, BA | K1>, RE | N2> & ID>;
6
- export declare const $lookupDelta: <LQ extends O, LE extends LQ & doc, RQ extends O, RE extends RQ & doc, BRB extends UBefore<RQ>, RS extends UBefore<RQ>, S extends rawItem, K1 extends s, KK2 extends s, N1 extends null = never, N2 extends null = never>({ field1, field2 }: {
7
- field1: Field<LQ, S | N>;
8
- field2: Field<RQ, S | N>;
9
- }, { coll, exec, input }: TStages<RS, UBefore<RQ>, BRB, Before<RE>>, k1: AsLiteral<K1>, k2: AsLiteral<Exclude<KK2, BA | K1>>, k: ([N1] extends [never] ? K1 : never) | ([N2] extends [never] ? Exclude<KK2, BA | K1> : never) | false, includeNull1?: N1, includeNull2?: N2) => RawStages<unknown, Delta<LE>, Both<K1, LE, KK2, RE, N1, N2>>;
10
- export {};
@@ -1,9 +0,0 @@
1
- import type { App, AsLiteral, HKT, ID, O, Rec } from '../../../types';
2
- import { type Field } from '../../field';
3
- import type { Before, RawStages, TStages } from '../../types';
4
- type s = string;
5
- export declare const $lookupRaw: <LQ extends O, LE extends LQ & ID, RQ extends O, RE extends RQ & ID, BRB extends Before<RQ>, RS, S, As extends s, Null extends null = never>({ field1, field2 }: {
6
- field1: Field<LQ, S>;
7
- field2: Field<RQ, S>;
8
- }, { coll, exec, input }: TStages<RS, Before<RQ>, BRB, Before<RE>>, k2: AsLiteral<As>, k: "left", includeNull?: Null) => <F extends HKT<O, O>>(f: <T extends O>() => Field<App<F, T>, T>) => RawStages<App<F, LQ>, App<F, LE>, App<F, LE & Rec<As, RE> & ID>>;
9
- export {};
@@ -1,12 +0,0 @@
1
- import type { AsLiteral, O, RORec, doc, notArr } from '../../../types';
2
- import { type Field } from '../../field';
3
- import type { SnapshotStream, SnapshotStreamExecutionResult } from '../../types/stream';
4
- type Params<As extends string, LQ extends O, RQ extends O, RE extends RQ, S extends notArr> = {
5
- localField: Field<LQ, S>;
6
- foreignField: Field<RQ, S>;
7
- from: SnapshotStreamExecutionResult<RQ, RE>;
8
- as: AsLiteral<As>;
9
- };
10
- export declare const $lookup: <As extends string, LQ extends doc, RQ extends O, RE extends RQ & doc, S extends notArr>(p: Params<As, LQ, RQ, RE, S>) => <LE extends LQ>(l: SnapshotStream<LQ, LE>) => SnapshotStream<LQ, LE & RORec<As, RE>>;
11
- export declare const $outerLookup: <As extends string, LQ extends doc, RQ extends O, RE extends RQ & doc, S extends notArr>(p: Params<As, LQ, RQ, RE, S>) => <LE extends LQ>(l: SnapshotStream<LQ, LE>) => SnapshotStream<LQ, LE & RORec<As, RE | null>>;
12
- export {};
@@ -1,3 +0,0 @@
1
- import type { doc } from '../../../types';
2
- import type { Delta, Expr } from '../../types';
3
- export declare const $matchDelta: <T extends doc>(query: Expr<boolean, T>) => import("../../types").RawStages<unknown, Delta<T>, Delta<T>, unknown, number>;
@@ -1,3 +0,0 @@
1
- import type { doc } from '../../../types';
2
- import type { DeltaStages, Expr } from '../../types';
3
- export declare const $match: <T extends doc>(query: Expr<boolean, T>) => DeltaStages<T, T, T>;
@@ -1,2 +0,0 @@
1
- export { $matchDelta } from './$match-delta';
2
- export { $match } from './$match';
@@ -1,13 +0,0 @@
1
- import type { Arr, O, Rec } from '../../types';
2
- import type { Accumulators, Expr, LookupArgs, Query, RawStages } from '../types';
3
- import type { Updater } from '../update/updater';
4
- export * from './raws';
5
- type s = string;
6
- export declare const $match_: <Q, T extends Q & O, C = unknown>(query?: Query<T, C>) => RawStages<O<T>, T, T, C>;
7
- export declare const $set_: <Q, T extends Q & O, V extends Q & O, C = unknown>(updater: Updater<T, T, V, C>) => RawStages<O<Q>, T, V, C, 1>;
8
- export declare const $replaceWith_: <T extends O, V extends O, C = unknown>(expr: Expr<V, T, C>) => RawStages<O, T, V, C, 1>;
9
- export declare const $unwind_: <T extends O, K extends s, U, Null extends null = never>(k: K, includeNull?: Null) => RawStages<O<T>, T & Rec<K, Arr<U>>, T & Rec<K, Null | U>>;
10
- export declare const $group_: <V extends O>() => <ID, T extends O, C = unknown>(id: Expr<ID, T, C>, args: Accumulators<T, V, C>) => RawStages<O, T, Rec<"_id", ID> & V, C, 1>;
11
- export declare const $documents_: <T extends O, C>(docs: Expr<Arr<T>, unknown, C>) => RawStages<unknown, null, T, C, 1>;
12
- export declare const $project_: <T extends O, K extends import("../..").StrKey<T>>(projection: import("../utils").ExactKeys<K>) => RawStages<T, T, T, unknown, 1>;
13
- export declare const $simpleLookup_: <T extends O, U extends O, R, K extends s, Ctx, C = unknown, S = string>(args: LookupArgs<T, U, R, K, Ctx, C, S>) => RawStages<O<T>, T, T & Rec<K, Arr<U>>, C, 1>;
@@ -1,37 +0,0 @@
1
- import type { O, RWCollection, ReadonlyCollection, Replace, jsonItem } from '../../types';
2
- import type { ExprsExact } from '../expression/concat';
3
- import { type Field } from '../field';
4
- import type { RawStages } from '../types';
5
- export type MergeInto<T extends O, Out extends O, E = unknown> = {
6
- whenNotMatched: 'insert';
7
- into: RWCollection<T, Out> & E;
8
- } | {
9
- whenNotMatched: 'discard' | 'fail';
10
- into: ReadonlyCollection<Out> & E;
11
- };
12
- export type MergeArgs<T extends O, Out extends O, Ctx, In extends O> = {
13
- on: Field<T, jsonItem> & Field<Out, jsonItem>;
14
- } & MergeInto<T, Out> & (({
15
- stages?: never;
16
- } & ({
17
- whenMatched: 'keepExisting' | 'fail';
18
- } | {
19
- whenMatched: 'replace';
20
- into: RWCollection<T, Out>;
21
- } | {
22
- whenMatched: 'merge';
23
- into: RWCollection<Replace<Out, T>, Out>;
24
- })) | {
25
- stages: true;
26
- into: RWCollection<In, Out>;
27
- whenMatched: RawStages<unknown, Out, In, {
28
- new: T;
29
- }>;
30
- } | {
31
- stages: 'ctx';
32
- vars: ExprsExact<Ctx, T>;
33
- into: RWCollection<In, Out>;
34
- whenMatched: RawStages<unknown, Out, In, Ctx>;
35
- });
36
- export declare const $merge_: <T extends O, Out extends O = T, Ctx = unknown, In extends O = Out>({ into, on, whenNotMatched, ...notMatched }: MergeArgs<T, Out, Ctx, In>) => RawStages<unknown, T, "out", unknown, number>;
37
- export declare const $merge2: <T extends O, Out extends O = T, Ctx = unknown, In extends O = Out>(args: MergeArgs<T, Out, Ctx, In>) => RawStages<unknown, T, "out", unknown, number>;
@@ -1,33 +0,0 @@
1
- import type { App, HKT, O2, O3, O, RawObj } from '../../types';
2
- import type { Delta, DeltaStages, FRawStages, RawStages, Stream, TStages } from '../types';
3
- import type { Equal } from '../utils/guard';
4
- type n = number;
5
- export declare const asStages: <Q, T extends Q, V extends Q, C = unknown, M extends n = number>(x: readonly RawObj[]) => RawStages<Q, T, V, C, M>;
6
- export declare const same: <T extends Dom, V extends Dom, F extends HKT<Dom, Q>, Dom = unknown, Q = unknown, C = unknown, M extends n = number>(_: Equal<Dom, T, V>) => RawStages<Q, App<F, T>, App<F, V>, C, M>;
7
- export declare const concatStages: <Q, T extends Q, V extends Q, W extends Q, C, M extends n = number>(part1: RawStages<Q, T, V, C, M>, part2: RawStages<Q, V, W, C, M>) => RawStages<Q, T, W, C, M>;
8
- export declare const concatFStages: <Q, T extends Q & O, V extends Q & O, W extends Q & O, C, M extends n = number>(part1: FRawStages<Q, T, V, C, M>, part2: FRawStages<Q, V, W, C, M>) => FRawStages<Q, T, W, C, M>;
9
- export declare const concatDelta: <Q extends O, T extends Q, V extends Q, W extends Q>(part1: DeltaStages<Q, T, V>, part2: DeltaStages<Q, V, W>) => DeltaStages<Q, T, W>;
10
- export type Concat<out Q, in T extends Q, out V extends Q, in out C, in out M extends n = n> = {
11
- with: <Q2, W extends Q2>(extra: RawStages<Q | Q2, V, W, C, M>) => Concat<Q | Q2, T, W, C, M>;
12
- stages: RawStages<Q, T, V, C, M>;
13
- };
14
- type FConcat<out Q, in T extends Q & O, out V extends Q & O, in out C, in out M extends n = n> = {
15
- with: <Q2, W extends Q2 & O>(extra: FRawStages<Q | Q2, V, W, C, M>) => FConcat<Q | Q2, T, W, C, M>;
16
- stages: FRawStages<Q, T, V, C, M>;
17
- };
18
- export type DeltaPipe<Q extends O, T extends Q, F extends HKT<O2>, G extends HKT<O3>> = {
19
- with: <Q2 extends O, V extends Q2>(map: (a: Stream<Q, T, F, G>) => Stream<Q | Q2, V, F, G>) => DeltaPipe<Q | Q2, V, F, G>;
20
- then: <Q2 extends O, V extends Q2>(next: App<G, [Q2 | T, T, V]>) => DeltaPipe<Q | Q2, V, F, G>;
21
- get: () => App<F, [Q, T]>;
22
- };
23
- export declare const pipe: <Q extends O, S extends Q, T extends Q, F extends HKT<O2>, G extends HKT<O3>>(stream: Stream<Q, S, F, G>, s: App<G, [Q, S, T]>, concat: <Q_1 extends O, T_1 extends Q_1, V extends Q_1, W extends Q_1>(part1: App<G, [Q_1, T_1, V]>, part2: App<G, [Q_1, V, W]>) => App<G, [Q_1, T_1, W]>, empty: <T_2 extends Q>() => App<G, [Q, T_2, T_2]>) => DeltaPipe<Q, T, F, G>;
24
- type Link = <T, C = unknown, M extends n = n>() => Concat<T, T, T, C, M>;
25
- type FLink = <T extends O, C = unknown, M extends n = n>() => FConcat<T, T, T, C, M>;
26
- export declare const link: Link;
27
- export declare const flink: FLink;
28
- export declare const emptyDelta: <T extends O>() => {
29
- delta: RawStages<Delta<T>, Delta<T>, Delta<T>, unknown, number>;
30
- raw: <F extends HKT<O>>() => RawStages<App<F, T>, App<F, T>, App<F, T>, unknown, number>;
31
- };
32
- export declare const concatTStages: <S, Q, B extends Q, T extends Q, V extends Q>({ coll, exec, input }: TStages<S, Q, B, T>, stages: RawStages<Q, T, V>) => TStages<S, Q, B, V>;
33
- export {};
@@ -1,17 +0,0 @@
1
- import type { App, Arr, ConstHKT, HKT, O, Rec, StrKey, rawItem } from '../../types';
2
- import type { ExprsExact } from '../expression/concat';
3
- import { type Field } from '../field';
4
- import type { Accumulators, Expr, FRawStages, LookupArgs, Query, RawStages } from '../types';
5
- import type { Updater } from '../update/updater';
6
- import { type ExactKeys } from '../utils/map-object';
7
- type s = string;
8
- export declare const $match1: <T extends O, C = unknown>(query?: Query<T, C>) => FRawStages<T, T, T, C>;
9
- export declare const $set1: <Q, T extends Q & O, V extends Q & O, C = unknown>(updater: Updater<T, T, V, C>) => FRawStages<Q, T, V, C, 1>;
10
- export declare const $project1: <T extends O, K extends StrKey<T>>(projection: ExactKeys<K>) => RawStages<T, T, T, unknown, 1>;
11
- export declare const $replaceWith1: <T extends O, V extends O, C = unknown>(expr: Expr<V, T, C>) => FRawStages<O, T, V, C, 1>;
12
- export declare const $unwind1: <T extends O, K extends s, R, Null extends null = never>(k: K, includeNull?: Null) => FRawStages<T, T & Rec<K, Arr<R>>, T & Rec<K, R | Null>>;
13
- export declare const $group1: <T extends O, ID, V extends O, C>(id: Expr<ID, T, C>, args: Accumulators<T, V, C>) => <F extends HKT<O, O>>(f: <T_1 extends O>() => Field<App<F, T_1>, T_1>) => RawStages<O, App<F, T>, Rec<"_id", ID> & V, C, 1>;
14
- export declare const $documents1: <Q extends O, T extends Q & O, C>(docs: Expr<Arr<T>, unknown, C>) => <F extends HKT<O, O>>(f: <T_1 extends O>() => Field<App<F, T_1>, T_1>) => RawStages<unknown, null, App<F, T>, C, 1>;
15
- export declare const rawVars: <T, Ctx, C, V extends O>(vars: ExprsExact<Ctx, T, C>, f: Field<V, T, unknown>) => import("../utils/map-object").ExactPart1<Ctx, ConstHKT<rawItem, unknown>>;
16
- export declare const $simpleLookup1: <T extends O, U extends O, R, K extends s, Ctx, C = unknown, S = string>(args: LookupArgs<T, U, R, K, Ctx, C, S>) => FRawStages<T, T, T & Rec<K, Arr<U>>, C, 1>;
17
- export {};