@omegup/msync 0.0.16 → 0.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.d.ts +21 -9
  2. package/index.esm.js +119 -13
  3. package/index.js +119 -12
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -96,6 +96,9 @@ type StreamRunnerParam<in V, out Result> = {
96
96
  teardown: <R>(consume: <W, M extends keyof Actions<W>>(x: TeardownRecord<W, M>) => R) => R;
97
97
  };
98
98
  type StreamRunner<out V> = <Result>(input: StreamRunnerParam<V, Result>) => Runner<readonly Result[], HasJob>;
99
+ type SimpleStreamExecutionResult<out Q, out V extends Q> = {
100
+ readonly out: StreamRunner<V>;
101
+ };
99
102
  type SnapshotStreamExecutionResult<out Q, out V extends Q> = {
100
103
  readonly out: StreamRunner<Delta<V>>;
101
104
  readonly stages: Stages<Before<Q>, Before<V>, UBefore<Q>>;
@@ -474,11 +477,10 @@ declare const $unwindDelta: <K1 extends s$1, T extends doc, K2 extends s$1, U ex
474
477
  type s = string;
475
478
  declare const $unwind: <T extends doc, K extends s, U extends doc>(k: AsLiteral<K>, dict: RORec<K, "key">) => DeltaStages<O, T & Rec<K, Arr<U>>, T & Rec<K, U>>;
476
479
 
477
- type OutInputE<T, E, A = T | null> = ID & Rec<'after', A> & E;
478
- type Allowed$1<K extends string> = Exclude<K, keyof (TS & ID)>;
479
- type Patch<V, KK extends StrKey<V>> = ((OPick<V, Allowed$1<KK>> & ID) | (Rec<Allowed$1<KK>, N> & ID)) & TS;
480
- declare const $simpleMerge: <V extends Model & ID>() => <KK extends StrKey<V>, Out extends doc>(out: RWCollection<Out | Replace<Out, Patch<V, KK>>, Out>, keys: ExactKeys<Allowed$1<KK>>) => StreamRunnerParam<OutInputE<OPick<V, Allowed$1<KK>> & ID, unknown>, "out">;
481
- declare const $merge: <V extends Model & ID>() => <KK extends StrKey<V>, Out extends doc>(out: RWCollection<Out | Replace<Out, Patch<V, KK>>, Out>, keys: ExactKeys<Allowed$1<KK>>) => StreamRunnerParam<Delta<OPick<V, Allowed$1<KK>> & ID>, "out">;
480
+ type Allowed$2<K extends string> = Exclude<K, keyof (TS & ID)>;
481
+ type Patch<V, KK extends StrKey<V>> = ((OPick<V, Allowed$2<KK>> & ID) | (Rec<Allowed$2<KK>, N> & ID)) & TS;
482
+ declare const $simpleMerge: <V extends Model & ID>() => <KK extends StrKey<V>, Out extends doc, E = unknown>(out: RWCollection<Out | Replace<Out, Patch<V, KK>>, Out>, keys: ExactKeys<Allowed$2<KK>>) => StreamRunnerParam<OPick<V, Allowed$2<KK>> & ID & E, "out">;
483
+ declare const $merge: <V extends Model & ID>() => <KK extends StrKey<V>, Out extends doc>(out: RWCollection<Out | Replace<Out, Patch<V, KK>>, Out>, keys: ExactKeys<Allowed$2<KK>>) => StreamRunnerParam<Delta<OPick<V, Allowed$2<KK>> & ID>, "out">;
482
484
 
483
485
  type ND = {
484
486
  readonly deletedAt?: null;
@@ -492,15 +494,25 @@ type DeltaPipe<Q extends O, T extends Q, F extends HKT<O2>, G extends HKT<O3>> =
492
494
  get: () => App<F, [Q, T]>;
493
495
  };
494
496
 
495
- type Allowed<K> = Exclude<K, 'deletedAt' | '_id'>;
496
- type AllowedPick<V extends Model, K extends StrKey<V>> = OPickD<V, Allowed<K>>;
497
+ type Allowed$1<K> = Exclude<K, 'deletedAt' | '_id'>;
498
+ type AllowedPick$1<V extends Model, K extends StrKey<V>> = OPickD<V, Allowed$1<K>>;
497
499
  interface SnapshotStreamHKT extends HKT<O2> {
498
500
  readonly out: SnapshotStreamExecutionResult<I<O2, this>[0], I<O2, this>[1]>;
499
501
  }
500
502
  interface DeltaHKT extends HKT<O3> {
501
503
  readonly out: DeltaStages<I<O3, this>[0], I<O3, this>[1], I<O3, this>[2]>;
502
504
  }
503
- declare const staging: <V extends Model, KK extends StrKey<V>>(view: View<V, Allowed<KK>>, streamName: string) => DeltaPipe<AllowedPick<V, KK>, AllowedPick<V, KK>, SnapshotStreamHKT, DeltaHKT>;
505
+ declare const staging: <V extends Model, KK extends StrKey<V>>(view: View<V, Allowed$1<KK>>, streamName: string) => DeltaPipe<AllowedPick$1<V, KK>, AllowedPick$1<V, KK>, SnapshotStreamHKT, DeltaHKT>;
506
+
507
+ type Allowed<K> = Exclude<K, 'deletedAt' | '_id'>;
508
+ type AllowedPick<V extends Model, K extends StrKey<V>> = OPickD<V, Allowed<K>>;
509
+ interface StreamRunnerHKT extends HKT<O2> {
510
+ readonly out: SimpleStreamExecutionResult<I<O2, this>[0], I<O2, this>[1]>;
511
+ }
512
+ interface StagesHKT extends HKT<O3> {
513
+ readonly out: RORec<'lin', RawStages<I<O3, this>[0], I<O3, this>[1], I<O3, this>[2], unknown, 1>>;
514
+ }
515
+ declare const from: <V extends Model, KK extends StrKey<V>>(view: View<V, Allowed<KK>>, streamName: string) => DeltaPipe<AllowedPick<V, KK>, AllowedPick<V, KK>, StreamRunnerHKT, StagesHKT>;
504
516
 
505
517
  declare const max: <D, C>(...expr: Expr<number, D, C>[]) => Expr<number, D, C>;
506
518
  declare const lt: <D, C>(expr_0: Expr<number, D, C>, expr_1: Expr<number, D, C>) => Expr<boolean, D, C>;
@@ -641,4 +653,4 @@ declare const enablePreAndPostImages: <T extends doc>(coll: Collection<T>) => Pr
641
653
  declare const prepare: (testName?: string) => Promise<MongoClient$1>;
642
654
  declare const makeCol: <T extends ID>(docs: readonly OptionalUnlessRequiredId<T>[], database: Db, name?: string) => Promise<Collection<T>>;
643
655
 
644
- export { $accumulator, $and, $countDict, $entries, $eq, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $keys, $let, $lookup, $lt, $lte, $map, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $pushDict, $rand, $replaceWith, $set, $simpleMerge, $sum, $unwind, $unwindDelta, $upsert, type Accumulators, type Arr, type AsLiteral, type Delta, type DeltaAccumulator, type DeltaAccumulators, Expr, type ExprHKT, type Exprs, type ExprsExact, type ExprsExactHKT, type ExprsPart, Field, type ID, type Loose, Machine, type Merge, type MergeArgs, type MergeInto, type MergeMapOArgs, type Model, type N, type NoRaw, type O, type OPick, type OPickD, type RONoRaw, type RORec, type RawStages, type Rec, type Replace, type SnapshotStreamExecutionResult, type StrKey, type Strict, type TS, Type, type WriteonlyCollection, add, and, array, ceil, comp, concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, type doc, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first, firstSure, floor, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, type notArr, notNull, now, or, pair, prepare, rand, range, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
656
+ export { $accumulator, $and, $countDict, $entries, $eq, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $keys, $let, $lookup, $lt, $lte, $map, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $pushDict, $rand, $replaceWith, $set, $simpleMerge, $sum, $unwind, $unwindDelta, $upsert, type Accumulators, type Arr, type AsLiteral, type Delta, type DeltaAccumulator, type DeltaAccumulators, Expr, type ExprHKT, type Exprs, type ExprsExact, type ExprsExactHKT, type ExprsPart, Field, type ID, type Loose, Machine, type Merge, type MergeArgs, type MergeInto, type MergeMapOArgs, type Model, type N, type NoRaw, type O, type OPick, type OPickD, type RONoRaw, type RORec, type RawStages, type Rec, type Replace, type SnapshotStreamExecutionResult, type StrKey, type Strict, type TS, Type, type WriteonlyCollection, add, and, array, ceil, comp, concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, type doc, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first, firstSure, floor, from, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, type notArr, notNull, now, or, pair, prepare, rand, range, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
package/index.esm.js CHANGED
@@ -1033,17 +1033,14 @@ const $replaceWith = (expr) => ({
1033
1033
  lin: $replaceWith_(expr),
1034
1034
  });
1035
1035
 
1036
- const $mergeId = () => (out, keys, id) => ({
1036
+ const $mergeX = (out, keys, f, map) => ({
1037
1037
  raw: (first) => {
1038
- const omRORec = omitRORec();
1039
- const patch = mapExactToObject(keys, (_, k) => [k, root().of('after').of(k).expr()]);
1040
- const replacer = ite(eqTyped(root().of('after').expr(), nil), field(omRORec.backward(spread(mapExact(keys, () => nil), {
1041
- _id: ['_id', id],
1042
- touchedAt: ['touchedAt', current],
1043
- }))), field(omitPick().backward(spread(patch, {
1044
- _id: ['_id', root().of('after').of('_id').expr()],
1038
+ const patch = mapExactToObject(keys, (_, k) => [k, f.of(k).expr()]);
1039
+ const or = field(omitPick().backward(spread(patch, {
1040
+ _id: ['_id', f.of('_id').expr()],
1045
1041
  touchedAt: ['touchedAt', current],
1046
- }))));
1042
+ })));
1043
+ const replacer = map(or);
1047
1044
  return link()
1048
1045
  .with($replaceWith_(replacer))
1049
1046
  .with($merge_({
@@ -1066,7 +1063,16 @@ const $mergeId = () => (out, keys, id) => ({
1066
1063
  ],
1067
1064
  }),
1068
1065
  });
1069
- const $simpleMerge = () => (out, keys) => $mergeId()(out, keys, root().of('_id').expr());
1066
+ const $mergeId = () => (out, keys, id) => {
1067
+ const omRORec = omitRORec();
1068
+ return $mergeX(out, keys, root().of('after'), or => {
1069
+ return ite(eqTyped(root().of('after').expr(), nil), field(omRORec.backward(spread(mapExact(keys, () => nil), {
1070
+ _id: ['_id', id],
1071
+ touchedAt: ['touchedAt', current],
1072
+ }))), or);
1073
+ });
1074
+ };
1075
+ const $simpleMerge = () => (out, keys) => $mergeX(out, keys, root(), id$1);
1070
1076
  const $merge = () => (out, keys) => $mergeId()(out, keys, assertNotNull$1(root()
1071
1077
  .of('before')
1072
1078
  .of('_id')
@@ -1183,7 +1189,7 @@ const actions = {
1183
1189
  updateMany: (c, args) => c.updateMany(...args),
1184
1190
  };
1185
1191
  const streamNames = {};
1186
- const executes = (view, input, streamName) => {
1192
+ const executes$1 = (view, input, streamName) => {
1187
1193
  const hash = crypto$1
1188
1194
  .createHash('md5')
1189
1195
  .update(new Error().stack + '')
@@ -1349,7 +1355,107 @@ const executes = (view, input, streamName) => {
1349
1355
  out: run,
1350
1356
  };
1351
1357
  };
1352
- const staging = (view, streamName) => pipe(input => executes(view, input, streamName), emptyDelta(), concatDelta, emptyDelta);
1358
+ const staging = (view, streamName) => pipe(input => executes$1(view, input, streamName), emptyDelta(), concatDelta, emptyDelta);
1359
+
1360
+ const executes = (view, input, streamName) => {
1361
+ const hash = crypto$1
1362
+ .createHash('md5')
1363
+ .update(new Error().stack + '')
1364
+ .digest('base64url');
1365
+ if (!streamNames[streamName])
1366
+ streamNames[streamName] = hash;
1367
+ else if (streamNames[streamName] != hash)
1368
+ throw new Error('streamName already used');
1369
+ const { collection, projection, hardMatch, match } = view;
1370
+ const job = {};
1371
+ const db = collection.s.db, coll = collection.collectionName;
1372
+ db.command({
1373
+ collMod: coll,
1374
+ changeStreamPreAndPostImages: { enabled: true },
1375
+ });
1376
+ createIndex(collection, { touchedAt: 1 }, {
1377
+ partialFilterExpression: { deletedAt: { $eq: null } },
1378
+ name: 'touchedAt_' + new UUID().toString('base64'),
1379
+ });
1380
+ const last = db.collection('__last');
1381
+ const projectInput = $project_(spread(projection, {
1382
+ deletedAt: ['deletedAt', 1],
1383
+ _id: ['_id', 1],
1384
+ }));
1385
+ const notDeleted = root().of('deletedAt').has($eq(null));
1386
+ const run = (finalInput) => {
1387
+ const clear = async () => { };
1388
+ const withStop = (next, tr) => {
1389
+ return addTeardown(() => ({ stop, next: next(), clear }), tr);
1390
+ };
1391
+ const next = (next, debug, tr) => ({
1392
+ cont: withStop(next, tr),
1393
+ data: [],
1394
+ info: { job, debug },
1395
+ });
1396
+ const data = {
1397
+ input: input,
1398
+ finalInputFirst: finalInput.raw(true),
1399
+ finalInput: finalInput.raw(false),
1400
+ teardown: finalInput.teardown((x) => ({
1401
+ collection: x.collection.collectionName,
1402
+ method: x.method,
1403
+ params: x.params,
1404
+ })),
1405
+ };
1406
+ const step0 = () => Promise.resolve(next(step1, 'get last update'));
1407
+ const stop = withStop(step0);
1408
+ const step1 = () => Promise.all([
1409
+ last.findOne({ _id: streamName, data }),
1410
+ last.findOne({ _id: streamName }),
1411
+ ]).then(ts => next(step2_5(ts), 'handle teardown'));
1412
+ const step2_5 = ([same, exists]) => async () => {
1413
+ const handleTeardown = async (last) => {
1414
+ const { collection: c, method: m, params: p } = last.data.teardown;
1415
+ const { collection, method, params } = {
1416
+ collection: db.collection(c),
1417
+ method: m,
1418
+ params: p,
1419
+ };
1420
+ await actions[method](collection, params);
1421
+ };
1422
+ if (exists && !same)
1423
+ await handleTeardown(exists);
1424
+ return next(step4(same), 'clone into new collection');
1425
+ };
1426
+ const makeStream = (startAt) => makeWatchStream(db, view, startAt);
1427
+ const step4 = (lastTS) => async () => {
1428
+ const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
1429
+ const aggResult = await aggregate(c => c({
1430
+ coll: collection,
1431
+ input: link()
1432
+ .with($match_(hardQuery))
1433
+ .with(projectInput)
1434
+ .with(input)
1435
+ .with(finalInput.raw(lastTS === null)).stages,
1436
+ }));
1437
+ const stream = makeStream(aggResult.cursor.atClusterTime);
1438
+ return next(step7({ aggResult, result: aggResult, stream }), 'update __last', () => stream.close());
1439
+ };
1440
+ const step7 = (l) => async () => {
1441
+ await last.updateOne({ _id: streamName }, { $set: { ts: l.result.cursor.atClusterTime, data } }, { upsert: true });
1442
+ return step8(l);
1443
+ };
1444
+ const step8 = (l) => {
1445
+ return {
1446
+ data: l.aggResult.cursor.firstBatch,
1447
+ info: { job: undefined, debug: 'wait for change' },
1448
+ cont: withStop(() => l.stream.tryNext().then(doc => (doc ? next(step1, 'restart') : step8(l)))),
1449
+ };
1450
+ };
1451
+ return stop;
1452
+ };
1453
+ return {
1454
+ out: run,
1455
+ };
1456
+ };
1457
+ const emptyLin = () => ({ lin: link().stages });
1458
+ const from = (view, streamName) => pipe(input => executes(view, input.lin, streamName), { lin: link().stages }, ({ lin: a }, { lin: b }) => ({ lin: concatStages(a, b) }), emptyLin);
1353
1459
 
1354
1460
  const dayAndMonthPart = (date) => asExpr({
1355
1461
  raw: f => asExprRaw({ $dateToString: { date: date.raw(f).get(), format: '%m-%d' } }),
@@ -1629,4 +1735,4 @@ const makeCol = async (docs, database, name) => {
1629
1735
  }
1630
1736
  };
1631
1737
 
1632
- export { $accumulator, $and, $countDict, $entries, $eq, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $keys, $let, $lookup, $lt, $lte, $map, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $pushDict, $rand, $replaceWith, $set, $simpleMerge, $sum, $unwind, $unwindDelta, $upsert, Field, Machine, add, and, array, ceil, comp, concat$1 as concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first$1 as first, firstSure, floor, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, notNull, now, or, pair, prepare, rand, range, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
1738
+ export { $accumulator, $and, $countDict, $entries, $eq, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $keys, $let, $lookup, $lt, $lte, $map, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $pushDict, $rand, $replaceWith, $set, $simpleMerge, $sum, $unwind, $unwindDelta, $upsert, Field, Machine, add, and, array, ceil, comp, concat$1 as concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first$1 as first, firstSure, floor, from, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, notNull, now, or, pair, prepare, rand, range, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
package/index.js CHANGED
@@ -1035,17 +1035,14 @@ const $replaceWith = (expr) => ({
1035
1035
  lin: $replaceWith_(expr),
1036
1036
  });
1037
1037
 
1038
- const $mergeId = () => (out, keys, id) => ({
1038
+ const $mergeX = (out, keys, f, map) => ({
1039
1039
  raw: (first) => {
1040
- const omRORec = omitRORec();
1041
- const patch = mapExactToObject(keys, (_, k) => [k, root().of('after').of(k).expr()]);
1042
- const replacer = ite(eqTyped(root().of('after').expr(), nil), field(omRORec.backward(spread(mapExact(keys, () => nil), {
1043
- _id: ['_id', id],
1044
- touchedAt: ['touchedAt', current],
1045
- }))), field(omitPick().backward(spread(patch, {
1046
- _id: ['_id', root().of('after').of('_id').expr()],
1040
+ const patch = mapExactToObject(keys, (_, k) => [k, f.of(k).expr()]);
1041
+ const or = field(omitPick().backward(spread(patch, {
1042
+ _id: ['_id', f.of('_id').expr()],
1047
1043
  touchedAt: ['touchedAt', current],
1048
- }))));
1044
+ })));
1045
+ const replacer = map(or);
1049
1046
  return link()
1050
1047
  .with($replaceWith_(replacer))
1051
1048
  .with($merge_({
@@ -1068,7 +1065,16 @@ const $mergeId = () => (out, keys, id) => ({
1068
1065
  ],
1069
1066
  }),
1070
1067
  });
1071
- const $simpleMerge = () => (out, keys) => $mergeId()(out, keys, root().of('_id').expr());
1068
+ const $mergeId = () => (out, keys, id) => {
1069
+ const omRORec = omitRORec();
1070
+ return $mergeX(out, keys, root().of('after'), or => {
1071
+ return ite(eqTyped(root().of('after').expr(), nil), field(omRORec.backward(spread(mapExact(keys, () => nil), {
1072
+ _id: ['_id', id],
1073
+ touchedAt: ['touchedAt', current],
1074
+ }))), or);
1075
+ });
1076
+ };
1077
+ const $simpleMerge = () => (out, keys) => $mergeX(out, keys, root(), id$1);
1072
1078
  const $merge = () => (out, keys) => $mergeId()(out, keys, assertNotNull$1(root()
1073
1079
  .of('before')
1074
1080
  .of('_id')
@@ -1185,7 +1191,7 @@ const actions = {
1185
1191
  updateMany: (c, args) => c.updateMany(...args),
1186
1192
  };
1187
1193
  const streamNames = {};
1188
- const executes = (view, input, streamName) => {
1194
+ const executes$1 = (view, input, streamName) => {
1189
1195
  const hash = crypto$1
1190
1196
  .createHash('md5')
1191
1197
  .update(new Error().stack + '')
@@ -1351,7 +1357,107 @@ const executes = (view, input, streamName) => {
1351
1357
  out: run,
1352
1358
  };
1353
1359
  };
1354
- const staging = (view, streamName) => pipe(input => executes(view, input, streamName), emptyDelta(), concatDelta, emptyDelta);
1360
+ const staging = (view, streamName) => pipe(input => executes$1(view, input, streamName), emptyDelta(), concatDelta, emptyDelta);
1361
+
1362
+ const executes = (view, input, streamName) => {
1363
+ const hash = crypto$1
1364
+ .createHash('md5')
1365
+ .update(new Error().stack + '')
1366
+ .digest('base64url');
1367
+ if (!streamNames[streamName])
1368
+ streamNames[streamName] = hash;
1369
+ else if (streamNames[streamName] != hash)
1370
+ throw new Error('streamName already used');
1371
+ const { collection, projection, hardMatch, match } = view;
1372
+ const job = {};
1373
+ const db = collection.s.db, coll = collection.collectionName;
1374
+ db.command({
1375
+ collMod: coll,
1376
+ changeStreamPreAndPostImages: { enabled: true },
1377
+ });
1378
+ createIndex(collection, { touchedAt: 1 }, {
1379
+ partialFilterExpression: { deletedAt: { $eq: null } },
1380
+ name: 'touchedAt_' + new mongodb.UUID().toString('base64'),
1381
+ });
1382
+ const last = db.collection('__last');
1383
+ const projectInput = $project_(spread(projection, {
1384
+ deletedAt: ['deletedAt', 1],
1385
+ _id: ['_id', 1],
1386
+ }));
1387
+ const notDeleted = root().of('deletedAt').has($eq(null));
1388
+ const run = (finalInput) => {
1389
+ const clear = async () => { };
1390
+ const withStop = (next, tr) => {
1391
+ return addTeardown(() => ({ stop, next: next(), clear }), tr);
1392
+ };
1393
+ const next = (next, debug, tr) => ({
1394
+ cont: withStop(next, tr),
1395
+ data: [],
1396
+ info: { job, debug },
1397
+ });
1398
+ const data = {
1399
+ input: input,
1400
+ finalInputFirst: finalInput.raw(true),
1401
+ finalInput: finalInput.raw(false),
1402
+ teardown: finalInput.teardown((x) => ({
1403
+ collection: x.collection.collectionName,
1404
+ method: x.method,
1405
+ params: x.params,
1406
+ })),
1407
+ };
1408
+ const step0 = () => Promise.resolve(next(step1, 'get last update'));
1409
+ const stop = withStop(step0);
1410
+ const step1 = () => Promise.all([
1411
+ last.findOne({ _id: streamName, data }),
1412
+ last.findOne({ _id: streamName }),
1413
+ ]).then(ts => next(step2_5(ts), 'handle teardown'));
1414
+ const step2_5 = ([same, exists]) => async () => {
1415
+ const handleTeardown = async (last) => {
1416
+ const { collection: c, method: m, params: p } = last.data.teardown;
1417
+ const { collection, method, params } = {
1418
+ collection: db.collection(c),
1419
+ method: m,
1420
+ params: p,
1421
+ };
1422
+ await actions[method](collection, params);
1423
+ };
1424
+ if (exists && !same)
1425
+ await handleTeardown(exists);
1426
+ return next(step4(same), 'clone into new collection');
1427
+ };
1428
+ const makeStream = (startAt) => makeWatchStream(db, view, startAt);
1429
+ const step4 = (lastTS) => async () => {
1430
+ const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
1431
+ const aggResult = await aggregate(c => c({
1432
+ coll: collection,
1433
+ input: link()
1434
+ .with($match_(hardQuery))
1435
+ .with(projectInput)
1436
+ .with(input)
1437
+ .with(finalInput.raw(lastTS === null)).stages,
1438
+ }));
1439
+ const stream = makeStream(aggResult.cursor.atClusterTime);
1440
+ return next(step7({ aggResult, result: aggResult, stream }), 'update __last', () => stream.close());
1441
+ };
1442
+ const step7 = (l) => async () => {
1443
+ await last.updateOne({ _id: streamName }, { $set: { ts: l.result.cursor.atClusterTime, data } }, { upsert: true });
1444
+ return step8(l);
1445
+ };
1446
+ const step8 = (l) => {
1447
+ return {
1448
+ data: l.aggResult.cursor.firstBatch,
1449
+ info: { job: undefined, debug: 'wait for change' },
1450
+ cont: withStop(() => l.stream.tryNext().then(doc => (doc ? next(step1, 'restart') : step8(l)))),
1451
+ };
1452
+ };
1453
+ return stop;
1454
+ };
1455
+ return {
1456
+ out: run,
1457
+ };
1458
+ };
1459
+ const emptyLin = () => ({ lin: link().stages });
1460
+ const from = (view, streamName) => pipe(input => executes(view, input.lin, streamName), { lin: link().stages }, ({ lin: a }, { lin: b }) => ({ lin: concatStages(a, b) }), emptyLin);
1355
1461
 
1356
1462
  const dayAndMonthPart = (date) => asExpr({
1357
1463
  raw: f => asExprRaw({ $dateToString: { date: date.raw(f).get(), format: '%m-%d' } }),
@@ -1702,6 +1808,7 @@ exports.filterDefined = filterDefined;
1702
1808
  exports.first = first$1;
1703
1809
  exports.firstSure = firstSure;
1704
1810
  exports.floor = floor;
1811
+ exports.from = from;
1705
1812
  exports.func = func;
1706
1813
  exports.gt = gt;
1707
1814
  exports.gte = gte;
package/package.json CHANGED
@@ -3,7 +3,7 @@
3
3
  "module": "index.esm.js",
4
4
  "typings": "index.d.ts",
5
5
  "name": "@omegup/msync",
6
- "version": "0.0.16",
6
+ "version": "0.0.18",
7
7
  "dependencies": {
8
8
  "dayjs": "^1.11.9",
9
9
  "dotenv": "^16.3.1",