@omegup/msync 0.0.14 → 0.0.16

This diff compares the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
Files changed (4)
  1. package/index.d.ts +26 -27
  2. package/index.esm.js +134 -130
  3. package/index.js +133 -130
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { Timestamp, BSON, Db, Collection, IndexSpecification, CreateIndexesOptions, MongoClient as MongoClient$1, OptionalUnlessRequiredId } from 'mongodb';
+ import { Timestamp, Filter, UpdateFilter, BSON, Db, Collection, IndexSpecification, CreateIndexesOptions, MongoClient as MongoClient$1, OptionalUnlessRequiredId } from 'mongodb';
  export { Collection, Timestamp } from 'mongodb';
  import * as bson from 'bson';
 
@@ -15,7 +15,7 @@ type Frame<T, Info> = {
  type IteratorResult<out T, out Info> = {
  next: NextFrame<T, Info>;
  stop: Iterator<T, Info>;
- clear: () => Promise<void>;
+ clear: () => Promise<unknown>;
  };
  type Iterator<out T, out Info> = () => IteratorResult<T, Info>;
 
@@ -83,10 +83,19 @@ type TStages<in out S, out Q, in out B extends Q, out R extends Q, M extends num
  exec: RawStages<Q, B, R, unknown, M>;
  };
  type Stages<out Q, out R extends Q, out SDom> = <E>(consume: <S extends SDom, B extends Q>(value: TStages<S, Q, B, R>) => E) => E;
- type StreamRunner<out V> = <Result>(input: RawStages<unknown, V, Result>) => Runner<readonly Result[], HasJob>;
- type SimpleStreamExecutionResult<out Q, out V extends Q> = {
- readonly out: StreamRunner<V>;
+ type Actions<W> = {
+ updateMany: [Filter<W>, UpdateFilter<W> | BSON.Document[]];
  };
+ type TeardownRecord<W, M extends keyof Actions<W>> = {
+ collection: WriteonlyCollection<W>;
+ method: M;
+ params: Actions<W>[M];
+ };
+ type StreamRunnerParam<in V, out Result> = {
+ raw: (first: boolean) => RawStages<unknown, V, Result>;
+ teardown: <R>(consume: <W, M extends keyof Actions<W>>(x: TeardownRecord<W, M>) => R) => R;
+ };
+ type StreamRunner<out V> = <Result>(input: StreamRunnerParam<V, Result>) => Runner<readonly Result[], HasJob>;
  type SnapshotStreamExecutionResult<out Q, out V extends Q> = {
  readonly out: StreamRunner<Delta<V>>;
  readonly stages: Stages<Before<Q>, Before<V>, UBefore<Q>>;
@@ -428,9 +437,9 @@ type Extra<EE, VV, GG extends string> = Omit<EE, IdAndTsKeys | GI$1<GG> | keyof
 
  type Denied<GID = never> = keyof (TS & ID) | GID;
  type GI<GG> = Exclude<GG, keyof TS>;
- declare const $groupMerge: <T extends O, Grp extends notArr, V extends O, GG extends string, EE = {}, Out extends Loose<Grp, V, GG> = Loose<Grp, V, GG>>(id: Expr<Grp, T>, args: DeltaAccumulators<T, O & Omit<V, Denied<GI<GG>>>>, out: MergeInto<Strict<Grp, V, GG, EE>, Out, WriteonlyCollection<Replace<Out, Strict<Grp, V, GG, EE>>>>, gid: AsLiteral<GI<GG>>, extra: ExprsExact<Extra<EE, V, GG>, V_Grp<V, GG, Grp>>, idPrefix?: string) => RawStages<unknown, Delta<T>, "out">;
- declare const $groupId: <T extends O, V extends O, EE = {}, Out extends Loose<string, V, "_id"> = Loose<string, V, "_id">>(id: Expr<string, T>, args: DeltaAccumulators<T, O & Omit<V, Denied>>, out: RWCollection<Replace<Out, Strict<string, V, "_id", EE>>, Out>, extra: ExprsExact<Omit<EE, IdAndTsKeys | keyof Omit<V, IdAndTsKeys>>, doc & Omit<V, IdAndTsKeys>>) => RawStages<unknown, Delta<T>, "out">;
- declare const $group: <T extends O, Grp extends notArr, V extends O, EE = {}, Out extends Loose<Grp, V, "_grp"> = Loose<Grp, V, "_grp">>(id: Expr<Grp, T>, args: DeltaAccumulators<T, O & Omit<V, Denied<"_grp">>>, out: RWCollection<Strict<Grp, V, "_grp", EE>, Out>, extra: ExprsExact<Omit<EE, IdAndTsKeys | "_grp" | keyof Omit<V, IdAndTsKeys | "_grp">>, Rec<"_grp", Grp> & Omit<V, IdAndTsKeys | "_grp">>, idPrefix?: string) => RawStages<unknown, Delta<T>, "out">;
+ declare const $groupMerge: <T extends O, Grp extends notArr, V extends O, GG extends string, EE = {}, Out extends Loose<Grp, V, GG> = Loose<Grp, V, GG>>(id: Expr<Grp, T>, args: DeltaAccumulators<T, O & Omit<V, Denied<GI<GG>>>>, out: MergeInto<Strict<Grp, V, GG, EE>, Out, WriteonlyCollection<Replace<Out, Strict<Grp, V, GG, EE>>>>, gid: AsLiteral<GI<GG>>, extra: ExprsExact<Extra<EE, V, GG>, V_Grp<V, GG, Grp>>, idPrefix?: string) => StreamRunnerParam<Delta<T>, "out">;
+ declare const $groupId: <T extends O, V extends O, EE = {}, Out extends Loose<string, V, "_id"> = Loose<string, V, "_id">>(id: Expr<string, T>, args: DeltaAccumulators<T, O & Omit<V, Denied>>, out: RWCollection<Replace<Out, Strict<string, V, "_id", EE>>, Out>, extra: ExprsExact<Omit<EE, IdAndTsKeys | keyof Omit<V, IdAndTsKeys>>, doc & Omit<V, IdAndTsKeys>>) => StreamRunnerParam<Delta<T>, "out">;
+ declare const $group: <T extends O, Grp extends notArr, V extends O, EE = {}, Out extends Loose<Grp, V, "_grp"> = Loose<Grp, V, "_grp">>(id: Expr<Grp, T>, args: DeltaAccumulators<T, O & Omit<V, Denied<"_grp">>>, out: RWCollection<Strict<Grp, V, "_grp", EE>, Out>, extra: ExprsExact<Omit<EE, IdAndTsKeys | "_grp" | keyof Omit<V, IdAndTsKeys | "_grp">>, Rec<"_grp", Grp> & Omit<V, IdAndTsKeys | "_grp">>, idPrefix?: string) => StreamRunnerParam<Delta<T>, "out">;
 
  type Params<As extends string, LQ extends O, RQ extends O, RE extends RQ, S extends notArr> = {
  localField: Field<LQ, S>;
@@ -466,16 +475,16 @@ type s = string;
  declare const $unwind: <T extends doc, K extends s, U extends doc>(k: AsLiteral<K>, dict: RORec<K, "key">) => DeltaStages<O, T & Rec<K, Arr<U>>, T & Rec<K, U>>;
 
  type OutInputE<T, E, A = T | null> = ID & Rec<'after', A> & E;
- type Allowed$2<K extends string> = Exclude<K, keyof (TS & ID)>;
- type Patch<V, KK extends StrKey<V>> = ((OPick<V, Allowed$2<KK>> & ID) | (Rec<Allowed$2<KK>, N> & ID)) & TS;
- declare const $simpleMerge: <V extends Model & ID>() => <KK extends StrKey<V>, Out extends doc>(out: RWCollection<Out | Replace<Out, Patch<V, KK>>, Out>, keys: ExactKeys<Allowed$2<KK>>) => RawStages<unknown, OutInputE<OPick<V, Allowed$2<KK>> & ID, unknown>, "out">;
- declare const $merge: <V extends Model & ID>() => <KK extends StrKey<V>, Out extends doc>(out: RWCollection<Out | Replace<Out, Patch<V, KK>>, Out>, keys: ExactKeys<Allowed$2<KK>>) => RawStages<unknown, Delta<OPick<V, Allowed$2<KK>> & ID>, "out">;
+ type Allowed$1<K extends string> = Exclude<K, keyof (TS & ID)>;
+ type Patch<V, KK extends StrKey<V>> = ((OPick<V, Allowed$1<KK>> & ID) | (Rec<Allowed$1<KK>, N> & ID)) & TS;
+ declare const $simpleMerge: <V extends Model & ID>() => <KK extends StrKey<V>, Out extends doc>(out: RWCollection<Out | Replace<Out, Patch<V, KK>>, Out>, keys: ExactKeys<Allowed$1<KK>>) => StreamRunnerParam<OutInputE<OPick<V, Allowed$1<KK>> & ID, unknown>, "out">;
+ declare const $merge: <V extends Model & ID>() => <KK extends StrKey<V>, Out extends doc>(out: RWCollection<Out | Replace<Out, Patch<V, KK>>, Out>, keys: ExactKeys<Allowed$1<KK>>) => StreamRunnerParam<Delta<OPick<V, Allowed$1<KK>> & ID>, "out">;
 
  type ND = {
  readonly deletedAt?: null;
  };
  type Merge<T extends ID> = (T & ND & TS) | Del;
- declare const $upsert: <T extends doc>(out: RWCollection<Merge<T>>) => RawStages<unknown, Delta<T>, "out">;
+ declare const $upsert: <T extends doc>(out: RWCollection<Merge<T>>) => StreamRunnerParam<Delta<T>, "out">;
 
  type DeltaPipe<Q extends O, T extends Q, F extends HKT<O2>, G extends HKT<O3>> = {
  with: <Q2 extends O, V extends Q2>(map: (a: Stream<Q, T, F, G>) => Stream<Q | Q2, V, F, G>) => DeltaPipe<Q | Q2, V, F, G>;
@@ -483,25 +492,15 @@ type DeltaPipe<Q extends O, T extends Q, F extends HKT<O2>, G extends HKT<O3>> =
  get: () => App<F, [Q, T]>;
  };
 
- type Allowed$1<K> = Exclude<K, 'deletedAt' | '_id'>;
- type AllowedPick$1<V extends Model, K extends StrKey<V>> = OPickD<V, Allowed$1<K>>;
+ type Allowed<K> = Exclude<K, 'deletedAt' | '_id'>;
+ type AllowedPick<V extends Model, K extends StrKey<V>> = OPickD<V, Allowed<K>>;
  interface SnapshotStreamHKT extends HKT<O2> {
  readonly out: SnapshotStreamExecutionResult<I<O2, this>[0], I<O2, this>[1]>;
  }
  interface DeltaHKT extends HKT<O3> {
  readonly out: DeltaStages<I<O3, this>[0], I<O3, this>[1], I<O3, this>[2]>;
  }
- declare const staging: <V extends Model, KK extends StrKey<V>>(view: View<V, Allowed$1<KK>>, streamName: string) => DeltaPipe<AllowedPick$1<V, KK>, AllowedPick$1<V, KK>, SnapshotStreamHKT, DeltaHKT>;
-
- type Allowed<K> = Exclude<K, 'deletedAt' | '_id'>;
- type AllowedPick<V extends Model, K extends StrKey<V>> = OPickD<V, Allowed<K>>;
- interface StreamRunnerHKT extends HKT<O2> {
- readonly out: SimpleStreamExecutionResult<I<O2, this>[0], I<O2, this>[1]>;
- }
- interface StagesHKT extends HKT<O3> {
- readonly out: RORec<'lin', RawStages<I<O3, this>[0], I<O3, this>[1], I<O3, this>[2], unknown, 1>>;
- }
- declare const from: <V extends Model, KK extends StrKey<V>>(view: View<V, Allowed<KK>>, streamName: string) => DeltaPipe<AllowedPick<V, KK>, AllowedPick<V, KK>, StreamRunnerHKT, StagesHKT>;
+ declare const staging: <V extends Model, KK extends StrKey<V>>(view: View<V, Allowed<KK>>, streamName: string) => DeltaPipe<AllowedPick<V, KK>, AllowedPick<V, KK>, SnapshotStreamHKT, DeltaHKT>;
 
  declare const max: <D, C>(...expr: Expr<number, D, C>[]) => Expr<number, D, C>;
  declare const lt: <D, C>(expr_0: Expr<number, D, C>, expr_1: Expr<number, D, C>) => Expr<boolean, D, C>;
@@ -642,4 +641,4 @@ declare const enablePreAndPostImages: <T extends doc>(coll: Collection<T>) => Pr
  declare const prepare: (testName?: string) => Promise<MongoClient$1>;
  declare const makeCol: <T extends ID>(docs: readonly OptionalUnlessRequiredId<T>[], database: Db, name?: string) => Promise<Collection<T>>;
 
- export { $accumulator, $and, $countDict, $entries, $eq, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $keys, $let, $lookup, $lt, $lte, $map, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $pushDict, $rand, $replaceWith, $set, $simpleMerge, $sum, $unwind, $unwindDelta, $upsert, type Accumulators, type Arr, type AsLiteral, type Delta, type DeltaAccumulator, type DeltaAccumulators, Expr, type ExprHKT, type Exprs, type ExprsExact, type ExprsExactHKT, type ExprsPart, Field, type ID, type Loose, Machine, type Merge, type MergeArgs, type MergeInto, type MergeMapOArgs, type Model, type N, type NoRaw, type O, type OPick, type OPickD, type RONoRaw, type RORec, type RawStages, type Rec, type Replace, type SnapshotStreamExecutionResult, type StrKey, type Strict, type TS, Type, type WriteonlyCollection, add, and, array, ceil, comp, concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, type doc, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first, firstSure, floor, from, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, type notArr, notNull, now, or, pair, prepare, rand, range, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
+ export { $accumulator, $and, $countDict, $entries, $eq, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $keys, $let, $lookup, $lt, $lte, $map, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $pushDict, $rand, $replaceWith, $set, $simpleMerge, $sum, $unwind, $unwindDelta, $upsert, type Accumulators, type Arr, type AsLiteral, type Delta, type DeltaAccumulator, type DeltaAccumulators, Expr, type ExprHKT, type Exprs, type ExprsExact, type ExprsExactHKT, type ExprsPart, Field, type ID, type Loose, Machine, type Merge, type MergeArgs, type MergeInto, type MergeMapOArgs, type Model, type N, type NoRaw, type O, type OPick, type OPickD, type RONoRaw, type RORec, type RawStages, type Rec, type Replace, type SnapshotStreamExecutionResult, type StrKey, type Strict, type TS, Type, type WriteonlyCollection, add, and, array, ceil, comp, concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, type doc, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first, firstSure, floor, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, type notArr, notNull, now, or, pair, prepare, rand, range, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
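The headline typings change: every sink stage ($groupMerge, $groupId, $group, $simpleMerge, $merge, $upsert) now returns a StreamRunnerParam instead of bare RawStages. The new shape pairs the stage builder, parameterized by whether this is the stream's first run, with a serializable teardown record (hence the new Filter/UpdateFilter imports). A consumer-side sketch, with RawStages stubbed as an opaque type and hypothetical values throughout:

```ts
// Minimal sketch of the new contract; RawStages is opaque here.
type RawStages<Q, B, R> = { readonly tag: 'stages' };

type Actions<W> = {
  updateMany: [filter: object, update: object | object[]];
};

type TeardownRecord<W, M extends keyof Actions<W>> = {
  collection: { collectionName: string }; // WriteonlyCollection<W>, reduced to the field used below
  method: M;
  params: Actions<W>[M];
};

type StreamRunnerParam<V, Result> = {
  raw: (first: boolean) => RawStages<unknown, V, Result>;
  // CPS-encoded existential: W and M are only visible inside the callback.
  teardown: <R>(consume: <W, M extends keyof Actions<W>>(x: TeardownRecord<W, M>) => R) => R;
};

declare const param: StreamRunnerParam<unknown, 'out'>;

// A runner can now build different stages for the first run vs. resumes...
const firstRunStages = param.raw(true);
const resumeStages = param.raw(false);

// ...and serialize the teardown, e.g. to store next to the stream checkpoint.
const stored = param.teardown(x => ({
  collection: x.collection.collectionName,
  method: x.method,
  params: x.params,
}));
```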
package/index.esm.js CHANGED
@@ -549,7 +549,7 @@ const $merge_ = ({ into, on, whenNotMatched, ...notMatched }) => asStages([
  },
  ]);
 
- const subMerge = (args, out, gid, extra, idPrefix) => {
+ const subMerge = (args, out, gid, extra, idPrefix, first) => {
  const doubleReplace = (x) => x;
  const mergeAggregates = $set_(set()(mapExact0(args, (v, k) => to(v.merge(root().of(k).expr(), ctx()('new').of(k).expr())))));
  const gidPath = root().of(gid).expr();
@@ -573,12 +573,16 @@ const subMerge = (args, out, gid, extra, idPrefix) => {
  .with($set_(set()(addExtraAndMerge)))
  .with($merge_({
  ...out,
- vars: { new: ['new', root().expr()] },
- stages: 'ctx',
  on: root().of(gid),
- whenMatched: link()
- .with(mergeAggregates)
- .with(doubleReplace($set_(updater))).stages,
+ ...(first
+ ? { whenMatched: 'merge' }
+ : {
+ vars: { new: ['new', root().expr()] },
+ stages: 'ctx',
+ whenMatched: link()
+ .with(mergeAggregates)
+ .with(doubleReplace($set_(updater))).stages,
+ }),
  })).stages);
  };
 
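subMerge now branches on the new first flag: on a stream's first run the output collection holds no prior aggregates, so a plain whenMatched: 'merge' suffices; on later runs it keeps the pipeline form that re-combines each accumulator with the incoming document. Rendered as raw MongoDB stages, the two variants look roughly like this (hypothetical collection and field names; the library's vars/stages: 'ctx' plumbing is what produces the let binding):

```ts
// Sketch of the two $merge shapes, in plain MongoDB stage syntax.
const intoTarget = 'groups'; // placeholder output collection

// first === true: nothing to combine with, let the server merge fields.
const firstRunMerge = {
  $merge: { into: intoTarget, on: '_grp', whenMatched: 'merge', whenNotMatched: 'insert' },
};

// first === false: re-run the accumulator merge against the stored doc,
// with the incoming document bound to $$new inside the pipeline.
const resumeMerge = {
  $merge: {
    into: intoTarget,
    on: '_grp',
    let: { new: '$$ROOT' },
    whenMatched: [{ $set: { count: { $add: ['$count', '$$new.count'] } } }], // example accumulator
    whenNotMatched: 'insert',
  },
};
```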
@@ -586,11 +590,26 @@ const addGrp = (gid) => (expr) => {
  const omit = omitPick();
  return omit.backward(mergeExpr(omit.forward(expr), map1(gid, root().of('_id').expr())));
  };
- const $groupMerge = (id, args, out, gid, extra, idPrefix = '') => {
- return link()
+ const $groupMerge = (id, args, out, gid, extra, idPrefix = '') => ({
+ raw: (first) => link()
  .with(subGroup(id, args, addGrp(gid)))
- .with(subMerge(args, out, gid, extra, idPrefix)).stages;
- };
+ .with(subMerge(args, out, gid, extra, idPrefix, first)).stages,
+ teardown: c => c({
+ collection: out.into,
+ method: 'updateMany',
+ params: [
+ {},
+ [
+ {
+ $unset: Object.keys({
+ ...mapExactToObject(extra, () => 1),
+ ...mapExactToObject(args, () => 1),
+ }),
+ },
+ ],
+ ],
+ }),
+ });
  const $groupId = (id, args, out, extra) => $groupMerge(id, args, { into: out, whenNotMatched: 'fail' }, '_id', extra);
  const $group = (id, args, out, extra, idPrefix = '') => $groupMerge(id, args, { into: out, whenNotMatched: 'insert' }, '_grp', extra, idPrefix);
 
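$groupMerge's teardown is plain data describing how to invalidate its own output: an updateMany over the whole collection that $unsets every accumulator and extra field, leaving the group keys intact so the next first run can rebuild them. Replayed, the record amounts to something like this (hypothetical database, collection, and field names):

```ts
import { MongoClient } from 'mongodb';

// Replay sketch of a stored $groupMerge teardown record.
async function replayGroupTeardown(client: MongoClient) {
  await client.db('app').collection('groups').updateMany(
    {},                               // params[0]: every document
    [{ $unset: ['total', 'count'] }], // params[1]: pipeline-style update; field names are examples
  );
}
```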
@@ -936,8 +955,12 @@ const join = ({ lField, rField, left, right, as }, leftSnapshot, rightSnapshot,
  const mergeForeignIntoDoc = concatStages($replaceWithDelta(mergeObjects(root().of('left').expr(), fieldM({ a: root().of('right').expr(), b: root().of('_id').expr() }, dictId))), stagesUntilNextLookup.delta);
  const lRunnerInput = concatStages(joinR_Delta, mergeForeignIntoDoc);
  const rRunnerInput = concatStages(joinL_Delta, mergeForeignIntoDoc);
- const lRunner = left.out(concatStages(lRunnerInput, finalInput));
- const rRunner = right.out(concatStages(rRunnerInput, finalInput));
+ const getRunner = (f, stages, final) => f.out({
+ raw: first => concatStages(stages, final.raw(first)),
+ teardown: final.teardown,
+ });
+ const lRunner = getRunner(left, lRunnerInput, finalInput);
+ const rRunner = getRunner(right, rRunnerInput, finalInput);
  return () => merge({ lsource: lRunner(), rsource: rRunner() });
  },
  };
@@ -1010,25 +1033,39 @@ const $replaceWith = (expr) => ({
  lin: $replaceWith_(expr),
  });
 
- const $mergeId = () => (out, keys, id) => {
- const omRORec = omitRORec();
- const patch = mapExactToObject(keys, (_, k) => [k, root().of('after').of(k).expr()]);
- const replacer = ite(eqTyped(root().of('after').expr(), nil), field(omRORec.backward(spread(mapExact(keys, () => nil), {
- _id: ['_id', id],
- touchedAt: ['touchedAt', current],
- }))), field(omitPick().backward(spread(patch, {
- _id: ['_id', root().of('after').of('_id').expr()],
- touchedAt: ['touchedAt', current],
- }))));
- return link()
- .with($replaceWith_(replacer))
- .with($merge_({
- into: out,
- on: root().of('_id'),
- whenNotMatched: 'fail',
- whenMatched: 'merge',
- })).stages;
- };
+ const $mergeId = () => (out, keys, id) => ({
+ raw: (first) => {
+ const omRORec = omitRORec();
+ const patch = mapExactToObject(keys, (_, k) => [k, root().of('after').of(k).expr()]);
+ const replacer = ite(eqTyped(root().of('after').expr(), nil), field(omRORec.backward(spread(mapExact(keys, () => nil), {
+ _id: ['_id', id],
+ touchedAt: ['touchedAt', current],
+ }))), field(omitPick().backward(spread(patch, {
+ _id: ['_id', root().of('after').of('_id').expr()],
+ touchedAt: ['touchedAt', current],
+ }))));
+ return link()
+ .with($replaceWith_(replacer))
+ .with($merge_({
+ into: out,
+ on: root().of('_id'),
+ whenNotMatched: 'fail',
+ whenMatched: 'merge',
+ })).stages;
+ },
+ teardown: c => c({
+ collection: out,
+ method: 'updateMany',
+ params: [
+ {},
+ [
+ {
+ $unset: Object.keys(mapExactToObject(keys, () => 1)),
+ },
+ ],
+ ],
+ }),
+ });
  const $simpleMerge = () => (out, keys) => $mergeId()(out, keys, root().of('_id').expr());
  const $merge = () => (out, keys) => $mergeId()(out, keys, assertNotNull$1(root()
  .of('before')
@@ -1036,21 +1073,28 @@ const $merge = () => (out, keys) => $mergeId()(out, keys, assertNotNull$1(root()
  .expr()));
  const assertNotNull$1 = (expr) => expr;
 
- const $upsert = (out) => {
- const replacer = ite(eq(root().of('after').expr())(nil), field({
- deletedAt: ['deletedAt', current],
- _id: ['_id', assertNotNull(root().of('before').of('_id').expr())],
- touchedAt: ['touchedAt', current],
- }), mergeObjects(assertNotNull(root().of('after').expr()), field({ deletedAt: ['deletedAt', nil], touchedAt: ['touchedAt', current] })));
- return link()
- .with($replaceWith_(replacer))
- .with($merge_({
- into: out,
- on: root().of('_id'),
- whenMatched: 'merge',
- whenNotMatched: 'insert',
- })).stages;
- };
+ const $upsert = (out) => ({
+ teardown: c => c({
+ collection: out,
+ method: 'updateMany',
+ params: [{}, [{ $set: { deletedAt: '$$NOW', touchedAt: '$$CLUSTER_TIME' } }]],
+ }),
+ raw: () => {
+ const replacer = ite(eq(root().of('after').expr())(nil), field({
+ deletedAt: ['deletedAt', current],
+ _id: ['_id', assertNotNull(root().of('before').of('_id').expr())],
+ touchedAt: ['touchedAt', current],
+ }), mergeObjects(assertNotNull(root().of('after').expr()), field({ deletedAt: ['deletedAt', nil], touchedAt: ['touchedAt', current] })));
+ return link()
+ .with($replaceWith_(replacer))
+ .with($merge_({
+ into: out,
+ on: root().of('_id'),
+ whenMatched: 'merge',
+ whenNotMatched: 'insert',
+ })).stages;
+ },
+ });
  const assertNotNull = (expr) => expr;
 
  const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
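$upsert cannot surgically un-write fields the way the merge sinks can, so its teardown soft-deletes the entire sink: every document gets deletedAt stamped server-side, and the next run (which reaches raw with first === true) re-upserts the full snapshot, clearing deletedAt on rows that still exist. Replayed, the stored record is equivalent to (sketch):

```ts
import { Collection } from 'mongodb';

// Replay sketch of the stored $upsert teardown record.
async function replayUpsertTeardown(out: Collection) {
  await out.updateMany(
    {}, // every document in the sink
    [{ $set: { deletedAt: '$$NOW', touchedAt: '$$CLUSTER_TIME' } }], // server-evaluated variables
  );
}
```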
@@ -1135,8 +1179,11 @@ const makeWatchStream = (db, { collection, projection: p, hardMatch: m }, startA
  return { tryNext, close: () => stream.close() };
  };
 
+ const actions = {
+ updateMany: (c, args) => c.updateMany(...args),
+ };
  const streamNames = {};
- const executes$1 = (view, input, streamName) => {
+ const executes = (view, input, streamName) => {
  const hash = crypto$1
  .createHash('md5')
  .update(new Error().stack + '')
@@ -1173,9 +1220,7 @@ const executes$1 = (view, input, streamName) => {
  _id: ['_id', 1],
  }));
  const run = (finalInput) => {
- const clear = async () => {
- await snapshotCollection.drop();
- };
+ const clear = async () => Promise.all([snapshotCollection.drop(), last.deleteOne({ _id: streamName })]);
  const withStop = (next, tr) => {
  return addTeardown(() => ({ stop, next: next(), clear }), tr);
  };
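clear now also deletes the stream's checkpoint, which is what the index.d.ts change from Promise&lt;void&gt; to Promise&lt;unknown&gt; reflects: it resolves with the Promise.all tuple rather than nothing. Callers that awaited it for its side effects are unaffected (sketch):

```ts
// Caller-side sketch: the looser return type is harmless when the
// resolved value is ignored.
declare const clear: () => Promise<unknown>;

async function resetStream(): Promise<void> {
  await clear(); // drops the snapshot collection and removes the __last entry
}
```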
@@ -1184,13 +1229,40 @@ const executes$1 = (view, input, streamName) => {
  data: [],
  info: { job, debug },
  });
+ const data = {
+ input: input.delta,
+ finalInputFirst: finalInput.raw(true),
+ finalInput: finalInput.raw(false),
+ teardown: finalInput.teardown((x) => ({
+ collection: x.collection.collectionName,
+ method: x.method,
+ params: x.params,
+ })),
+ };
  const step0 = () => Promise.resolve(next(step1, 'empty new collection'));
  const stop = withStop(step0);
  const step1 = async () => {
  await snapshotCollection.updateMany({ updated: true }, { $set: { updated: false, after: null } });
  return next(step2, 'get last update');
  };
- const step2 = () => last.findOne({ _id: streamName }).then(ts => next(step3(ts), 'clone into new collection'));
+ const step2 = () => Promise.all([
+ last.findOne({ _id: streamName, data }),
+ last.findOne({ _id: streamName }),
+ ]).then(ts => next(step2_5(ts), 'handle teardown'));
+ const step2_5 = ([same, exists]) => async () => {
+ const handleTeardown = async (last) => {
+ const { collection: c, method: m, params: p } = last.data.teardown;
+ const { collection, method, params } = {
+ collection: db.collection(c),
+ method: m,
+ params: p,
+ };
+ await actions[method](collection, params);
+ };
+ if (exists && !same)
+ await handleTeardown(exists);
+ return next(step3(same), 'clone into new collection');
+ };
  const step3 = (lastTS) => async () => {
  const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch);
  const notDeleted = eq($ifNull(root().of('deletedAt').expr(), nil))(nil);
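The new step2/step2_5 pair is where teardown fires. data is the serialized fingerprint of the running pipeline; querying __last once with the fingerprint and once without distinguishes "checkpoint from this exact pipeline" from "checkpoint from an older pipeline". Only in the second case is the stored teardown replayed before re-cloning. A condensed sketch of the logic with the collaborators stubbed:

```ts
// Condensed sketch of the checkpoint check (names follow the source).
type Stored = { ts: unknown; data: { teardown: { collection: string; method: 'updateMany'; params: [object, object[]] } } };
declare const last: { findOne: (q: object) => Promise<Stored | null> };
declare const db: { collection: (n: string) => { updateMany: (f: object, u: object[]) => Promise<unknown> } };
declare const streamName: string;
declare const data: object;

async function checkCheckpoint(): Promise<Stored | null> {
  const [same, exists] = await Promise.all([
    last.findOne({ _id: streamName, data }), // matches only if the stored fingerprint is identical
    last.findOne({ _id: streamName }),       // matches any checkpoint for this stream
  ]);
  if (exists && !same) {
    // Pipeline changed since the checkpoint: undo the old pipeline's writes.
    const { collection, method, params } = exists.data.teardown;
    if (method === 'updateMany') await db.collection(collection).updateMany(...params);
  }
  // `same` doubles as the resume document: when the pipeline changed it is
  // null, so step3 re-clones from scratch and finalInput.raw(true) is used.
  return same;
}
```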
@@ -1212,10 +1284,10 @@ const executes$1 = (view, input, streamName) => {
  })).stages;
  const r = await aggregate(c => c({ coll: collection, input: cloneIntoNew }));
  await snapshotCollection.deleteMany({ updated: true, after: null, before: null });
- return next(step4(r), 'run the aggregation');
+ return next(step4({ result: r, ts: lastTS?.ts }), 'run the aggregation');
  };
  const makeStream = (startAt) => makeWatchStream(db, view, startAt);
- const step4 = (result) => async () => {
+ const step4 = ({ result, ts }) => async () => {
  const start = Date.now();
  await snapshotCollection.updateMany({ before: null }, { $set: { before: null } });
  const aggResult = await aggregate(c => c({
@@ -1224,7 +1296,7 @@ const executes$1 = (view, input, streamName) => {
  .with($match_(root().of('updated').has($eq(true))))
  .with($match_($expr(ne(root().of('after').expr())(root().of('before').expr()))))
  .with(input.delta)
- .with(finalInput).stages,
+ .with(finalInput.raw(ts === undefined)).stages,
  }), false, start);
  const stream = makeStream(result.cursor.atClusterTime);
  return next(step5({ result, aggResult, stream }), 'remove handled deleted updated', () => stream.close());
@@ -1250,7 +1322,12 @@ const executes$1 = (view, input, streamName) => {
  return next(step7(l), 'update __last');
  };
  const step7 = (l) => async () => {
- await last.updateOne({ _id: streamName }, { $set: { ts: l.result.cursor.atClusterTime } }, { upsert: true });
+ await last.updateOne({ _id: streamName }, {
+ $set: {
+ ts: l.result.cursor.atClusterTime,
+ data,
+ },
+ }, { upsert: true });
  return step8(l);
  };
  const step8 = (l) => {
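With step7 persisting data next to the resume timestamp, a checkpoint document in __last carries everything step2_5 needs to detect a changed pipeline and undo it. Its shape is roughly the following (hypothetical values; the stage arrays hold whatever raw and input.delta serialize to):

```ts
import { Timestamp } from 'mongodb';

// Sketch of a checkpoint document in the __last collection.
const checkpoint = {
  _id: 'orders-by-customer',         // streamName (hypothetical)
  ts: new Timestamp({ t: 0, i: 0 }), // resume point: result.cursor.atClusterTime
  data: {
    input: [] as object[],           // input.delta
    finalInputFirst: [] as object[], // finalInput.raw(true)
    finalInput: [] as object[],      // finalInput.raw(false)
    teardown: {
      collection: 'groups',          // stored by collectionName, not as a live handle
      method: 'updateMany',
      params: [{}, [{ $unset: ['total', 'count'] }]],
    },
  },
};
```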
@@ -1272,80 +1349,7 @@ const executes$1 = (view, input, streamName) => {
  out: run,
  };
  };
- const staging = (view, streamName) => pipe(input => executes$1(view, input, streamName), emptyDelta(), concatDelta, emptyDelta);
-
- const executes = (view, input, streamName) => {
- const hash = crypto$1
- .createHash('md5')
- .update(new Error().stack + '')
- .digest('base64url');
- if (!streamNames[streamName])
- streamNames[streamName] = hash;
- else if (streamNames[streamName] != hash)
- throw new Error('streamName already used');
- const { collection, projection, hardMatch, match } = view;
- const job = {};
- const db = collection.s.db, coll = collection.collectionName;
- db.command({
- collMod: coll,
- changeStreamPreAndPostImages: { enabled: true },
- });
- createIndex(collection, { touchedAt: 1 }, {
- partialFilterExpression: { deletedAt: { $eq: null } },
- name: 'touchedAt_' + new UUID().toString('base64'),
- });
- const last = db.collection('__last');
- const projectInput = $project_(spread(projection, {
- deletedAt: ['deletedAt', 1],
- _id: ['_id', 1],
- }));
- const notDeleted = root().of('deletedAt').has($eq(null));
- const run = (finalInput) => {
- const clear = async () => { };
- const withStop = (next, tr) => {
- return addTeardown(() => ({ stop, next: next(), clear }), tr);
- };
- const next = (next, debug, tr) => ({
- cont: withStop(next, tr),
- data: [],
- info: { job, debug },
- });
- const step0 = () => Promise.resolve(next(step1, 'get last update'));
- const stop = withStop(step0);
- const step1 = () => last.findOne({ _id: streamName }).then(ts => next(step4(ts), 'clone into new collection'));
- const makeStream = (startAt) => makeWatchStream(db, view, startAt);
- const step4 = (lastTS) => async () => {
- const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
- const aggResult = await aggregate(c => c({
- coll: collection,
- input: link()
- .with($match_(hardQuery))
- .with(projectInput)
- .with(input)
- .with(finalInput).stages,
- }));
- const stream = makeStream(aggResult.cursor.atClusterTime);
- return next(step7({ aggResult, result: aggResult, stream }), 'update __last', () => stream.close());
- };
- const step7 = (l) => async () => {
- await last.updateOne({ _id: streamName }, { $set: { ts: l.result.cursor.atClusterTime } }, { upsert: true });
- return step8(l);
- };
- const step8 = (l) => {
- return {
- data: l.aggResult.cursor.firstBatch,
- info: { job: undefined, debug: 'wait for change' },
- cont: withStop(() => l.stream.tryNext().then(doc => (doc ? next(step1, 'restart') : step8(l)))),
- };
- };
- return stop;
- };
- return {
- out: run,
- };
- };
- const emptyLin = () => ({ lin: link().stages });
- const from = (view, streamName) => pipe(input => executes(view, input.lin, streamName), { lin: link().stages }, ({ lin: a }, { lin: b }) => ({ lin: concatStages(a, b) }), emptyLin);
+ const staging = (view, streamName) => pipe(input => executes(view, input, streamName), emptyDelta(), concatDelta, emptyDelta);
 
  const dayAndMonthPart = (date) => asExpr({
  raw: f => asExprRaw({ $dateToString: { date: date.raw(f).get(), format: '%m-%d' } }),
@@ -1625,4 +1629,4 @@ const makeCol = async (docs, database, name) => {
  }
  };
 
- export { $accumulator, $and, $countDict, $entries, $eq, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $keys, $let, $lookup, $lt, $lte, $map, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $pushDict, $rand, $replaceWith, $set, $simpleMerge, $sum, $unwind, $unwindDelta, $upsert, Field, Machine, add, and, array, ceil, comp, concat$1 as concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first$1 as first, firstSure, floor, from, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, notNull, now, or, pair, prepare, rand, range, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
+ export { $accumulator, $and, $countDict, $entries, $eq, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $keys, $let, $lookup, $lt, $lte, $map, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $pushDict, $rand, $replaceWith, $set, $simpleMerge, $sum, $unwind, $unwindDelta, $upsert, Field, Machine, add, and, array, ceil, comp, concat$1 as concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first$1 as first, firstSure, floor, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, notNull, now, or, pair, prepare, rand, range, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
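Beyond the teardown machinery, 0.0.16 deletes the whole non-snapshot code path: the change-stream-only executes, from, and emptyLin are removed (the surviving executes is the renamed executes$1), matching the removal of SimpleStreamExecutionResult, StreamRunnerHKT, and StagesHKT from the typings and of from from both export lists. Callers of the removed entry point migrate to the snapshot-backed staging, which takes the same arguments (sketch):

```ts
// Migration sketch: `from` no longer exists in 0.0.16.
import { staging } from '@omegup/msync';

declare const view: Parameters<typeof staging>[0]; // the same View the old `from` accepted

// before (0.0.14): const stream = from(view, 'orders-stream');
const stream = staging(view, 'orders-stream'); // hypothetical stream name
```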
package/index.js CHANGED
@@ -551,7 +551,7 @@ const $merge_ = ({ into, on, whenNotMatched, ...notMatched }) => asStages([
  },
  ]);
 
- const subMerge = (args, out, gid, extra, idPrefix) => {
+ const subMerge = (args, out, gid, extra, idPrefix, first) => {
  const doubleReplace = (x) => x;
  const mergeAggregates = $set_(set()(mapExact0(args, (v, k) => to(v.merge(root().of(k).expr(), ctx()('new').of(k).expr())))));
  const gidPath = root().of(gid).expr();
@@ -575,12 +575,16 @@ const subMerge = (args, out, gid, extra, idPrefix) => {
  .with($set_(set()(addExtraAndMerge)))
  .with($merge_({
  ...out,
- vars: { new: ['new', root().expr()] },
- stages: 'ctx',
  on: root().of(gid),
- whenMatched: link()
- .with(mergeAggregates)
- .with(doubleReplace($set_(updater))).stages,
+ ...(first
+ ? { whenMatched: 'merge' }
+ : {
+ vars: { new: ['new', root().expr()] },
+ stages: 'ctx',
+ whenMatched: link()
+ .with(mergeAggregates)
+ .with(doubleReplace($set_(updater))).stages,
+ }),
  })).stages);
  };
 
@@ -588,11 +592,26 @@ const addGrp = (gid) => (expr) => {
  const omit = omitPick();
  return omit.backward(mergeExpr(omit.forward(expr), map1(gid, root().of('_id').expr())));
  };
- const $groupMerge = (id, args, out, gid, extra, idPrefix = '') => {
- return link()
+ const $groupMerge = (id, args, out, gid, extra, idPrefix = '') => ({
+ raw: (first) => link()
  .with(subGroup(id, args, addGrp(gid)))
- .with(subMerge(args, out, gid, extra, idPrefix)).stages;
- };
+ .with(subMerge(args, out, gid, extra, idPrefix, first)).stages,
+ teardown: c => c({
+ collection: out.into,
+ method: 'updateMany',
+ params: [
+ {},
+ [
+ {
+ $unset: Object.keys({
+ ...mapExactToObject(extra, () => 1),
+ ...mapExactToObject(args, () => 1),
+ }),
+ },
+ ],
+ ],
+ }),
+ });
  const $groupId = (id, args, out, extra) => $groupMerge(id, args, { into: out, whenNotMatched: 'fail' }, '_id', extra);
  const $group = (id, args, out, extra, idPrefix = '') => $groupMerge(id, args, { into: out, whenNotMatched: 'insert' }, '_grp', extra, idPrefix);
 
@@ -938,8 +957,12 @@ const join = ({ lField, rField, left, right, as }, leftSnapshot, rightSnapshot,
  const mergeForeignIntoDoc = concatStages($replaceWithDelta(mergeObjects(root().of('left').expr(), fieldM({ a: root().of('right').expr(), b: root().of('_id').expr() }, dictId))), stagesUntilNextLookup.delta);
  const lRunnerInput = concatStages(joinR_Delta, mergeForeignIntoDoc);
  const rRunnerInput = concatStages(joinL_Delta, mergeForeignIntoDoc);
- const lRunner = left.out(concatStages(lRunnerInput, finalInput));
- const rRunner = right.out(concatStages(rRunnerInput, finalInput));
+ const getRunner = (f, stages, final) => f.out({
+ raw: first => concatStages(stages, final.raw(first)),
+ teardown: final.teardown,
+ });
+ const lRunner = getRunner(left, lRunnerInput, finalInput);
+ const rRunner = getRunner(right, rRunnerInput, finalInput);
  return () => merge({ lsource: lRunner(), rsource: rRunner() });
  },
  };
@@ -1012,25 +1035,39 @@ const $replaceWith = (expr) => ({
  lin: $replaceWith_(expr),
  });
 
- const $mergeId = () => (out, keys, id) => {
- const omRORec = omitRORec();
- const patch = mapExactToObject(keys, (_, k) => [k, root().of('after').of(k).expr()]);
- const replacer = ite(eqTyped(root().of('after').expr(), nil), field(omRORec.backward(spread(mapExact(keys, () => nil), {
- _id: ['_id', id],
- touchedAt: ['touchedAt', current],
- }))), field(omitPick().backward(spread(patch, {
- _id: ['_id', root().of('after').of('_id').expr()],
- touchedAt: ['touchedAt', current],
- }))));
- return link()
- .with($replaceWith_(replacer))
- .with($merge_({
- into: out,
- on: root().of('_id'),
- whenNotMatched: 'fail',
- whenMatched: 'merge',
- })).stages;
- };
+ const $mergeId = () => (out, keys, id) => ({
+ raw: (first) => {
+ const omRORec = omitRORec();
+ const patch = mapExactToObject(keys, (_, k) => [k, root().of('after').of(k).expr()]);
+ const replacer = ite(eqTyped(root().of('after').expr(), nil), field(omRORec.backward(spread(mapExact(keys, () => nil), {
+ _id: ['_id', id],
+ touchedAt: ['touchedAt', current],
+ }))), field(omitPick().backward(spread(patch, {
+ _id: ['_id', root().of('after').of('_id').expr()],
+ touchedAt: ['touchedAt', current],
+ }))));
+ return link()
+ .with($replaceWith_(replacer))
+ .with($merge_({
+ into: out,
+ on: root().of('_id'),
+ whenNotMatched: 'fail',
+ whenMatched: 'merge',
+ })).stages;
+ },
+ teardown: c => c({
+ collection: out,
+ method: 'updateMany',
+ params: [
+ {},
+ [
+ {
+ $unset: Object.keys(mapExactToObject(keys, () => 1)),
+ },
+ ],
+ ],
+ }),
+ });
  const $simpleMerge = () => (out, keys) => $mergeId()(out, keys, root().of('_id').expr());
  const $merge = () => (out, keys) => $mergeId()(out, keys, assertNotNull$1(root()
  .of('before')
@@ -1038,21 +1075,28 @@ const $merge = () => (out, keys) => $mergeId()(out, keys, assertNotNull$1(root()
  .expr()));
  const assertNotNull$1 = (expr) => expr;
 
- const $upsert = (out) => {
- const replacer = ite(eq(root().of('after').expr())(nil), field({
- deletedAt: ['deletedAt', current],
- _id: ['_id', assertNotNull(root().of('before').of('_id').expr())],
- touchedAt: ['touchedAt', current],
- }), mergeObjects(assertNotNull(root().of('after').expr()), field({ deletedAt: ['deletedAt', nil], touchedAt: ['touchedAt', current] })));
- return link()
- .with($replaceWith_(replacer))
- .with($merge_({
- into: out,
- on: root().of('_id'),
- whenMatched: 'merge',
- whenNotMatched: 'insert',
- })).stages;
- };
+ const $upsert = (out) => ({
+ teardown: c => c({
+ collection: out,
+ method: 'updateMany',
+ params: [{}, [{ $set: { deletedAt: '$$NOW', touchedAt: '$$CLUSTER_TIME' } }]],
+ }),
+ raw: () => {
+ const replacer = ite(eq(root().of('after').expr())(nil), field({
+ deletedAt: ['deletedAt', current],
+ _id: ['_id', assertNotNull(root().of('before').of('_id').expr())],
+ touchedAt: ['touchedAt', current],
+ }), mergeObjects(assertNotNull(root().of('after').expr()), field({ deletedAt: ['deletedAt', nil], touchedAt: ['touchedAt', current] })));
+ return link()
+ .with($replaceWith_(replacer))
+ .with($merge_({
+ into: out,
+ on: root().of('_id'),
+ whenMatched: 'merge',
+ whenNotMatched: 'insert',
+ })).stages;
+ },
+ });
  const assertNotNull = (expr) => expr;
 
  const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
@@ -1137,8 +1181,11 @@ const makeWatchStream = (db, { collection, projection: p, hardMatch: m }, startA
  return { tryNext, close: () => stream.close() };
  };
 
+ const actions = {
+ updateMany: (c, args) => c.updateMany(...args),
+ };
  const streamNames = {};
- const executes$1 = (view, input, streamName) => {
+ const executes = (view, input, streamName) => {
  const hash = crypto$1
  .createHash('md5')
  .update(new Error().stack + '')
@@ -1175,9 +1222,7 @@ const executes$1 = (view, input, streamName) => {
  _id: ['_id', 1],
  }));
  const run = (finalInput) => {
- const clear = async () => {
- await snapshotCollection.drop();
- };
+ const clear = async () => Promise.all([snapshotCollection.drop(), last.deleteOne({ _id: streamName })]);
  const withStop = (next, tr) => {
  return addTeardown(() => ({ stop, next: next(), clear }), tr);
  };
@@ -1186,13 +1231,40 @@ const executes$1 = (view, input, streamName) => {
  data: [],
  info: { job, debug },
  });
+ const data = {
+ input: input.delta,
+ finalInputFirst: finalInput.raw(true),
+ finalInput: finalInput.raw(false),
+ teardown: finalInput.teardown((x) => ({
+ collection: x.collection.collectionName,
+ method: x.method,
+ params: x.params,
+ })),
+ };
  const step0 = () => Promise.resolve(next(step1, 'empty new collection'));
  const stop = withStop(step0);
  const step1 = async () => {
  await snapshotCollection.updateMany({ updated: true }, { $set: { updated: false, after: null } });
  return next(step2, 'get last update');
  };
- const step2 = () => last.findOne({ _id: streamName }).then(ts => next(step3(ts), 'clone into new collection'));
+ const step2 = () => Promise.all([
+ last.findOne({ _id: streamName, data }),
+ last.findOne({ _id: streamName }),
+ ]).then(ts => next(step2_5(ts), 'handle teardown'));
+ const step2_5 = ([same, exists]) => async () => {
+ const handleTeardown = async (last) => {
+ const { collection: c, method: m, params: p } = last.data.teardown;
+ const { collection, method, params } = {
+ collection: db.collection(c),
+ method: m,
+ params: p,
+ };
+ await actions[method](collection, params);
+ };
+ if (exists && !same)
+ await handleTeardown(exists);
+ return next(step3(same), 'clone into new collection');
+ };
  const step3 = (lastTS) => async () => {
  const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch);
  const notDeleted = eq($ifNull(root().of('deletedAt').expr(), nil))(nil);
@@ -1214,10 +1286,10 @@ const executes$1 = (view, input, streamName) => {
  })).stages;
  const r = await aggregate(c => c({ coll: collection, input: cloneIntoNew }));
  await snapshotCollection.deleteMany({ updated: true, after: null, before: null });
- return next(step4(r), 'run the aggregation');
+ return next(step4({ result: r, ts: lastTS?.ts }), 'run the aggregation');
  };
  const makeStream = (startAt) => makeWatchStream(db, view, startAt);
- const step4 = (result) => async () => {
+ const step4 = ({ result, ts }) => async () => {
  const start = Date.now();
  await snapshotCollection.updateMany({ before: null }, { $set: { before: null } });
  const aggResult = await aggregate(c => c({
@@ -1226,7 +1298,7 @@ const executes$1 = (view, input, streamName) => {
  .with($match_(root().of('updated').has($eq(true))))
  .with($match_($expr(ne(root().of('after').expr())(root().of('before').expr()))))
  .with(input.delta)
- .with(finalInput).stages,
+ .with(finalInput.raw(ts === undefined)).stages,
  }), false, start);
  const stream = makeStream(result.cursor.atClusterTime);
  return next(step5({ result, aggResult, stream }), 'remove handled deleted updated', () => stream.close());
@@ -1252,7 +1324,12 @@ const executes$1 = (view, input, streamName) => {
  return next(step7(l), 'update __last');
  };
  const step7 = (l) => async () => {
- await last.updateOne({ _id: streamName }, { $set: { ts: l.result.cursor.atClusterTime } }, { upsert: true });
+ await last.updateOne({ _id: streamName }, {
+ $set: {
+ ts: l.result.cursor.atClusterTime,
+ data,
+ },
+ }, { upsert: true });
  return step8(l);
  };
  const step8 = (l) => {
@@ -1274,80 +1351,7 @@ const executes$1 = (view, input, streamName) => {
  out: run,
  };
  };
- const staging = (view, streamName) => pipe(input => executes$1(view, input, streamName), emptyDelta(), concatDelta, emptyDelta);
-
- const executes = (view, input, streamName) => {
- const hash = crypto$1
- .createHash('md5')
- .update(new Error().stack + '')
- .digest('base64url');
- if (!streamNames[streamName])
- streamNames[streamName] = hash;
- else if (streamNames[streamName] != hash)
- throw new Error('streamName already used');
- const { collection, projection, hardMatch, match } = view;
- const job = {};
- const db = collection.s.db, coll = collection.collectionName;
- db.command({
- collMod: coll,
- changeStreamPreAndPostImages: { enabled: true },
- });
- createIndex(collection, { touchedAt: 1 }, {
- partialFilterExpression: { deletedAt: { $eq: null } },
- name: 'touchedAt_' + new mongodb.UUID().toString('base64'),
- });
- const last = db.collection('__last');
- const projectInput = $project_(spread(projection, {
- deletedAt: ['deletedAt', 1],
- _id: ['_id', 1],
- }));
- const notDeleted = root().of('deletedAt').has($eq(null));
- const run = (finalInput) => {
- const clear = async () => { };
- const withStop = (next, tr) => {
- return addTeardown(() => ({ stop, next: next(), clear }), tr);
- };
- const next = (next, debug, tr) => ({
- cont: withStop(next, tr),
- data: [],
- info: { job, debug },
- });
- const step0 = () => Promise.resolve(next(step1, 'get last update'));
- const stop = withStop(step0);
- const step1 = () => last.findOne({ _id: streamName }).then(ts => next(step4(ts), 'clone into new collection'));
- const makeStream = (startAt) => makeWatchStream(db, view, startAt);
- const step4 = (lastTS) => async () => {
- const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
- const aggResult = await aggregate(c => c({
- coll: collection,
- input: link()
- .with($match_(hardQuery))
- .with(projectInput)
- .with(input)
- .with(finalInput).stages,
- }));
- const stream = makeStream(aggResult.cursor.atClusterTime);
- return next(step7({ aggResult, result: aggResult, stream }), 'update __last', () => stream.close());
- };
- const step7 = (l) => async () => {
- await last.updateOne({ _id: streamName }, { $set: { ts: l.result.cursor.atClusterTime } }, { upsert: true });
- return step8(l);
- };
- const step8 = (l) => {
- return {
- data: l.aggResult.cursor.firstBatch,
- info: { job: undefined, debug: 'wait for change' },
- cont: withStop(() => l.stream.tryNext().then(doc => (doc ? next(step1, 'restart') : step8(l)))),
- };
- };
- return stop;
- };
- return {
- out: run,
- };
- };
- const emptyLin = () => ({ lin: link().stages });
- const from = (view, streamName) => pipe(input => executes(view, input.lin, streamName), { lin: link().stages }, ({ lin: a }, { lin: b }) => ({ lin: concatStages(a, b) }), emptyLin);
+ const staging = (view, streamName) => pipe(input => executes(view, input, streamName), emptyDelta(), concatDelta, emptyDelta);
 
  const dayAndMonthPart = (date) => asExpr({
  raw: f => asExprRaw({ $dateToString: { date: date.raw(f).get(), format: '%m-%d' } }),
@@ -1698,7 +1702,6 @@ exports.filterDefined = filterDefined;
  exports.first = first$1;
  exports.firstSure = firstSure;
  exports.floor = floor;
- exports.from = from;
  exports.func = func;
  exports.gt = gt;
  exports.gte = gte;
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "module": "index.esm.js",
  "typings": "index.d.ts",
  "name": "@omegup/msync",
- "version": "0.0.14",
+ "version": "0.0.16",
  "dependencies": {
  "dayjs": "^1.11.9",
  "dotenv": "^16.3.1",