@omegup/msync 0.0.66 → 0.0.68
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.d.ts +11 -2
- package/index.esm.js +60 -30
- package/index.js +61 -29
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -499,14 +499,17 @@ declare const $insertX: <T extends doc, D extends O>(out: RWCollection<Merge<T>>
 declare const $simpleInsert: <T extends doc>(out: RWCollection<Merge<T>>) => StreamRunnerParam<T, "out">;
 declare const $insert: <T extends doc>(out: RWCollection<Merge<T>>) => StreamRunnerParam<Delta<T>, "out">;

+declare const $group_: <V extends O>() => <ID, T extends O, C = unknown>(id: Expr<ID, T, C>, args: Accumulators<T, V, C>) => RawStages<O, T, Rec<"_id", ID> & V, C, 1>;
+
 type DeltaPipe<Q extends O, T extends Q, F extends HKT<O2>, G extends HKT<O3>> = {
     with: <Q2 extends O, V extends Q2>(map: (a: Stream<Q, T, F, G>) => Stream<Q | Q2, V, F, G>) => DeltaPipe<Q | Q2, V, F, G>;
     then: <Q2 extends O, V extends Q2>(next: App<G, [Q2 | T, T, V]>) => DeltaPipe<Q | Q2, V, F, G>;
     get: () => App<F, [Q, T]>;
 };

-type Allowed$1<K> = Exclude<K, 'deletedAt' | '_id'>;
 type AllowedPick$1<V extends Model, K extends StrKey<V>> = OPickD<V, Allowed$1<K>>;
+type Allowed$1<K> = Exclude<K, 'deletedAt' | '_id'>;
+
 interface SnapshotStreamHKT extends HKT<O2> {
     readonly out: SnapshotStreamExecutionResult<I<O2, this>[0], I<O2, this>[1]>;
 }
@@ -525,6 +528,12 @@ interface StagesHKT extends HKT<O3> {
 }
 declare const from: <V extends Model, KK extends StrKey<V>>(view: View<V, Allowed<KK>>, streamName: string) => DeltaPipe<AllowedPick<V, KK>, AllowedPick<V, KK>, StreamRunnerHKT, StagesHKT>;

+type SingleResult<out Result> = <Result2>(finalInput: RawStages<unknown, Result, Result2>) => Stages<unknown, Result2, unknown>;
+interface SnapshotStreamHKT2 extends HKT<O2> {
+    readonly out: SingleResult<I<O2, this>[1]>;
+}
+declare const single: <V extends Model, KK extends StrKey<V>>(view: View<V, Allowed$1<KK>>) => DeltaPipe<AllowedPick$1<V, KK>, AllowedPick$1<V, KK>, SnapshotStreamHKT2, DeltaHKT>;
+
 declare const max: <D, C>(...expr: Expr<number, D, C>[]) => Expr<number, D, C>;
 declare const lt: <D, C>(expr_0: Expr<number, D, C>, expr_1: Expr<number, D, C>) => Expr<boolean, D, C>;
 declare const gt: <D, C>(expr_0: Expr<number, D, C>, expr_1: Expr<number, D, C>) => Expr<boolean, D, C>;
@@ -682,4 +691,4 @@ declare const enablePreAndPostImages: <T extends doc>(coll: Collection<T>) => Pr
 declare const prepare: (testName?: string) => Promise<MongoClient$1>;
 declare const makeCol: <T extends ID>(docs: readonly OptionalUnlessRequiredId<T>[], database: Db, name?: string) => Promise<Collection<T>>;

-export { $accumulator, $and, $countDict, $entries, $eq, $exists, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $insert, $insertX, $keys, $let, $lookup, $lt, $lte, $map, $map0, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $outerLookup, $pushDict, $rand, $replaceWith, $set, $simpleInsert, $simpleMerge, $sum, $type, $unwind, $unwindDelta, type Accumulators, type Arr, type AsLiteral, type Delta, type DeltaAccumulator, type DeltaAccumulators, Expr, type ExprHKT, type Exprs, type ExprsExact, type ExprsExactHKT, type ExprsPart, Field, type ID, type Loose, Machine, type Merge, type MergeArgs, type MergeInto, type MergeMapOArgs, type Model, type MongoTypeNames, type N, type NoRaw, type NullToOBJ, type O, type OPick, type OPickD, type RONoRaw, type RORec, type RawStages, type Rec, type Replace, type SnapshotStreamExecutionResult, type StrKey, type Strict, type TS, Type, type WriteonlyCollection, add, and, anyElementTrue, array, ceil, comp, concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, type doc, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first, firstSure, floor, from, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, not, type notArr, notNull, now, or, pair, prepare, rand, range, regex, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
+export { $accumulator, $and, $countDict, $entries, $eq, $exists, $expr, $getField, $group, $groupId, $groupMerge, $group_, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $insert, $insertX, $keys, $let, $lookup, $lt, $lte, $map, $map0, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $outerLookup, $pushDict, $rand, $replaceWith, $set, $simpleInsert, $simpleMerge, $sum, $type, $unwind, $unwindDelta, type Accumulators, type Arr, type AsLiteral, type Delta, type DeltaAccumulator, type DeltaAccumulators, Expr, type ExprHKT, type Exprs, type ExprsExact, type ExprsExactHKT, type ExprsPart, Field, type ID, type Loose, Machine, type Merge, type MergeArgs, type MergeInto, type MergeMapOArgs, type Model, type MongoTypeNames, type N, type NoRaw, type NullToOBJ, type O, type OPick, type OPickD, type RONoRaw, type RORec, type RawStages, type Rec, type Replace, type SnapshotStreamExecutionResult, type StrKey, type Strict, type TS, Type, type WriteonlyCollection, add, and, anyElementTrue, array, ceil, comp, concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, type doc, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first, firstSure, floor, from, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, not, type notArr, notNull, now, or, pair, prepare, rand, range, regex, root, set, setField, single, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
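The index.d.ts changes above add two public entry points: a curried `$group_` stage builder, and `single`, which pipes a view through the same `DeltaPipe` combinators as `from` but resolves to a one-shot `SingleResult` instead of a registered, named stream. Below is a minimal sketch of how `single` might be driven, reasoning only from the declared types; the `ordersView` value is hypothetical, and the ambient declaration sidesteps `View` construction, which these declarations do not cover.

import { single } from '@omegup/msync';

// Hypothetical view, built the same way one would build a view for
// from(view, streamName); typed loosely off single's own signature.
declare const ordersView: Parameters<typeof single>[0];

const pipe = single(ordersView); // DeltaPipe<..., SnapshotStreamHKT2, DeltaHKT>
const runOnce = pipe.get();      // SingleResult: a function that takes the
                                 // final RawStages and returns runnable Stages,
                                 // with no streamName to register.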
package/index.esm.js
CHANGED
@@ -1506,8 +1506,31 @@ const actions = {
         [`db['${c.collectionName}'].deleteMany(...`, args, ')'],
     ],
 };
+
+const getFirstStages = (view) => {
+    const { projection, hardMatch: pre, match } = view;
+    const projectInput = $project_(spread(projection, {
+        deletedAt: ['deletedAt', 1],
+        _id: ['_id', 1],
+    }));
+    const removeNotYetSynchronizedFields = Object.values(mapExactToObject(projection, (_, k) => k.startsWith('_') ? root().of(k).has($exists(true)) : null));
+    const hardMatch = $and(pre, ...removeNotYetSynchronizedFields);
+    const firstStages = (lastTS) => {
+        const hardQuery = $and(lastTS
+            ? root().of('touchedAt').has($gtTs(lastTS.ts))
+            : root().of('deletedAt').has($eq(null)), lastTS ? null : match && $expr(match), hardMatch);
+        return link()
+            .with($match_(hardQuery))
+            .with(projectInput);
+    };
+    return { firstStages, hardMatch };
+};
+
 const streamNames = {};
-const executes$1 = (view, input, streamName, skip = false, after) => {
+const executes$2 = (view, input, streamName, skip = false, after) => {
+    const { collection, projection, match } = view;
+    const { firstStages, hardMatch } = getFirstStages(view);
+    const db = collection.s.db, coll = collection.collectionName;
     const hash = crypto$1
         .createHash('md5')
         .update(new Error().stack + '')
@@ -1516,11 +1539,6 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
         streamNames[streamName] = hash;
     else if (streamNames[streamName] != hash)
         throw new Error(`streamName ${streamName} already used`);
-    const { collection, projection, hardMatch: pre, match } = view;
-    const removeNotYetSynchronizedFields = Object.values(mapExactToObject(projection, (_, k) => k.startsWith('_') ? root().of(k).has($exists(true)) : null));
-    const hardMatch = $and(pre, ...removeNotYetSynchronizedFields);
-    const job = {};
-    const db = collection.s.db, coll = collection.collectionName;
     db.command({
         collMod: coll,
         changeStreamPreAndPostImages: { enabled: true },
@@ -1541,12 +1559,12 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
         partialFilterExpression: { updated: true, after: null, before: null },
         name: 'updated_nulls_' + new UUID().toString('base64'),
     });
-    const projectInput = $project_(spread(projection, {
-        deletedAt: ['deletedAt', 1],
-        _id: ['_id', 1],
-    }));
+    const job = {};
     const run = (finalInput) => {
-        const clear = async () => Promise.all([
+        const clear = async () => Promise.all([
+            snapshotCollection.drop().catch(noop).catch(noop),
+            last.deleteOne({ _id: streamName }),
+        ]);
         const withStop = (next, tr) => {
             return addTeardown(() => ({ stop, next: next(), clear }), tr);
         };
@@ -1594,11 +1612,11 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
        };
        const [action, out] = actions[method](collection, params);
        log('teardown', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
-        await Promise.all([snapshotCollection.drop(), action]);
+        await Promise.all([snapshotCollection.drop().catch(noop), action]);
        log('teardown done', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
    };
-    if (exists
-        await handleTeardown(exists);
+    if (!exists || !same) {
+        await handleTeardown(exists ?? { data });
    }
    await after?.();
    return nextData([])(async () => {
@@ -1607,9 +1625,6 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
        }, 'clone into new collection');
    };
    const step3 = (lastTS) => async () => {
-        const hardQuery = $and(lastTS
-            ? root().of('touchedAt').has($gtTs(lastTS.ts))
-            : root().of('deletedAt').has($eq(null)), lastTS ? null : match && $expr(match), hardMatch);
        const notDeleted = eq($ifNull(root().of('deletedAt').expr(), nil))(nil);
        const query = match ? and(notDeleted, match) : notDeleted;
        const replaceRaw = $replaceWith_(field({
@@ -1617,9 +1632,7 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
            updated: ['updated', val(true)],
            _id: ['_id', root().of('_id').expr()],
        }));
-        const cloneIntoNew = link()
-            .with($match_(hardQuery))
-            .with(projectInput)
+        const cloneIntoNew = firstStages(lastTS)
            .with(replaceRaw)
            .with($merge_({
                into: snapshotCollection,
@@ -1702,9 +1715,9 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
        out: run,
    };
 };
-const staging = (view, streamName, skip = false, after) => pipe(input => executes$1(view, input, streamName, skip, after), emptyDelta(), concatDelta, emptyDelta);
+const staging = (view, streamName, skip = false, after) => pipe(input => executes$2(view, input, streamName, skip, after), emptyDelta(), concatDelta, emptyDelta);

-const executes = (view, input, streamName) => {
+const executes$1 = (view, input, streamName) => {
    const hash = crypto$1
        .createHash('md5')
        .update(new Error().stack + '')
@@ -1732,6 +1745,13 @@ const executes = (view, input, streamName) => {
        _id: ['_id', 1],
    }));
    const notDeleted = root().of('deletedAt').has($eq(null));
+    const stages = (lastTS) => {
+        const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
+        return link()
+            .with($match_(hardQuery))
+            .with(projectInput)
+            .with(input);
+    };
    const run = (finalInput) => {
        const clear = async () => { };
        const withStop = (next, tr) => {
@@ -1788,14 +1808,9 @@ const executes = (view, input, streamName) => {
    };
    const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
    const step4 = (lastTS) => async () => {
-        const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
        const aggResult = await aggregate(streamName, c => c({
            coll: collection,
-            input: link()
-                .with($match_(hardQuery))
-                .with(projectInput)
-                .with(input)
-                .with(finalInput.raw(lastTS === null)).stages,
+            input: stages(lastTS).with(finalInput.raw(lastTS === null)).stages,
        }));
        const stream = makeStream(aggResult.cursor.atClusterTime);
        return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream }), 'update __last', () => stream.close());
@@ -1820,7 +1835,22 @@ const executes = (view, input, streamName) => {
    };
 };
 const emptyLin = () => ({ lin: link().stages });
-const from = (view, streamName) => pipe(input => executes(view, input.lin, streamName), { lin: link().stages }, ({ lin: a }, { lin: b }) => ({ lin: concatStages(a, b) }), emptyLin);
+const from = (view, streamName) => pipe(input => executes$1(view, input.lin, streamName), { lin: link().stages }, ({ lin: a }, { lin: b }) => ({ lin: concatStages(a, b) }), emptyLin);
+
+const executes = (view, input) => {
+    const { firstStages } = getFirstStages(view);
+    const { collection } = view;
+    return (finalInput) => {
+        const start = input.raw(root);
+        const stages = firstStages(null).with(start).with(finalInput).stages;
+        return c => c({
+            coll: collection,
+            input: link().stages,
+            exec: stages,
+        });
+    };
+};
+const single = (view) => pipe(input => executes(view, input), emptyDelta(), concatDelta, emptyDelta);

 require('dotenv').config();
 const uri = process.env['MONGO_URL'];
@@ -1865,4 +1895,4 @@ const makeCol = async (docs, database, name) => {
    }
 };

-export { $accumulator, $and, $countDict, $entries, $eq, $exists, $expr, $getField, $group, $groupId, $groupMerge, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $insert, $insertX, $keys, $let, $lookup, $lt, $lte, $map, $map0, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $outerLookup, $pushDict, $rand, $replaceWith, $set, $simpleInsert, $simpleMerge, $sum, $type, $unwind, $unwindDelta, Field, Machine, add, and, anyElementTrue, array, ceil, comp, concat$1 as concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first$1 as first, firstSure, floor, from, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, not, notNull, now, or, pair, prepare, rand, range, regex, root, set, setField, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
+export { $accumulator, $and, $countDict, $entries, $eq, $exists, $expr, $getField, $group, $groupId, $groupMerge, $group_, $gt, $gtTs, $gte, $gteTs, $ifNull, $in, $insert, $insertX, $keys, $let, $lookup, $lt, $lte, $map, $map0, $map1, $match, $matchDelta, $merge, $merge_, $ne, $nin, $nor, $or, $outerLookup, $pushDict, $rand, $replaceWith, $set, $simpleInsert, $simpleMerge, $sum, $type, $unwind, $unwindDelta, Field, Machine, add, and, anyElementTrue, array, ceil, comp, concat$1 as concat, concatArray, createIndex, ctx, current, dateAdd, dateDiff, dateLt, datePart, dayAndMonthPart, divide, enablePreAndPostImages, eq, eqTyped, except, exprMapVal, field, fieldF, fieldM, filter, filterDefined, first$1 as first, firstSure, floor, from, func, gt, gte, inArray, isArray, ite, last, log, lt, lte, makeCol, map1, mapVal, max, maxDate, mergeExact, mergeExact0, mergeExpr, mergeObjects, minDate, monthPart, multiply, ne, nil, noop, not, notNull, now, or, pair, prepare, rand, range, regex, root, set, setField, single, size, slice, sortArray, staging, startOf, str, sub, subtract, to, toInt, val, weekPart, wrap, year };
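index.esm.js above and index.js below are the ESM and CJS builds of the same source, so they carry the same refactor: the `$match`/`$project` prelude that `executes$1` and `executes` each assembled inline is hoisted into a shared `getFirstStages(view)` helper (also reused by the new `single`/`executes`), the runners are renamed (`executes$1` becomes `executes$2`, `executes` becomes `executes$1`), and teardown now tolerates an already-dropped snapshot collection. A small sketch of that teardown pattern follows, with hypothetical collection names; the package's real snapshot and checkpoint collections are internal.

import type { Db } from 'mongodb';

const noop = () => { };

// Mirrors the new clear(): drop the snapshot collection and forget the
// stream's checkpoint document. drop() rejects with "ns not found" once the
// collection is already gone, so .catch(noop) makes the cleanup safe to run
// repeatedly -- the same reason the diff adds .catch(noop) to the teardown's
// Promise.all([snapshotCollection.drop(), action]).
const clear = (db: Db, streamName: string) =>
    Promise.all([
        db.collection('snapshot_' + streamName).drop().catch(noop),
        db.collection<{ _id: string }>('__last').deleteOne({ _id: streamName }),
    ]);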
package/index.js
CHANGED
@@ -1508,8 +1508,31 @@ const actions = {
         [`db['${c.collectionName}'].deleteMany(...`, args, ')'],
     ],
 };
+
+const getFirstStages = (view) => {
+    const { projection, hardMatch: pre, match } = view;
+    const projectInput = $project_(spread(projection, {
+        deletedAt: ['deletedAt', 1],
+        _id: ['_id', 1],
+    }));
+    const removeNotYetSynchronizedFields = Object.values(mapExactToObject(projection, (_, k) => k.startsWith('_') ? root().of(k).has($exists(true)) : null));
+    const hardMatch = $and(pre, ...removeNotYetSynchronizedFields);
+    const firstStages = (lastTS) => {
+        const hardQuery = $and(lastTS
+            ? root().of('touchedAt').has($gtTs(lastTS.ts))
+            : root().of('deletedAt').has($eq(null)), lastTS ? null : match && $expr(match), hardMatch);
+        return link()
+            .with($match_(hardQuery))
+            .with(projectInput);
+    };
+    return { firstStages, hardMatch };
+};
+
 const streamNames = {};
-const executes$1 = (view, input, streamName, skip = false, after) => {
+const executes$2 = (view, input, streamName, skip = false, after) => {
+    const { collection, projection, match } = view;
+    const { firstStages, hardMatch } = getFirstStages(view);
+    const db = collection.s.db, coll = collection.collectionName;
     const hash = crypto$1
         .createHash('md5')
         .update(new Error().stack + '')
@@ -1518,11 +1541,6 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
         streamNames[streamName] = hash;
     else if (streamNames[streamName] != hash)
         throw new Error(`streamName ${streamName} already used`);
-    const { collection, projection, hardMatch: pre, match } = view;
-    const removeNotYetSynchronizedFields = Object.values(mapExactToObject(projection, (_, k) => k.startsWith('_') ? root().of(k).has($exists(true)) : null));
-    const hardMatch = $and(pre, ...removeNotYetSynchronizedFields);
-    const job = {};
-    const db = collection.s.db, coll = collection.collectionName;
     db.command({
         collMod: coll,
         changeStreamPreAndPostImages: { enabled: true },
@@ -1543,12 +1561,12 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
         partialFilterExpression: { updated: true, after: null, before: null },
         name: 'updated_nulls_' + new mongodb.UUID().toString('base64'),
     });
-    const projectInput = $project_(spread(projection, {
-        deletedAt: ['deletedAt', 1],
-        _id: ['_id', 1],
-    }));
+    const job = {};
     const run = (finalInput) => {
-        const clear = async () => Promise.all([
+        const clear = async () => Promise.all([
+            snapshotCollection.drop().catch(noop).catch(noop),
+            last.deleteOne({ _id: streamName }),
+        ]);
         const withStop = (next, tr) => {
             return addTeardown(() => ({ stop, next: next(), clear }), tr);
         };
@@ -1596,11 +1614,11 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
        };
        const [action, out] = actions[method](collection, params);
        log('teardown', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
-        await Promise.all([snapshotCollection.drop(), action]);
+        await Promise.all([snapshotCollection.drop().catch(noop), action]);
        log('teardown done', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
    };
-    if (exists
-        await handleTeardown(exists);
+    if (!exists || !same) {
+        await handleTeardown(exists ?? { data });
    }
    await after?.();
    return nextData([])(async () => {
@@ -1609,9 +1627,6 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
        }, 'clone into new collection');
    };
    const step3 = (lastTS) => async () => {
-        const hardQuery = $and(lastTS
-            ? root().of('touchedAt').has($gtTs(lastTS.ts))
-            : root().of('deletedAt').has($eq(null)), lastTS ? null : match && $expr(match), hardMatch);
        const notDeleted = eq($ifNull(root().of('deletedAt').expr(), nil))(nil);
        const query = match ? and(notDeleted, match) : notDeleted;
        const replaceRaw = $replaceWith_(field({
@@ -1619,9 +1634,7 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
            updated: ['updated', val(true)],
            _id: ['_id', root().of('_id').expr()],
        }));
-        const cloneIntoNew = link()
-            .with($match_(hardQuery))
-            .with(projectInput)
+        const cloneIntoNew = firstStages(lastTS)
            .with(replaceRaw)
            .with($merge_({
                into: snapshotCollection,
@@ -1704,9 +1717,9 @@ const executes$1 = (view, input, streamName, skip = false, after) => {
        out: run,
    };
 };
-const staging = (view, streamName, skip = false, after) => pipe(input => executes$1(view, input, streamName, skip, after), emptyDelta(), concatDelta, emptyDelta);
+const staging = (view, streamName, skip = false, after) => pipe(input => executes$2(view, input, streamName, skip, after), emptyDelta(), concatDelta, emptyDelta);

-const executes = (view, input, streamName) => {
+const executes$1 = (view, input, streamName) => {
    const hash = crypto$1
        .createHash('md5')
        .update(new Error().stack + '')
@@ -1734,6 +1747,13 @@ const executes = (view, input, streamName) => {
        _id: ['_id', 1],
    }));
    const notDeleted = root().of('deletedAt').has($eq(null));
+    const stages = (lastTS) => {
+        const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
+        return link()
+            .with($match_(hardQuery))
+            .with(projectInput)
+            .with(input);
+    };
    const run = (finalInput) => {
        const clear = async () => { };
        const withStop = (next, tr) => {
@@ -1790,14 +1810,9 @@ const executes = (view, input, streamName) => {
    };
    const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
    const step4 = (lastTS) => async () => {
-        const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
        const aggResult = await aggregate(streamName, c => c({
            coll: collection,
-            input: link()
-                .with($match_(hardQuery))
-                .with(projectInput)
-                .with(input)
-                .with(finalInput.raw(lastTS === null)).stages,
+            input: stages(lastTS).with(finalInput.raw(lastTS === null)).stages,
        }));
        const stream = makeStream(aggResult.cursor.atClusterTime);
        return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream }), 'update __last', () => stream.close());
@@ -1822,7 +1837,22 @@ const executes = (view, input, streamName) => {
    };
 };
 const emptyLin = () => ({ lin: link().stages });
-const from = (view, streamName) => pipe(input => executes(view, input.lin, streamName), { lin: link().stages }, ({ lin: a }, { lin: b }) => ({ lin: concatStages(a, b) }), emptyLin);
+const from = (view, streamName) => pipe(input => executes$1(view, input.lin, streamName), { lin: link().stages }, ({ lin: a }, { lin: b }) => ({ lin: concatStages(a, b) }), emptyLin);
+
+const executes = (view, input) => {
+    const { firstStages } = getFirstStages(view);
+    const { collection } = view;
+    return (finalInput) => {
+        const start = input.raw(root);
+        const stages = firstStages(null).with(start).with(finalInput).stages;
+        return c => c({
+            coll: collection,
+            input: link().stages,
+            exec: stages,
+        });
+    };
+};
+const single = (view) => pipe(input => executes(view, input), emptyDelta(), concatDelta, emptyDelta);

 require('dotenv').config();
 const uri = process.env['MONGO_URL'];
@@ -1878,6 +1908,7 @@ exports.$getField = $getField;
 exports.$group = $group;
 exports.$groupId = $groupId;
 exports.$groupMerge = $groupMerge;
+exports.$group_ = $group_;
 exports.$gt = $gt;
 exports.$gtTs = $gtTs;
 exports.$gte = $gte;
@@ -1983,6 +2014,7 @@ exports.regex = regex;
 exports.root = root;
 exports.set = set;
 exports.setField = setField;
+exports.single = single;
 exports.size = size;
 exports.slice = slice;
 exports.sortArray = sortArray;
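On the CJS side the only additional changes are the two new exports, `$group_` and `single`, matching the ESM export list. For `$group_`, the declaration suggests a curried call: fix the accumulator value type `V` first, then pass the `_id` expression and the accumulators. A type-level sketch under that reading follows; the `country` field, the `{ total: number }` shape, and the `$sum(val(1))` accumulator body are illustrative assumptions, not taken from package documentation.

import { $group_, $sum, root, val } from '@omegup/msync';

// Fix V = { total: number } first, then supply the _id expression and the
// accumulators; per the .d.ts, the result is RawStages whose output row type
// is Rec<'_id', ID> & V.
// 'country' is a hypothetical field; root()...expr() and val() appear in the
// diff itself, while $sum's exact signature is assumed here.
const byCountry = $group_<{ total: number }>()(
    root().of('country').expr(),
    { total: $sum(val(1)) },
);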