@omegup/msync 0.1.13 → 0.1.15
This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the registry.
- package/index.esm.js +44 -29
- package/index.js +44 -29
- package/package.json +1 -1
package/index.esm.js
CHANGED
|
@@ -829,12 +829,10 @@ const getWhenMatchedForMerge = (whenNotMatched) => {
|
|
|
829
829
|
};
|
|
830
830
|
const getWhenMatched = (whenNotMatched) => {
|
|
831
831
|
const orNull = (e) => whenNotMatched === 'discard' ? $ifNull(e, nil) : e;
|
|
832
|
-
const preMergeOld =
|
|
833
|
-
|
|
834
|
-
|
|
835
|
-
|
|
836
|
-
}))
|
|
837
|
-
: root().of('old').expr();
|
|
832
|
+
const preMergeOld = mergeObjects(root().of('old').expr(), field({
|
|
833
|
+
deletedAt: ['deletedAt', orNull(root().of('old').of('deletedAt').expr())],
|
|
834
|
+
touchedAt: ['touchedAt', root().of('merged').of('touchedAt').expr()],
|
|
835
|
+
}));
|
|
838
836
|
const same = eq(preMergeOld)(root().of('merged').expr());
|
|
839
837
|
return link().with($replaceWith_(ite(same, root().of('old').expr(), root().of('merged').expr()))).stages;
|
|
840
838
|
};
|
|
@@ -918,8 +916,11 @@ const subMerge = (args, out, gid, extra, idPrefix, first) => {
|
|
|
918
916
|
...F1,
|
|
919
917
|
...F2,
|
|
920
918
|
};
|
|
921
|
-
const
|
|
922
|
-
|
|
919
|
+
const addTSAndExtra = {
|
|
920
|
+
...mapExact0(e, to),
|
|
921
|
+
touchedAt: ['touchedAt', to(current)],
|
|
922
|
+
};
|
|
923
|
+
const updater = set()(addTSAndExtra);
|
|
923
924
|
const whenMatched = getWhenMatched(out.whenNotMatched);
|
|
924
925
|
return link()
|
|
925
926
|
.with($set_(set()(addExtraAndMerge)))
|
|
@@ -1760,16 +1761,23 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
|
|
|
1760
1761
|
const step4 = ({ result, ts }) => async () => {
|
|
1761
1762
|
const start = Date.now();
|
|
1762
1763
|
await snapshotCollection.updateMany({ before: null }, { $set: { before: null } });
|
|
1764
|
+
const stages = finalInput.raw(ts === undefined);
|
|
1763
1765
|
const aggResult = await aggregate(streamName, c => c({
|
|
1764
1766
|
coll: snapshotCollection,
|
|
1765
1767
|
input: link()
|
|
1766
1768
|
.with($match_(root().of('updated').has($eq(true))))
|
|
1767
|
-
.with($match_($expr(ne(root().of('after').expr())(root().of('before').expr()))))
|
|
1768
1769
|
.with(input.delta)
|
|
1769
|
-
.with(
|
|
1770
|
+
.with(stages).stages,
|
|
1770
1771
|
}), false, start);
|
|
1771
1772
|
const stream = makeStream(result.cursor.atClusterTime);
|
|
1772
|
-
|
|
1773
|
+
const nextRes = stream.tryNext();
|
|
1774
|
+
const intoColl = stages.at(-1).$merge.into.coll;
|
|
1775
|
+
const startx = Date.now();
|
|
1776
|
+
await db
|
|
1777
|
+
.collection(intoColl)
|
|
1778
|
+
.countDocuments({ touchedAt: { $gte: result.cursor.atClusterTime } })
|
|
1779
|
+
.then(count => log(`documents updated ${intoColl}`, count, 'took', Date.now() - startx));
|
|
1780
|
+
return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream, nextRes }), 'remove handled deleted updated', () => stream.close());
|
|
1773
1781
|
};
|
|
1774
1782
|
const step5 = (l) => async () => {
|
|
1775
1783
|
log(`remove handled deleted updated db['${snapshotCollection.collectionName}'].deleteMany({ updated: true, after: null })`);
|
|
@@ -1801,13 +1809,14 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
|
|
|
1801
1809
|
return step8(l);
|
|
1802
1810
|
};
|
|
1803
1811
|
const step8 = (l) => {
|
|
1804
|
-
return nextData(l.aggResult.cursor.firstBatch)(() => l.
|
|
1805
|
-
.tryNext()
|
|
1812
|
+
return nextData(l.aggResult.cursor.firstBatch)(() => l.nextRes
|
|
1806
1813
|
.catch((err) => {
|
|
1807
1814
|
log('restarting', err);
|
|
1808
1815
|
return { ts: null };
|
|
1809
1816
|
})
|
|
1810
|
-
.then(doc =>
|
|
1817
|
+
.then(doc => doc
|
|
1818
|
+
? next(step3({ _id: streamName, ts: l.ts }), 'restart')
|
|
1819
|
+
: step8({ ...l, nextRes: l.stream.tryNext() })), 'wait for change');
|
|
1811
1820
|
};
|
|
1812
1821
|
return skip
|
|
1813
1822
|
? withStop(() => SynchronousPromise.resolve(next(step3(null), 'clone into new collection')))
|
|
@@ -1835,8 +1844,11 @@ const executes$1 = (view, input, streamName, needs) => {
|
|
|
1835
1844
|
else if (streamNames[streamName] != hash)
|
|
1836
1845
|
throw new Error('streamName already used');
|
|
1837
1846
|
const { collection, projection, hardMatch: pre, match } = view;
|
|
1838
|
-
const removeNotYetSynchronizedFields = projection &&
|
|
1839
|
-
|
|
1847
|
+
const removeNotYetSynchronizedFields = projection &&
|
|
1848
|
+
Object.values(mapExactToObject(projection, (_, k) => (needs[k] ?? k.startsWith('_')) ? root().of(k).has($exists(true)) : null));
|
|
1849
|
+
const hardMatch = removeNotYetSynchronizedFields
|
|
1850
|
+
? $and(pre, ...removeNotYetSynchronizedFields)
|
|
1851
|
+
: pre;
|
|
1840
1852
|
const job = {};
|
|
1841
1853
|
const db = collection.s.db, coll = collection.collectionName;
|
|
1842
1854
|
db.command({
|
|
@@ -1848,15 +1860,15 @@ const executes$1 = (view, input, streamName, needs) => {
|
|
|
1848
1860
|
name: 'touchedAt_' + new UUID().toString('base64'),
|
|
1849
1861
|
});
|
|
1850
1862
|
const last = db.collection('__last');
|
|
1851
|
-
const projectInput = projection &&
|
|
1852
|
-
|
|
1853
|
-
|
|
1854
|
-
|
|
1863
|
+
const projectInput = projection &&
|
|
1864
|
+
$project_(spread(projection, {
|
|
1865
|
+
deletedAt: ['deletedAt', 1],
|
|
1866
|
+
_id: ['_id', 1],
|
|
1867
|
+
}));
|
|
1855
1868
|
const notDeleted = root().of('deletedAt').has($eq(null));
|
|
1856
1869
|
const stages = (lastTS) => {
|
|
1857
1870
|
const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
|
|
1858
|
-
const ln = link()
|
|
1859
|
-
.with($match_(hardQuery));
|
|
1871
|
+
const ln = link().with($match_(hardQuery));
|
|
1860
1872
|
return (projectInput ? ln.with(projectInput) : ln).with(input);
|
|
1861
1873
|
};
|
|
1862
1874
|
const run = (finalInput) => {
|
|
@@ -1907,20 +1919,22 @@ const executes$1 = (view, input, streamName, needs) => {
|
|
|
1907
1919
|
return {
|
|
1908
1920
|
cont: withStop(async () => {
|
|
1909
1921
|
await new Promise(resolve => setTimeout(resolve, 1000));
|
|
1910
|
-
return step4(same)
|
|
1922
|
+
return next(step4(same), 'clone into new collection');
|
|
1911
1923
|
}),
|
|
1912
1924
|
data: [],
|
|
1913
|
-
info: { debug: 'clone into new collection', job: undefined },
|
|
1925
|
+
info: { debug: 'wait for clone into new collection', job: undefined },
|
|
1914
1926
|
};
|
|
1915
1927
|
};
|
|
1916
1928
|
const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
|
|
1917
1929
|
const step4 = (lastTS) => async () => {
|
|
1930
|
+
const raw = stages(lastTS).with(finalInput.raw(lastTS === null)).stages;
|
|
1918
1931
|
const aggResult = await aggregate(streamName, c => c({
|
|
1919
1932
|
coll: collection,
|
|
1920
|
-
input:
|
|
1933
|
+
input: raw,
|
|
1921
1934
|
}));
|
|
1922
1935
|
const stream = makeStream(aggResult.cursor.atClusterTime);
|
|
1923
|
-
|
|
1936
|
+
const nextRes = stream.tryNext();
|
|
1937
|
+
return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream, nextRes }), 'update __last', () => stream.close());
|
|
1924
1938
|
};
|
|
1925
1939
|
const step7 = (l) => async () => {
|
|
1926
1940
|
await last.updateOne({ _id: streamName }, { $set: { ts: l.ts, data } }, { upsert: true });
|
|
@@ -1930,13 +1944,14 @@ const executes$1 = (view, input, streamName, needs) => {
|
|
|
1930
1944
|
return {
|
|
1931
1945
|
data: l.aggResult.cursor.firstBatch,
|
|
1932
1946
|
info: { job: undefined, debug: 'wait for change' },
|
|
1933
|
-
cont: withStop(() => l.
|
|
1934
|
-
.tryNext()
|
|
1947
|
+
cont: withStop(() => l.nextRes
|
|
1935
1948
|
.catch((err) => {
|
|
1936
1949
|
log('restarting', err);
|
|
1937
1950
|
return { ts: null };
|
|
1938
1951
|
})
|
|
1939
|
-
.then(doc =>
|
|
1952
|
+
.then(doc => doc
|
|
1953
|
+
? next(step4({ _id: streamName, ts: l.ts }), 'restart')
|
|
1954
|
+
: step8({ ...l, nextRes: l.stream.tryNext() }))),
|
|
1940
1955
|
};
|
|
1941
1956
|
};
|
|
1942
1957
|
return stop;
|
package/index.js
CHANGED
|
@@ -831,12 +831,10 @@ const getWhenMatchedForMerge = (whenNotMatched) => {
|
|
|
831
831
|
};
|
|
832
832
|
const getWhenMatched = (whenNotMatched) => {
|
|
833
833
|
const orNull = (e) => whenNotMatched === 'discard' ? $ifNull(e, nil) : e;
|
|
834
|
-
const preMergeOld =
|
|
835
|
-
|
|
836
|
-
|
|
837
|
-
|
|
838
|
-
}))
|
|
839
|
-
: root().of('old').expr();
|
|
834
|
+
const preMergeOld = mergeObjects(root().of('old').expr(), field({
|
|
835
|
+
deletedAt: ['deletedAt', orNull(root().of('old').of('deletedAt').expr())],
|
|
836
|
+
touchedAt: ['touchedAt', root().of('merged').of('touchedAt').expr()],
|
|
837
|
+
}));
|
|
840
838
|
const same = eq(preMergeOld)(root().of('merged').expr());
|
|
841
839
|
return link().with($replaceWith_(ite(same, root().of('old').expr(), root().of('merged').expr()))).stages;
|
|
842
840
|
};
|
|
@@ -920,8 +918,11 @@ const subMerge = (args, out, gid, extra, idPrefix, first) => {
|
|
|
920
918
|
...F1,
|
|
921
919
|
...F2,
|
|
922
920
|
};
|
|
923
|
-
const
|
|
924
|
-
|
|
921
|
+
const addTSAndExtra = {
|
|
922
|
+
...mapExact0(e, to),
|
|
923
|
+
touchedAt: ['touchedAt', to(current)],
|
|
924
|
+
};
|
|
925
|
+
const updater = set()(addTSAndExtra);
|
|
925
926
|
const whenMatched = getWhenMatched(out.whenNotMatched);
|
|
926
927
|
return link()
|
|
927
928
|
.with($set_(set()(addExtraAndMerge)))
|
|
@@ -1762,16 +1763,23 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
|
|
|
1762
1763
|
const step4 = ({ result, ts }) => async () => {
|
|
1763
1764
|
const start = Date.now();
|
|
1764
1765
|
await snapshotCollection.updateMany({ before: null }, { $set: { before: null } });
|
|
1766
|
+
const stages = finalInput.raw(ts === undefined);
|
|
1765
1767
|
const aggResult = await aggregate(streamName, c => c({
|
|
1766
1768
|
coll: snapshotCollection,
|
|
1767
1769
|
input: link()
|
|
1768
1770
|
.with($match_(root().of('updated').has($eq(true))))
|
|
1769
|
-
.with($match_($expr(ne(root().of('after').expr())(root().of('before').expr()))))
|
|
1770
1771
|
.with(input.delta)
|
|
1771
|
-
.with(
|
|
1772
|
+
.with(stages).stages,
|
|
1772
1773
|
}), false, start);
|
|
1773
1774
|
const stream = makeStream(result.cursor.atClusterTime);
|
|
1774
|
-
|
|
1775
|
+
const nextRes = stream.tryNext();
|
|
1776
|
+
const intoColl = stages.at(-1).$merge.into.coll;
|
|
1777
|
+
const startx = Date.now();
|
|
1778
|
+
await db
|
|
1779
|
+
.collection(intoColl)
|
|
1780
|
+
.countDocuments({ touchedAt: { $gte: result.cursor.atClusterTime } })
|
|
1781
|
+
.then(count => log(`documents updated ${intoColl}`, count, 'took', Date.now() - startx));
|
|
1782
|
+
return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream, nextRes }), 'remove handled deleted updated', () => stream.close());
|
|
1775
1783
|
};
|
|
1776
1784
|
const step5 = (l) => async () => {
|
|
1777
1785
|
log(`remove handled deleted updated db['${snapshotCollection.collectionName}'].deleteMany({ updated: true, after: null })`);
|
|
@@ -1803,13 +1811,14 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
|
|
|
1803
1811
|
return step8(l);
|
|
1804
1812
|
};
|
|
1805
1813
|
const step8 = (l) => {
|
|
1806
|
-
return nextData(l.aggResult.cursor.firstBatch)(() => l.
|
|
1807
|
-
.tryNext()
|
|
1814
|
+
return nextData(l.aggResult.cursor.firstBatch)(() => l.nextRes
|
|
1808
1815
|
.catch((err) => {
|
|
1809
1816
|
log('restarting', err);
|
|
1810
1817
|
return { ts: null };
|
|
1811
1818
|
})
|
|
1812
|
-
.then(doc =>
|
|
1819
|
+
.then(doc => doc
|
|
1820
|
+
? next(step3({ _id: streamName, ts: l.ts }), 'restart')
|
|
1821
|
+
: step8({ ...l, nextRes: l.stream.tryNext() })), 'wait for change');
|
|
1813
1822
|
};
|
|
1814
1823
|
return skip
|
|
1815
1824
|
? withStop(() => synchronousPromise.SynchronousPromise.resolve(next(step3(null), 'clone into new collection')))
|
|
@@ -1837,8 +1846,11 @@ const executes$1 = (view, input, streamName, needs) => {
|
|
|
1837
1846
|
else if (streamNames[streamName] != hash)
|
|
1838
1847
|
throw new Error('streamName already used');
|
|
1839
1848
|
const { collection, projection, hardMatch: pre, match } = view;
|
|
1840
|
-
const removeNotYetSynchronizedFields = projection &&
|
|
1841
|
-
|
|
1849
|
+
const removeNotYetSynchronizedFields = projection &&
|
|
1850
|
+
Object.values(mapExactToObject(projection, (_, k) => (needs[k] ?? k.startsWith('_')) ? root().of(k).has($exists(true)) : null));
|
|
1851
|
+
const hardMatch = removeNotYetSynchronizedFields
|
|
1852
|
+
? $and(pre, ...removeNotYetSynchronizedFields)
|
|
1853
|
+
: pre;
|
|
1842
1854
|
const job = {};
|
|
1843
1855
|
const db = collection.s.db, coll = collection.collectionName;
|
|
1844
1856
|
db.command({
|
|
@@ -1850,15 +1862,15 @@ const executes$1 = (view, input, streamName, needs) => {
|
|
|
1850
1862
|
name: 'touchedAt_' + new mongodb.UUID().toString('base64'),
|
|
1851
1863
|
});
|
|
1852
1864
|
const last = db.collection('__last');
|
|
1853
|
-
const projectInput = projection &&
|
|
1854
|
-
|
|
1855
|
-
|
|
1856
|
-
|
|
1865
|
+
const projectInput = projection &&
|
|
1866
|
+
$project_(spread(projection, {
|
|
1867
|
+
deletedAt: ['deletedAt', 1],
|
|
1868
|
+
_id: ['_id', 1],
|
|
1869
|
+
}));
|
|
1857
1870
|
const notDeleted = root().of('deletedAt').has($eq(null));
|
|
1858
1871
|
const stages = (lastTS) => {
|
|
1859
1872
|
const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
|
|
1860
|
-
const ln = link()
|
|
1861
|
-
.with($match_(hardQuery));
|
|
1873
|
+
const ln = link().with($match_(hardQuery));
|
|
1862
1874
|
return (projectInput ? ln.with(projectInput) : ln).with(input);
|
|
1863
1875
|
};
|
|
1864
1876
|
const run = (finalInput) => {
|
|
@@ -1909,20 +1921,22 @@ const executes$1 = (view, input, streamName, needs) => {
|
|
|
1909
1921
|
return {
|
|
1910
1922
|
cont: withStop(async () => {
|
|
1911
1923
|
await new Promise(resolve => setTimeout(resolve, 1000));
|
|
1912
|
-
return step4(same)
|
|
1924
|
+
return next(step4(same), 'clone into new collection');
|
|
1913
1925
|
}),
|
|
1914
1926
|
data: [],
|
|
1915
|
-
info: { debug: 'clone into new collection', job: undefined },
|
|
1927
|
+
info: { debug: 'wait for clone into new collection', job: undefined },
|
|
1916
1928
|
};
|
|
1917
1929
|
};
|
|
1918
1930
|
const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
|
|
1919
1931
|
const step4 = (lastTS) => async () => {
|
|
1932
|
+
const raw = stages(lastTS).with(finalInput.raw(lastTS === null)).stages;
|
|
1920
1933
|
const aggResult = await aggregate(streamName, c => c({
|
|
1921
1934
|
coll: collection,
|
|
1922
|
-
input:
|
|
1935
|
+
input: raw,
|
|
1923
1936
|
}));
|
|
1924
1937
|
const stream = makeStream(aggResult.cursor.atClusterTime);
|
|
1925
|
-
|
|
1938
|
+
const nextRes = stream.tryNext();
|
|
1939
|
+
return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream, nextRes }), 'update __last', () => stream.close());
|
|
1926
1940
|
};
|
|
1927
1941
|
const step7 = (l) => async () => {
|
|
1928
1942
|
await last.updateOne({ _id: streamName }, { $set: { ts: l.ts, data } }, { upsert: true });
|
|
@@ -1932,13 +1946,14 @@ const executes$1 = (view, input, streamName, needs) => {
|
|
|
1932
1946
|
return {
|
|
1933
1947
|
data: l.aggResult.cursor.firstBatch,
|
|
1934
1948
|
info: { job: undefined, debug: 'wait for change' },
|
|
1935
|
-
cont: withStop(() => l.
|
|
1936
|
-
.tryNext()
|
|
1949
|
+
cont: withStop(() => l.nextRes
|
|
1937
1950
|
.catch((err) => {
|
|
1938
1951
|
log('restarting', err);
|
|
1939
1952
|
return { ts: null };
|
|
1940
1953
|
})
|
|
1941
|
-
.then(doc =>
|
|
1954
|
+
.then(doc => doc
|
|
1955
|
+
? next(step4({ _id: streamName, ts: l.ts }), 'restart')
|
|
1956
|
+
: step8({ ...l, nextRes: l.stream.tryNext() }))),
|
|
1942
1957
|
};
|
|
1943
1958
|
};
|
|
1944
1959
|
return stop;
|