@omegup/msync 0.1.14 → 0.1.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.esm.js +80 -45
- package/index.js +80 -45
- package/package.json +1 -1
package/index.esm.js
CHANGED
@@ -1331,13 +1331,50 @@ const mergeIterators = (params) => {
 };
 };
 
+const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
+const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
+const json = (a) => replace(JSON.stringify(a));
+const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
+
+const state = { steady: false };
+let timeout = null;
+const aggregate = (streamName, input, snapshot = true, start = Date.now()) => input(({ coll, input }) => {
+    const req = {
+        aggregate: coll.collectionName,
+        pipeline: input,
+        cursor: {},
+        ...(snapshot && { readConcern: { level: 'snapshot' } }),
+    };
+    if (timeout !== null) {
+        clearTimeout(timeout);
+        timeout = null;
+    }
+    log('exec', streamName, req);
+    return coll.s.db.command(req).then(result => {
+        const r = result;
+        log('execed', streamName, (replace) => replace(JSON.stringify(req).replaceAll('$$CLUSTER_TIME', JSON.stringify(r.cursor.atClusterTime))), result, 'took', Date.now() - start);
+        if (!state.steady) {
+            if (timeout !== null)
+                throw new Error('timeout should be null');
+            timeout = setTimeout(() => {
+                state.steady = true;
+                console.log('steady');
+            }, 1000);
+        }
+        return r;
+    }, err => {
+        log('err', req, err);
+        throw new Error(err);
+    });
+});
+
 const firstWorksMerge = (iters) => {
     const iterator = () => {
         const results = iters.map(iter => iter());
         const sources = { ...results };
         return mergeIterators({
             sources,
-            interrupt: key =>
+            interrupt: key => state.steady
         });
     };
     return iterator;
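Note: the T helper added above decodes the 64-bit value carried in MongoDB's extended-JSON {"$timestamp":"<n>"} form; the high 32 bits are the seconds component and the low 32 bits the increment, which is how BSON timestamps are laid out. A minimal usage sketch of the same two helpers, with a made-up sample value (not taken from the package):

// Same conversion as the T/replace helpers above; the input value is hypothetical.
const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));

// 4294967298 = 1 * 2**32 + 2, so the high/low split yields Timestamp(1, 2).
console.log(T('4294967298'));                                         // Timestamp(1, 2)
console.log(replace('{"clusterTime":{"$timestamp":"4294967298"}}')); // {"clusterTime":Timestamp(1, 2)}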
@@ -1501,29 +1538,6 @@ const $insertPart = (out, ext) => {
 const $insert = (out) => $insertPart(out, {});
 const assertNotNull = (expr) => expr;
 
-const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
-const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
-const json = (a) => replace(JSON.stringify(a));
-const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
-
-const aggregate = (streamName, input, snapshot = true, start = Date.now()) => input(({ coll, input }) => {
-    const req = {
-        aggregate: coll.collectionName,
-        pipeline: input,
-        cursor: {},
-        ...(snapshot && { readConcern: { level: 'snapshot' } }),
-    };
-    log('exec', streamName, req);
-    return coll.s.db.command(req).then(result => {
-        const r = result;
-        log('execed', streamName, (replace) => replace(JSON.stringify(req).replaceAll('$$CLUSTER_TIME', JSON.stringify(r.cursor.atClusterTime))), result, 'took', Date.now() - start);
-        return r;
-    }, err => {
-        log('err', req, err);
-        throw new Error(err);
-    });
-});
-
 const addTeardown = (it, tr) => {
     if (!tr)
         return it;
@@ -1755,22 +1769,32 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
         })).stages;
         const r = await aggregate(streamName, c => c({ coll: collection, input: cloneIntoNew }));
         await snapshotCollection.deleteMany({ updated: true, after: null, before: null });
+        const start = Date.now();
+        const count = await snapshotCollection.countDocuments({ updated: true });
+        log(streamName, `documents to update ${snapshotCollection.collectionName}`, count, 'took', Date.now() - start);
         return next(step4({ result: r, ts: lastTS?.ts }), 'run the aggregation');
     };
     const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
     const step4 = ({ result, ts }) => async () => {
         const start = Date.now();
         await snapshotCollection.updateMany({ before: null }, { $set: { before: null } });
+        const stages = finalInput.raw(ts === undefined);
         const aggResult = await aggregate(streamName, c => c({
             coll: snapshotCollection,
             input: link()
                 .with($match_(root().of('updated').has($eq(true))))
-                .with($match_($expr(ne(root().of('after').expr())(root().of('before').expr()))))
                 .with(input.delta)
-                .with(
+                .with(stages).stages,
         }), false, start);
         const stream = makeStream(result.cursor.atClusterTime);
-
+        const nextRes = stream.tryNext();
+        const intoColl = stages.at(-1).$merge.into.coll;
+        const startx = Date.now();
+        await db
+            .collection(intoColl)
+            .countDocuments({ touchedAt: { $gte: result.cursor.atClusterTime } })
+            .then(count => log(`documents updated ${intoColl}`, count, 'took', Date.now() - startx));
+        return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream, nextRes }), 'remove handled deleted updated', () => stream.close());
     };
     const step5 = (l) => async () => {
         log(`remove handled deleted updated db['${snapshotCollection.collectionName}'].deleteMany({ updated: true, after: null })`);
@@ -1802,13 +1826,14 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
         return step8(l);
     };
     const step8 = (l) => {
-        return nextData(l.aggResult.cursor.firstBatch)(() => l.
-            .tryNext()
+        return nextData(l.aggResult.cursor.firstBatch)(() => l.nextRes
             .catch((err) => {
             log('restarting', err);
             return { ts: null };
         })
-            .then(doc =>
+            .then(doc => doc
+            ? next(step3({ _id: streamName, ts: l.ts }), 'restart')
+            : step8({ ...l, nextRes: l.stream.tryNext() })), 'wait for change');
     };
     return skip
         ? withStop(() => SynchronousPromise.resolve(next(step3(null), 'clone into new collection')))
@@ -1836,8 +1861,11 @@ const executes$1 = (view, input, streamName, needs) => {
     else if (streamNames[streamName] != hash)
         throw new Error('streamName already used');
     const { collection, projection, hardMatch: pre, match } = view;
-    const removeNotYetSynchronizedFields = projection &&
-
+    const removeNotYetSynchronizedFields = projection &&
+        Object.values(mapExactToObject(projection, (_, k) => (needs[k] ?? k.startsWith('_')) ? root().of(k).has($exists(true)) : null));
+    const hardMatch = removeNotYetSynchronizedFields
+        ? $and(pre, ...removeNotYetSynchronizedFields)
+        : pre;
     const job = {};
     const db = collection.s.db, coll = collection.collectionName;
     db.command({
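Note: in the new code, hardMatch wraps the view's original filter (pre) in $and together with an $exists check for each projected field that is marked as needed in needs (or, when not listed there, whose name starts with an underscore), so documents whose dependent fields have not been synchronized yet are excluded. Assuming the query DSL lowers to ordinary MongoDB operators, the resulting filter has roughly this shape (field names and the pre filter are hypothetical, for illustration only):

// Rough shape of the combined filter for a view projecting the hypothetical
// fields orderId and total, both marked as needed; "status" stands in for
// the view's original hardMatch (pre).
const hardMatch = {
  $and: [
    { status: 'active' },
    { orderId: { $exists: true } },
    { total: { $exists: true } },
  ],
};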
@@ -1849,15 +1877,15 @@ const executes$1 = (view, input, streamName, needs) => {
         name: 'touchedAt_' + new UUID().toString('base64'),
     });
     const last = db.collection('__last');
-    const projectInput = projection &&
-
-
-
+    const projectInput = projection &&
+        $project_(spread(projection, {
+            deletedAt: ['deletedAt', 1],
+            _id: ['_id', 1],
+        }));
     const notDeleted = root().of('deletedAt').has($eq(null));
     const stages = (lastTS) => {
         const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
-        const ln = link()
-            .with($match_(hardQuery));
+        const ln = link().with($match_(hardQuery));
         return (projectInput ? ln.with(projectInput) : ln).with(input);
     };
     const run = (finalInput) => {
@@ -1908,20 +1936,26 @@ const executes$1 = (view, input, streamName, needs) => {
         return {
             cont: withStop(async () => {
                 await new Promise(resolve => setTimeout(resolve, 1000));
-                return step4(same)
+                return next(step4(same), 'clone into new collection');
             }),
             data: [],
-            info: { debug: 'clone into new collection', job: undefined },
+            info: { debug: 'wait for clone into new collection', job: undefined },
         };
     };
     const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
     const step4 = (lastTS) => async () => {
+        const raw = stages(lastTS).with(finalInput.raw(lastTS === null)).stages;
         const aggResult = await aggregate(streamName, c => c({
             coll: collection,
-            input:
+            input: raw,
         }));
         const stream = makeStream(aggResult.cursor.atClusterTime);
-
+        const nextRes = stream.tryNext();
+        const intoColl = raw.at(-1).$merge.into.coll;
+        db.collection(intoColl)
+            .countDocuments({ touchedAt: { $gte: aggResult.cursor.atClusterTime } })
+            .then(count => log(`documents updated ${intoColl}`, count));
+        return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream, nextRes }), 'update __last', () => stream.close());
     };
     const step7 = (l) => async () => {
         await last.updateOne({ _id: streamName }, { $set: { ts: l.ts, data } }, { upsert: true });
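Note: step4 now reads the target collection from the final $merge stage of the assembled pipeline (raw.at(-1).$merge.into.coll) and, as a progress log, counts documents whose touchedAt is at or after the aggregation's cluster time. This assumes the pipeline always ends in a $merge stage using the object form of into; a sketch of that assumed shape (database and collection names hypothetical):

// Assumed shape of the last pipeline stage; "reports" is a made-up target collection.
const lastStage = { $merge: { into: { db: 'app', coll: 'reports' }, whenMatched: 'merge', whenNotMatched: 'insert' } };
const intoColl = lastStage.$merge.into.coll; // 'reports'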
@@ -1931,13 +1965,14 @@ const executes$1 = (view, input, streamName, needs) => {
         return {
             data: l.aggResult.cursor.firstBatch,
             info: { job: undefined, debug: 'wait for change' },
-            cont: withStop(() => l.
-                .tryNext()
+            cont: withStop(() => l.nextRes
                 .catch((err) => {
                 log('restarting', err);
                 return { ts: null };
             })
-                .then(doc =>
+                .then(doc => doc
+                ? next(step4({ _id: streamName, ts: l.ts }), 'restart')
+                : step8({ ...l, nextRes: l.stream.tryNext() }))),
         };
     };
     return stop;
package/index.js
CHANGED
@@ -1333,13 +1333,50 @@ const mergeIterators = (params) => {
 };
 };
 
+const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
+const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
+const json = (a) => replace(JSON.stringify(a));
+const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
+
+const state = { steady: false };
+let timeout = null;
+const aggregate = (streamName, input, snapshot = true, start = Date.now()) => input(({ coll, input }) => {
+    const req = {
+        aggregate: coll.collectionName,
+        pipeline: input,
+        cursor: {},
+        ...(snapshot && { readConcern: { level: 'snapshot' } }),
+    };
+    if (timeout !== null) {
+        clearTimeout(timeout);
+        timeout = null;
+    }
+    log('exec', streamName, req);
+    return coll.s.db.command(req).then(result => {
+        const r = result;
+        log('execed', streamName, (replace) => replace(JSON.stringify(req).replaceAll('$$CLUSTER_TIME', JSON.stringify(r.cursor.atClusterTime))), result, 'took', Date.now() - start);
+        if (!state.steady) {
+            if (timeout !== null)
+                throw new Error('timeout should be null');
+            timeout = setTimeout(() => {
+                state.steady = true;
+                console.log('steady');
+            }, 1000);
+        }
+        return r;
+    }, err => {
+        log('err', req, err);
+        throw new Error(err);
+    });
+});
+
 const firstWorksMerge = (iters) => {
     const iterator = () => {
         const results = iters.map(iter => iter());
         const sources = { ...results };
         return mergeIterators({
             sources,
-            interrupt: key =>
+            interrupt: key => state.steady
         });
     };
     return iterator;
@@ -1503,29 +1540,6 @@ const $insertPart = (out, ext) => {
 const $insert = (out) => $insertPart(out, {});
 const assertNotNull = (expr) => expr;
 
-const T = (s) => `Timestamp(${parseInt(`${BigInt(s) / 2n ** 32n}`)}, ${parseInt(`${BigInt(s) % 2n ** 32n}`)})`;
-const replace = (s) => s.replace(/\{"\$timestamp":"(\d+)"\}/g, (_, d) => T(d));
-const json = (a) => replace(JSON.stringify(a));
-const log = (...args) => console.log(new Date(), ...args.map(a => (typeof a === 'function' ? a(replace) : a && typeof a === 'object' ? json(a) : a)));
-
-const aggregate = (streamName, input, snapshot = true, start = Date.now()) => input(({ coll, input }) => {
-    const req = {
-        aggregate: coll.collectionName,
-        pipeline: input,
-        cursor: {},
-        ...(snapshot && { readConcern: { level: 'snapshot' } }),
-    };
-    log('exec', streamName, req);
-    return coll.s.db.command(req).then(result => {
-        const r = result;
-        log('execed', streamName, (replace) => replace(JSON.stringify(req).replaceAll('$$CLUSTER_TIME', JSON.stringify(r.cursor.atClusterTime))), result, 'took', Date.now() - start);
-        return r;
-    }, err => {
-        log('err', req, err);
-        throw new Error(err);
-    });
-});
-
 const addTeardown = (it, tr) => {
     if (!tr)
         return it;
@@ -1757,22 +1771,32 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
         })).stages;
         const r = await aggregate(streamName, c => c({ coll: collection, input: cloneIntoNew }));
         await snapshotCollection.deleteMany({ updated: true, after: null, before: null });
+        const start = Date.now();
+        const count = await snapshotCollection.countDocuments({ updated: true });
+        log(streamName, `documents to update ${snapshotCollection.collectionName}`, count, 'took', Date.now() - start);
         return next(step4({ result: r, ts: lastTS?.ts }), 'run the aggregation');
     };
     const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
     const step4 = ({ result, ts }) => async () => {
         const start = Date.now();
         await snapshotCollection.updateMany({ before: null }, { $set: { before: null } });
+        const stages = finalInput.raw(ts === undefined);
         const aggResult = await aggregate(streamName, c => c({
             coll: snapshotCollection,
             input: link()
                 .with($match_(root().of('updated').has($eq(true))))
-                .with($match_($expr(ne(root().of('after').expr())(root().of('before').expr()))))
                 .with(input.delta)
-                .with(
+                .with(stages).stages,
         }), false, start);
         const stream = makeStream(result.cursor.atClusterTime);
-
+        const nextRes = stream.tryNext();
+        const intoColl = stages.at(-1).$merge.into.coll;
+        const startx = Date.now();
+        await db
+            .collection(intoColl)
+            .countDocuments({ touchedAt: { $gte: result.cursor.atClusterTime } })
+            .then(count => log(`documents updated ${intoColl}`, count, 'took', Date.now() - startx));
+        return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream, nextRes }), 'remove handled deleted updated', () => stream.close());
     };
     const step5 = (l) => async () => {
         log(`remove handled deleted updated db['${snapshotCollection.collectionName}'].deleteMany({ updated: true, after: null })`);
@@ -1804,13 +1828,14 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
         return step8(l);
     };
     const step8 = (l) => {
-        return nextData(l.aggResult.cursor.firstBatch)(() => l.
-            .tryNext()
+        return nextData(l.aggResult.cursor.firstBatch)(() => l.nextRes
             .catch((err) => {
             log('restarting', err);
             return { ts: null };
         })
-            .then(doc =>
+            .then(doc => doc
+            ? next(step3({ _id: streamName, ts: l.ts }), 'restart')
+            : step8({ ...l, nextRes: l.stream.tryNext() })), 'wait for change');
     };
     return skip
         ? withStop(() => synchronousPromise.SynchronousPromise.resolve(next(step3(null), 'clone into new collection')))
@@ -1838,8 +1863,11 @@ const executes$1 = (view, input, streamName, needs) => {
     else if (streamNames[streamName] != hash)
         throw new Error('streamName already used');
     const { collection, projection, hardMatch: pre, match } = view;
-    const removeNotYetSynchronizedFields = projection &&
-
+    const removeNotYetSynchronizedFields = projection &&
+        Object.values(mapExactToObject(projection, (_, k) => (needs[k] ?? k.startsWith('_')) ? root().of(k).has($exists(true)) : null));
+    const hardMatch = removeNotYetSynchronizedFields
+        ? $and(pre, ...removeNotYetSynchronizedFields)
+        : pre;
     const job = {};
     const db = collection.s.db, coll = collection.collectionName;
     db.command({
@@ -1851,15 +1879,15 @@ const executes$1 = (view, input, streamName, needs) => {
         name: 'touchedAt_' + new mongodb.UUID().toString('base64'),
     });
     const last = db.collection('__last');
-    const projectInput = projection &&
-
-
-
+    const projectInput = projection &&
+        $project_(spread(projection, {
+            deletedAt: ['deletedAt', 1],
+            _id: ['_id', 1],
+        }));
     const notDeleted = root().of('deletedAt').has($eq(null));
     const stages = (lastTS) => {
         const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
-        const ln = link()
-            .with($match_(hardQuery));
+        const ln = link().with($match_(hardQuery));
         return (projectInput ? ln.with(projectInput) : ln).with(input);
     };
     const run = (finalInput) => {
@@ -1910,20 +1938,26 @@ const executes$1 = (view, input, streamName, needs) => {
         return {
             cont: withStop(async () => {
                 await new Promise(resolve => setTimeout(resolve, 1000));
-                return step4(same)
+                return next(step4(same), 'clone into new collection');
             }),
             data: [],
-            info: { debug: 'clone into new collection', job: undefined },
+            info: { debug: 'wait for clone into new collection', job: undefined },
         };
     };
     const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
     const step4 = (lastTS) => async () => {
+        const raw = stages(lastTS).with(finalInput.raw(lastTS === null)).stages;
         const aggResult = await aggregate(streamName, c => c({
             coll: collection,
-            input:
+            input: raw,
         }));
         const stream = makeStream(aggResult.cursor.atClusterTime);
-
+        const nextRes = stream.tryNext();
+        const intoColl = raw.at(-1).$merge.into.coll;
+        db.collection(intoColl)
+            .countDocuments({ touchedAt: { $gte: aggResult.cursor.atClusterTime } })
+            .then(count => log(`documents updated ${intoColl}`, count));
+        return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream, nextRes }), 'update __last', () => stream.close());
     };
     const step7 = (l) => async () => {
         await last.updateOne({ _id: streamName }, { $set: { ts: l.ts, data } }, { upsert: true });
@@ -1933,13 +1967,14 @@ const executes$1 = (view, input, streamName, needs) => {
         return {
             data: l.aggResult.cursor.firstBatch,
             info: { job: undefined, debug: 'wait for change' },
-            cont: withStop(() => l.
-                .tryNext()
+            cont: withStop(() => l.nextRes
                 .catch((err) => {
                 log('restarting', err);
                 return { ts: null };
             })
-                .then(doc =>
+                .then(doc => doc
+                ? next(step4({ _id: streamName, ts: l.ts }), 'restart')
+                : step8({ ...l, nextRes: l.stream.tryNext() }))),
         };
     };
     return stop;