@omegup/msync 0.1.14 → 0.1.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3):
  1. package/index.esm.js +35 -21
  2. package/index.js +35 -21
  3. package/package.json +1 -1
package/index.esm.js CHANGED
@@ -1761,16 +1761,23 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1761
1761
  const step4 = ({ result, ts }) => async () => {
1762
1762
  const start = Date.now();
1763
1763
  await snapshotCollection.updateMany({ before: null }, { $set: { before: null } });
1764
+ const stages = finalInput.raw(ts === undefined);
1764
1765
  const aggResult = await aggregate(streamName, c => c({
1765
1766
  coll: snapshotCollection,
1766
1767
  input: link()
1767
1768
  .with($match_(root().of('updated').has($eq(true))))
1768
- .with($match_($expr(ne(root().of('after').expr())(root().of('before').expr()))))
1769
1769
  .with(input.delta)
1770
- .with(finalInput.raw(ts === undefined)).stages,
1770
+ .with(stages).stages,
1771
1771
  }), false, start);
1772
1772
  const stream = makeStream(result.cursor.atClusterTime);
1773
- return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream }), 'remove handled deleted updated', () => stream.close());
1773
+ const nextRes = stream.tryNext();
1774
+ const intoColl = stages.at(-1).$merge.into.coll;
1775
+ const startx = Date.now();
1776
+ await db
1777
+ .collection(intoColl)
1778
+ .countDocuments({ touchedAt: { $gte: result.cursor.atClusterTime } })
1779
+ .then(count => log(`documents updated ${intoColl}`, count, 'took', Date.now() - startx));
1780
+ return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream, nextRes }), 'remove handled deleted updated', () => stream.close());
1774
1781
  };
1775
1782
  const step5 = (l) => async () => {
1776
1783
  log(`remove handled deleted updated db['${snapshotCollection.collectionName}'].deleteMany({ updated: true, after: null })`);
@@ -1802,13 +1809,14 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1802
1809
  return step8(l);
1803
1810
  };
1804
1811
  const step8 = (l) => {
1805
- return nextData(l.aggResult.cursor.firstBatch)(() => l.stream
1806
- .tryNext()
1812
+ return nextData(l.aggResult.cursor.firstBatch)(() => l.nextRes
1807
1813
  .catch((err) => {
1808
1814
  log('restarting', err);
1809
1815
  return { ts: null };
1810
1816
  })
1811
- .then(doc => (doc ? next(step3({ _id: streamName, ts: l.ts }), 'restart') : step8(l))), 'wait for change');
1817
+ .then(doc => doc
1818
+ ? next(step3({ _id: streamName, ts: l.ts }), 'restart')
1819
+ : step8({ ...l, nextRes: l.stream.tryNext() })), 'wait for change');
1812
1820
  };
1813
1821
  return skip
1814
1822
  ? withStop(() => SynchronousPromise.resolve(next(step3(null), 'clone into new collection')))
@@ -1836,8 +1844,11 @@ const executes$1 = (view, input, streamName, needs) => {
1836
1844
  else if (streamNames[streamName] != hash)
1837
1845
  throw new Error('streamName already used');
1838
1846
  const { collection, projection, hardMatch: pre, match } = view;
1839
- const removeNotYetSynchronizedFields = projection && Object.values(mapExactToObject(projection, (_, k) => (needs[k] ?? k.startsWith('_')) ? root().of(k).has($exists(true)) : null));
1840
- const hardMatch = removeNotYetSynchronizedFields ? $and(pre, ...removeNotYetSynchronizedFields) : pre;
1847
+ const removeNotYetSynchronizedFields = projection &&
1848
+ Object.values(mapExactToObject(projection, (_, k) => (needs[k] ?? k.startsWith('_')) ? root().of(k).has($exists(true)) : null));
1849
+ const hardMatch = removeNotYetSynchronizedFields
1850
+ ? $and(pre, ...removeNotYetSynchronizedFields)
1851
+ : pre;
1841
1852
  const job = {};
1842
1853
  const db = collection.s.db, coll = collection.collectionName;
1843
1854
  db.command({
@@ -1849,15 +1860,15 @@ const executes$1 = (view, input, streamName, needs) => {
1849
1860
  name: 'touchedAt_' + new UUID().toString('base64'),
1850
1861
  });
1851
1862
  const last = db.collection('__last');
1852
- const projectInput = projection && $project_(spread(projection, {
1853
- deletedAt: ['deletedAt', 1],
1854
- _id: ['_id', 1],
1855
- }));
1863
+ const projectInput = projection &&
1864
+ $project_(spread(projection, {
1865
+ deletedAt: ['deletedAt', 1],
1866
+ _id: ['_id', 1],
1867
+ }));
1856
1868
  const notDeleted = root().of('deletedAt').has($eq(null));
1857
1869
  const stages = (lastTS) => {
1858
1870
  const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
1859
- const ln = link()
1860
- .with($match_(hardQuery));
1871
+ const ln = link().with($match_(hardQuery));
1861
1872
  return (projectInput ? ln.with(projectInput) : ln).with(input);
1862
1873
  };
1863
1874
  const run = (finalInput) => {
@@ -1908,20 +1919,22 @@ const executes$1 = (view, input, streamName, needs) => {
1908
1919
  return {
1909
1920
  cont: withStop(async () => {
1910
1921
  await new Promise(resolve => setTimeout(resolve, 1000));
1911
- return step4(same)();
1922
+ return next(step4(same), 'clone into new collection');
1912
1923
  }),
1913
1924
  data: [],
1914
- info: { debug: 'clone into new collection', job: undefined },
1925
+ info: { debug: 'wait for clone into new collection', job: undefined },
1915
1926
  };
1916
1927
  };
1917
1928
  const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
1918
1929
  const step4 = (lastTS) => async () => {
1930
+ const raw = stages(lastTS).with(finalInput.raw(lastTS === null)).stages;
1919
1931
  const aggResult = await aggregate(streamName, c => c({
1920
1932
  coll: collection,
1921
- input: stages(lastTS).with(finalInput.raw(lastTS === null)).stages,
1933
+ input: raw,
1922
1934
  }));
1923
1935
  const stream = makeStream(aggResult.cursor.atClusterTime);
1924
- return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream }), 'update __last', () => stream.close());
1936
+ const nextRes = stream.tryNext();
1937
+ return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream, nextRes }), 'update __last', () => stream.close());
1925
1938
  };
1926
1939
  const step7 = (l) => async () => {
1927
1940
  await last.updateOne({ _id: streamName }, { $set: { ts: l.ts, data } }, { upsert: true });
@@ -1931,13 +1944,14 @@ const executes$1 = (view, input, streamName, needs) => {
1931
1944
  return {
1932
1945
  data: l.aggResult.cursor.firstBatch,
1933
1946
  info: { job: undefined, debug: 'wait for change' },
1934
- cont: withStop(() => l.stream
1935
- .tryNext()
1947
+ cont: withStop(() => l.nextRes
1936
1948
  .catch((err) => {
1937
1949
  log('restarting', err);
1938
1950
  return { ts: null };
1939
1951
  })
1940
- .then(doc => (doc ? next(step4({ _id: streamName, ts: l.ts }), 'restart') : step8(l)))),
1952
+ .then(doc => doc
1953
+ ? next(step4({ _id: streamName, ts: l.ts }), 'restart')
1954
+ : step8({ ...l, nextRes: l.stream.tryNext() }))),
1941
1955
  };
1942
1956
  };
1943
1957
  return stop;
package/index.js CHANGED
@@ -1763,16 +1763,23 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1763
1763
  const step4 = ({ result, ts }) => async () => {
1764
1764
  const start = Date.now();
1765
1765
  await snapshotCollection.updateMany({ before: null }, { $set: { before: null } });
1766
+ const stages = finalInput.raw(ts === undefined);
1766
1767
  const aggResult = await aggregate(streamName, c => c({
1767
1768
  coll: snapshotCollection,
1768
1769
  input: link()
1769
1770
  .with($match_(root().of('updated').has($eq(true))))
1770
- .with($match_($expr(ne(root().of('after').expr())(root().of('before').expr()))))
1771
1771
  .with(input.delta)
1772
- .with(finalInput.raw(ts === undefined)).stages,
1772
+ .with(stages).stages,
1773
1773
  }), false, start);
1774
1774
  const stream = makeStream(result.cursor.atClusterTime);
1775
- return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream }), 'remove handled deleted updated', () => stream.close());
1775
+ const nextRes = stream.tryNext();
1776
+ const intoColl = stages.at(-1).$merge.into.coll;
1777
+ const startx = Date.now();
1778
+ await db
1779
+ .collection(intoColl)
1780
+ .countDocuments({ touchedAt: { $gte: result.cursor.atClusterTime } })
1781
+ .then(count => log(`documents updated ${intoColl}`, count, 'took', Date.now() - startx));
1782
+ return next(step5({ ts: result.cursor.atClusterTime, aggResult, stream, nextRes }), 'remove handled deleted updated', () => stream.close());
1776
1783
  };
1777
1784
  const step5 = (l) => async () => {
1778
1785
  log(`remove handled deleted updated db['${snapshotCollection.collectionName}'].deleteMany({ updated: true, after: null })`);
@@ -1804,13 +1811,14 @@ const executes$2 = (view, input, streamName, skip = false, after, needs = {}) =>
1804
1811
  return step8(l);
1805
1812
  };
1806
1813
  const step8 = (l) => {
1807
- return nextData(l.aggResult.cursor.firstBatch)(() => l.stream
1808
- .tryNext()
1814
+ return nextData(l.aggResult.cursor.firstBatch)(() => l.nextRes
1809
1815
  .catch((err) => {
1810
1816
  log('restarting', err);
1811
1817
  return { ts: null };
1812
1818
  })
1813
- .then(doc => (doc ? next(step3({ _id: streamName, ts: l.ts }), 'restart') : step8(l))), 'wait for change');
1819
+ .then(doc => doc
1820
+ ? next(step3({ _id: streamName, ts: l.ts }), 'restart')
1821
+ : step8({ ...l, nextRes: l.stream.tryNext() })), 'wait for change');
1814
1822
  };
1815
1823
  return skip
1816
1824
  ? withStop(() => synchronousPromise.SynchronousPromise.resolve(next(step3(null), 'clone into new collection')))
@@ -1838,8 +1846,11 @@ const executes$1 = (view, input, streamName, needs) => {
1838
1846
  else if (streamNames[streamName] != hash)
1839
1847
  throw new Error('streamName already used');
1840
1848
  const { collection, projection, hardMatch: pre, match } = view;
1841
- const removeNotYetSynchronizedFields = projection && Object.values(mapExactToObject(projection, (_, k) => (needs[k] ?? k.startsWith('_')) ? root().of(k).has($exists(true)) : null));
1842
- const hardMatch = removeNotYetSynchronizedFields ? $and(pre, ...removeNotYetSynchronizedFields) : pre;
1849
+ const removeNotYetSynchronizedFields = projection &&
1850
+ Object.values(mapExactToObject(projection, (_, k) => (needs[k] ?? k.startsWith('_')) ? root().of(k).has($exists(true)) : null));
1851
+ const hardMatch = removeNotYetSynchronizedFields
1852
+ ? $and(pre, ...removeNotYetSynchronizedFields)
1853
+ : pre;
1843
1854
  const job = {};
1844
1855
  const db = collection.s.db, coll = collection.collectionName;
1845
1856
  db.command({
@@ -1851,15 +1862,15 @@ const executes$1 = (view, input, streamName, needs) => {
1851
1862
  name: 'touchedAt_' + new mongodb.UUID().toString('base64'),
1852
1863
  });
1853
1864
  const last = db.collection('__last');
1854
- const projectInput = projection && $project_(spread(projection, {
1855
- deletedAt: ['deletedAt', 1],
1856
- _id: ['_id', 1],
1857
- }));
1865
+ const projectInput = projection &&
1866
+ $project_(spread(projection, {
1867
+ deletedAt: ['deletedAt', 1],
1868
+ _id: ['_id', 1],
1869
+ }));
1858
1870
  const notDeleted = root().of('deletedAt').has($eq(null));
1859
1871
  const stages = (lastTS) => {
1860
1872
  const hardQuery = $and(lastTS && root().of('touchedAt').has($gteTs(lastTS.ts)), hardMatch, notDeleted, match && $expr(match));
1861
- const ln = link()
1862
- .with($match_(hardQuery));
1873
+ const ln = link().with($match_(hardQuery));
1863
1874
  return (projectInput ? ln.with(projectInput) : ln).with(input);
1864
1875
  };
1865
1876
  const run = (finalInput) => {
@@ -1910,20 +1921,22 @@ const executes$1 = (view, input, streamName, needs) => {
1910
1921
  return {
1911
1922
  cont: withStop(async () => {
1912
1923
  await new Promise(resolve => setTimeout(resolve, 1000));
1913
- return step4(same)();
1924
+ return next(step4(same), 'clone into new collection');
1914
1925
  }),
1915
1926
  data: [],
1916
- info: { debug: 'clone into new collection', job: undefined },
1927
+ info: { debug: 'wait for clone into new collection', job: undefined },
1917
1928
  };
1918
1929
  };
1919
1930
  const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
1920
1931
  const step4 = (lastTS) => async () => {
1932
+ const raw = stages(lastTS).with(finalInput.raw(lastTS === null)).stages;
1921
1933
  const aggResult = await aggregate(streamName, c => c({
1922
1934
  coll: collection,
1923
- input: stages(lastTS).with(finalInput.raw(lastTS === null)).stages,
1935
+ input: raw,
1924
1936
  }));
1925
1937
  const stream = makeStream(aggResult.cursor.atClusterTime);
1926
- return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream }), 'update __last', () => stream.close());
1938
+ const nextRes = stream.tryNext();
1939
+ return next(step7({ aggResult, ts: aggResult.cursor.atClusterTime, stream, nextRes }), 'update __last', () => stream.close());
1927
1940
  };
1928
1941
  const step7 = (l) => async () => {
1929
1942
  await last.updateOne({ _id: streamName }, { $set: { ts: l.ts, data } }, { upsert: true });
@@ -1933,13 +1946,14 @@ const executes$1 = (view, input, streamName, needs) => {
1933
1946
  return {
1934
1947
  data: l.aggResult.cursor.firstBatch,
1935
1948
  info: { job: undefined, debug: 'wait for change' },
1936
- cont: withStop(() => l.stream
1937
- .tryNext()
1949
+ cont: withStop(() => l.nextRes
1938
1950
  .catch((err) => {
1939
1951
  log('restarting', err);
1940
1952
  return { ts: null };
1941
1953
  })
1942
- .then(doc => (doc ? next(step4({ _id: streamName, ts: l.ts }), 'restart') : step8(l)))),
1954
+ .then(doc => doc
1955
+ ? next(step4({ _id: streamName, ts: l.ts }), 'restart')
1956
+ : step8({ ...l, nextRes: l.stream.tryNext() }))),
1943
1957
  };
1944
1958
  };
1945
1959
  return stop;
package/package.json CHANGED
@@ -3,7 +3,7 @@
3
3
  "module": "index.esm.js",
4
4
  "typings": "index.d.ts",
5
5
  "name": "@omegup/msync",
6
- "version": "0.1.14",
6
+ "version": "0.1.15",
7
7
  "dependencies": {
8
8
  "dayjs": "^1.11.9",
9
9
  "dotenv": "^16.3.1",