@omegup/msync 0.0.53 → 0.0.55
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.esm.js +41 -33
- package/index.js +41 -33
- package/package.json +1 -1
package/index.esm.js
CHANGED
@@ -1113,7 +1113,7 @@ const createIndex = async (collection, indexSpec, options) => {
  await collection.createIndex(indexSpec, options);
  }
  catch (e) {
- if (e.code
+ if ([85, 276].includes(e.code)) {
  break;
  }
  if (e.code == 12587) {
@@ -1392,8 +1392,9 @@ const aggregate = (streamName, input, snapshot = true, start = Date.now()) => in
  };
  log('exec', streamName, req);
  return coll.s.db.command(req).then(result => {
-
-
+ const r = result;
+ log('execed', streamName, (replace) => replace(JSON.stringify(req).replaceAll('$$CLUSTER_TIME', JSON.stringify(r.cursor.atClusterTime))), result, 'took', Date.now() - start);
+ return r;
  }, err => {
  log('err', req, err);
  throw new Error(err);
@@ -1444,23 +1445,21 @@ const makeWatchStream = (db, { collection, projection: p, hardMatch: m }, startA
  },
  });
  pipeline.push({
- $
-
-
-
-
-
-
-
-
-
- then: '$clusterTime',
- else: null,
- },
- },
+ $match: {
+ $or: [
+ { $expr: { $ne: ['$fullDocument', '$fullDocumentBeforeChange'] } },
+ Object.fromEntries(changeKeys.map(k => [k, null])),
+ ],
+ },
+ });
+ pipeline.push({
+ $project: {
+ _id: 1,
  },
  });
- const stream = db
+ const stream = db
+ .collection(collection.collectionName)
+ .watch(pipeline, {
  fullDocument: 'required',
  fullDocumentBeforeChange: 'required',
  startAtOperationTime: startAt,
@@ -1536,6 +1535,8 @@ const executes$1 = (view, input, streamName) => {
  input: input.delta,
  finalInputFirst: finalInput.raw(true),
  finalInput: finalInput.raw(false),
+ match: view.match?.raw(root()).get(),
+ project: projection,
  teardown: finalInput.teardown((x) => ({
  collection: x.collection.collectionName,
  method: x.method,
@@ -1571,13 +1572,17 @@ const executes$1 = (view, input, streamName) => {
  await Promise.all([snapshotCollection.drop(), action]);
  log('teardown done', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
  };
- if (exists && !same)
+ if (exists && !same) {
  await handleTeardown(exists);
-
+ }
+ return nextData([])(async () => {
+ await new Promise(resolve => setTimeout(resolve, 1000));
+ return step3(same)();
+ }, 'clone into new collection');
  };
  const step3 = (lastTS) => async () => {
  const hardQuery = $and(lastTS
- ? root().of('touchedAt').has($
+ ? root().of('touchedAt').has($gtTs(lastTS.ts))
  : root().of('deletedAt').has($eq(null)), lastTS ? null : match && $expr(match), hardMatch);
  const notDeleted = eq($ifNull(root().of('deletedAt').expr(), nil))(nil);
  const query = match ? and(notDeleted, match) : notDeleted;
@@ -1655,11 +1660,7 @@ const executes$1 = (view, input, streamName) => {
  log('restarting', err);
  return { ts: null };
  })
- .then(doc => doc
- ? doc.ts
- ? next(step7({ ...l, ts: doc.ts }), 'nothing changed')
- : next(step2, 'restart')
- : step8(l)), 'wait for change');
+ .then(doc => (doc ? next(step3({ _id: streamName, ts: l.ts }), 'restart') : step8(l))), 'wait for change');
  };
  return stop;
  };
@@ -1684,7 +1685,9 @@ const executes = (view, input, streamName) => {
  streamNames[streamName] = hash;
  else if (streamNames[streamName] != hash)
  throw new Error('streamName already used');
- const { collection, projection, hardMatch, match } = view;
+ const { collection, projection, hardMatch: pre, match } = view;
+ const removeNotYetSynchronizedFields = Object.values(mapExactToObject(projection, (_, k) => k.startsWith('_') ? root().of(k).has($exists(true)) : null));
+ const hardMatch = $and(pre, ...removeNotYetSynchronizedFields);
  const job = {};
  const db = collection.s.db, coll = collection.collectionName;
  db.command({
@@ -1715,6 +1718,8 @@ const executes = (view, input, streamName) => {
  input: input,
  finalInputFirst: finalInput.raw(true),
  finalInput: finalInput.raw(false),
+ match: view.match?.raw(root()).get(),
+ project: projection,
  teardown: finalInput.teardown((x) => ({
  collection: x.collection.collectionName,
  method: x.method,
@@ -1744,7 +1749,14 @@ const executes = (view, input, streamName) => {
  };
  if (exists && !same)
  await handleTeardown(exists);
- return
+ return {
+ cont: withStop(async () => {
+ await new Promise(resolve => setTimeout(resolve, 1000));
+ return step4(same)();
+ }),
+ data: [],
+ info: { debug: 'clone into new collection', job: undefined },
+ };
  };
  const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
  const step4 = (lastTS) => async () => {
@@ -1770,11 +1782,7 @@ const executes = (view, input, streamName) => {
  info: { job: undefined, debug: 'wait for change' },
  cont: withStop(() => l.stream
  .tryNext()
- .then(doc => doc
- ? doc.ts
- ? next(step7({ ...l, ts: doc.ts }), 'nothing changed')
- : next(step1, 'restart')
- : step8(l))),
+ .then(doc => (doc ? next(step4({ _id: streamName, ts: l.ts }), 'restart') : step8(l)))),
  };
  };
  return stop;
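
For context on the first hunk: the createIndex helper now treats MongoDB server error codes 85 and 276 as terminal conflicts instead of matching on a broader condition, while code 12587 keeps its own branch. The sketch below is illustrative only; the ensureIndex name, the surrounding retry loop, and the comments on what each code means are assumptions, since the diff only shows the catch block.

// Illustrative sketch, not the package's actual code.
const ensureIndex = async (collection, indexSpec, options) => {
  for (;;) {
    try {
      await collection.createIndex(indexSpec, options);
      return;
    }
    catch (e) {
      // Codes 85 and 276 (index options conflict / index build aborted,
      // per current MongoDB server error codes; assumed meaning) end the loop.
      if ([85, 276].includes(e.code)) {
        break;
      }
      // Code 12587 ("background operation in progress"; assumed meaning):
      // back off briefly and retry.
      if (e.code == 12587) {
        await new Promise(resolve => setTimeout(resolve, 1000));
        continue;
      }
      throw e;
    }
  }
};
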
package/index.js
CHANGED
@@ -1115,7 +1115,7 @@ const createIndex = async (collection, indexSpec, options) => {
  await collection.createIndex(indexSpec, options);
  }
  catch (e) {
- if (e.code
+ if ([85, 276].includes(e.code)) {
  break;
  }
  if (e.code == 12587) {
@@ -1394,8 +1394,9 @@ const aggregate = (streamName, input, snapshot = true, start = Date.now()) => in
  };
  log('exec', streamName, req);
  return coll.s.db.command(req).then(result => {
-
-
+ const r = result;
+ log('execed', streamName, (replace) => replace(JSON.stringify(req).replaceAll('$$CLUSTER_TIME', JSON.stringify(r.cursor.atClusterTime))), result, 'took', Date.now() - start);
+ return r;
  }, err => {
  log('err', req, err);
  throw new Error(err);
@@ -1446,23 +1447,21 @@ const makeWatchStream = (db, { collection, projection: p, hardMatch: m }, startA
  },
  });
  pipeline.push({
- $
-
-
-
-
-
-
-
-
-
- then: '$clusterTime',
- else: null,
- },
- },
+ $match: {
+ $or: [
+ { $expr: { $ne: ['$fullDocument', '$fullDocumentBeforeChange'] } },
+ Object.fromEntries(changeKeys.map(k => [k, null])),
+ ],
+ },
+ });
+ pipeline.push({
+ $project: {
+ _id: 1,
  },
  });
- const stream = db
+ const stream = db
+ .collection(collection.collectionName)
+ .watch(pipeline, {
  fullDocument: 'required',
  fullDocumentBeforeChange: 'required',
  startAtOperationTime: startAt,
@@ -1538,6 +1537,8 @@ const executes$1 = (view, input, streamName) => {
  input: input.delta,
  finalInputFirst: finalInput.raw(true),
  finalInput: finalInput.raw(false),
+ match: view.match?.raw(root()).get(),
+ project: projection,
  teardown: finalInput.teardown((x) => ({
  collection: x.collection.collectionName,
  method: x.method,
@@ -1573,13 +1574,17 @@ const executes$1 = (view, input, streamName) => {
  await Promise.all([snapshotCollection.drop(), action]);
  log('teardown done', `db['${snapshotCollection.collectionName}'].drop()`, ...out);
  };
- if (exists && !same)
+ if (exists && !same) {
  await handleTeardown(exists);
-
+ }
+ return nextData([])(async () => {
+ await new Promise(resolve => setTimeout(resolve, 1000));
+ return step3(same)();
+ }, 'clone into new collection');
  };
  const step3 = (lastTS) => async () => {
  const hardQuery = $and(lastTS
- ? root().of('touchedAt').has($
+ ? root().of('touchedAt').has($gtTs(lastTS.ts))
  : root().of('deletedAt').has($eq(null)), lastTS ? null : match && $expr(match), hardMatch);
  const notDeleted = eq($ifNull(root().of('deletedAt').expr(), nil))(nil);
  const query = match ? and(notDeleted, match) : notDeleted;
@@ -1657,11 +1662,7 @@ const executes$1 = (view, input, streamName) => {
  log('restarting', err);
  return { ts: null };
  })
- .then(doc => doc
- ? doc.ts
- ? next(step7({ ...l, ts: doc.ts }), 'nothing changed')
- : next(step2, 'restart')
- : step8(l)), 'wait for change');
+ .then(doc => (doc ? next(step3({ _id: streamName, ts: l.ts }), 'restart') : step8(l))), 'wait for change');
  };
  return stop;
  };
@@ -1686,7 +1687,9 @@ const executes = (view, input, streamName) => {
  streamNames[streamName] = hash;
  else if (streamNames[streamName] != hash)
  throw new Error('streamName already used');
- const { collection, projection, hardMatch, match } = view;
+ const { collection, projection, hardMatch: pre, match } = view;
+ const removeNotYetSynchronizedFields = Object.values(mapExactToObject(projection, (_, k) => k.startsWith('_') ? root().of(k).has($exists(true)) : null));
+ const hardMatch = $and(pre, ...removeNotYetSynchronizedFields);
  const job = {};
  const db = collection.s.db, coll = collection.collectionName;
  db.command({
@@ -1717,6 +1720,8 @@ const executes = (view, input, streamName) => {
  input: input,
  finalInputFirst: finalInput.raw(true),
  finalInput: finalInput.raw(false),
+ match: view.match?.raw(root()).get(),
+ project: projection,
  teardown: finalInput.teardown((x) => ({
  collection: x.collection.collectionName,
  method: x.method,
@@ -1746,7 +1751,14 @@ const executes = (view, input, streamName) => {
  };
  if (exists && !same)
  await handleTeardown(exists);
- return
+ return {
+ cont: withStop(async () => {
+ await new Promise(resolve => setTimeout(resolve, 1000));
+ return step4(same)();
+ }),
+ data: [],
+ info: { debug: 'clone into new collection', job: undefined },
+ };
  };
  const makeStream = (startAt) => makeWatchStream(db, view, startAt, streamName);
  const step4 = (lastTS) => async () => {
@@ -1772,11 +1784,7 @@ const executes = (view, input, streamName) => {
  info: { job: undefined, debug: 'wait for change' },
  cont: withStop(() => l.stream
  .tryNext()
- .then(doc => doc
- ? doc.ts
- ? next(step7({ ...l, ts: doc.ts }), 'nothing changed')
- : next(step1, 'restart')
- : step8(l))),
+ .then(doc => (doc ? next(step4({ _id: streamName, ts: l.ts }), 'restart') : step8(l)))),
  };
  };
  return stop;
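
Both bundles also rework the change-stream pipeline in makeWatchStream: the old $cond/clusterTime projection is replaced by a $match stage that drops events whose post-image equals the pre-image, followed by a minimal $project. A self-contained sketch of that pipeline shape follows; the changeKeys value, the collection name, and the db handle are hypothetical stand-ins for values the real code derives from the view.

// Illustrative sketch, assuming db is a connected mongodb Db instance.
const changeKeys = ['fullDocument.updatedAt']; // hypothetical tracked keys
const pipeline = [
  {
    $match: {
      $or: [
        // keep events where the projected document actually changed...
        { $expr: { $ne: ['$fullDocument', '$fullDocumentBeforeChange'] } },
        // ...or where any tracked key is null/missing on the event
        Object.fromEntries(changeKeys.map(k => [k, null])),
      ],
    },
  },
  { $project: { _id: 1 } },
];
const stream = db
  .collection('someCollection') // hypothetical collection name
  .watch(pipeline, {
    fullDocument: 'required',
    fullDocumentBeforeChange: 'required',
  });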