@powersync/common 0.0.0-dev-20260311103504 → 0.0.0-dev-20260503073249
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.cjs +791 -489
- package/dist/bundle.cjs.map +1 -1
- package/dist/bundle.mjs +785 -485
- package/dist/bundle.mjs.map +1 -1
- package/dist/bundle.node.cjs +789 -488
- package/dist/bundle.node.cjs.map +1 -1
- package/dist/bundle.node.mjs +783 -484
- package/dist/bundle.node.mjs.map +1 -1
- package/dist/index.d.cts +165 -103
- package/lib/attachments/AttachmentQueue.d.ts +10 -4
- package/lib/attachments/AttachmentQueue.js +10 -4
- package/lib/attachments/AttachmentQueue.js.map +1 -1
- package/lib/attachments/AttachmentService.js +2 -3
- package/lib/attachments/AttachmentService.js.map +1 -1
- package/lib/attachments/SyncingService.d.ts +2 -1
- package/lib/attachments/SyncingService.js +4 -5
- package/lib/attachments/SyncingService.js.map +1 -1
- package/lib/client/AbstractPowerSyncDatabase.d.ts +5 -1
- package/lib/client/AbstractPowerSyncDatabase.js +9 -5
- package/lib/client/AbstractPowerSyncDatabase.js.map +1 -1
- package/lib/client/sync/stream/AbstractRemote.d.ts +29 -8
- package/lib/client/sync/stream/AbstractRemote.js +154 -177
- package/lib/client/sync/stream/AbstractRemote.js.map +1 -1
- package/lib/client/sync/stream/AbstractStreamingSyncImplementation.d.ts +4 -0
- package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js +88 -88
- package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js.map +1 -1
- package/lib/db/DBAdapter.d.ts +55 -9
- package/lib/db/DBAdapter.js +126 -0
- package/lib/db/DBAdapter.js.map +1 -1
- package/lib/db/crud/SyncStatus.d.ts +0 -4
- package/lib/db/crud/SyncStatus.js +0 -4
- package/lib/db/crud/SyncStatus.js.map +1 -1
- package/lib/db/schema/RawTable.d.ts +0 -5
- package/lib/db/schema/Schema.d.ts +0 -2
- package/lib/db/schema/Schema.js +0 -2
- package/lib/db/schema/Schema.js.map +1 -1
- package/lib/index.d.ts +1 -1
- package/lib/index.js +0 -1
- package/lib/index.js.map +1 -1
- package/lib/utils/async.d.ts +0 -9
- package/lib/utils/async.js +0 -9
- package/lib/utils/async.js.map +1 -1
- package/lib/utils/mutex.d.ts +47 -5
- package/lib/utils/mutex.js +146 -21
- package/lib/utils/mutex.js.map +1 -1
- package/lib/utils/queue.d.ts +16 -0
- package/lib/utils/queue.js +42 -0
- package/lib/utils/queue.js.map +1 -0
- package/lib/utils/stream_transform.d.ts +39 -0
- package/lib/utils/stream_transform.js +206 -0
- package/lib/utils/stream_transform.js.map +1 -0
- package/package.json +9 -8
- package/src/attachments/AttachmentQueue.ts +10 -4
- package/src/attachments/AttachmentService.ts +2 -3
- package/src/attachments/README.md +6 -4
- package/src/attachments/SyncingService.ts +4 -5
- package/src/client/AbstractPowerSyncDatabase.ts +9 -5
- package/src/client/sync/stream/AbstractRemote.ts +182 -206
- package/src/client/sync/stream/AbstractStreamingSyncImplementation.ts +96 -83
- package/src/db/DBAdapter.ts +167 -9
- package/src/db/crud/SyncStatus.ts +0 -4
- package/src/db/schema/RawTable.ts +0 -5
- package/src/db/schema/Schema.ts +0 -2
- package/src/index.ts +1 -1
- package/src/utils/async.ts +0 -11
- package/src/utils/mutex.ts +184 -26
- package/src/utils/queue.ts +48 -0
- package/src/utils/stream_transform.ts +252 -0
- package/lib/utils/DataStream.d.ts +0 -62
- package/lib/utils/DataStream.js +0 -169
- package/lib/utils/DataStream.js.map +0 -1
- package/src/utils/DataStream.ts +0 -222
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
/** Shared terminal result returned by async iterators in this module once they complete. */
export const doneResult = { done: true, value: undefined };

/**
 * Wraps a value in a non-terminal async-iterator result.
 */
export function valueResult(value) {
  const result = { done: false, value: value };
  return result;
}
|
|
5
|
+
/**
 * A variant of {@link Array.map} for async iterators.
 *
 * Returns an iterator yielding `transform(value)` for every value produced by
 * `source`, and completing (with the source's own done result) when `source`
 * completes.
 *
 * @param source The async iterator supplying input values.
 * @param transform Function applied to each value emitted by `source`.
 * @returns An async iterator of transformed values.
 */
export function map(source, transform) {
  return {
    next: async () => {
      const value = await source.next();
      if (value.done) {
        return value;
      } else {
        // Explicit `done: false` keeps the result shape consistent with
        // the non-terminal results produced elsewhere in this module.
        return { done: false, value: transform(value.value) };
      }
    }
  };
}
|
|
21
|
+
/**
 * Expands a source async iterator by allowing to inject events asynchronously.
 *
 * The resulting iterator will emit all events from its source. Additionally though, events can be injected. These
 * events are dropped once the main iterator completes, but are otherwise forwarded.
 *
 * The iterator completes when its source completes, and it supports backpressure by only calling `next()` on the source
 * in response to a `next()` call from downstream if no pending injected events can be dispatched.
 */
export function injectable(source) {
  // Set once the source resolves with { done: true }; afterwards next() returns
  // doneResult immediately and further injected events are effectively dropped.
  let sourceIsDone = false;
  let waiter = undefined; // An active, waiting next() call.
  // A pending upstream event that couldn't be dispatched because inject() has been called before it was resolved.
  let pendingSourceEvent = null;
  // Injected events queued while no downstream next() call was outstanding.
  let pendingInjectedEvents = [];

  // Atomically take the current waiter (if any) so it is settled at most once,
  // whether by an injected event or by the source resolving.
  const consumeWaiter = () => {
    const pending = waiter;
    waiter = undefined;
    return pending;
  };

  // Request the next event from the source. The outcome is delivered to the
  // currently waiting next() call, or — when inject() has already satisfied
  // that waiter in the meantime — parked in pendingSourceEvent so the next
  // downstream next() call picks it up first.
  const fetchFromSource = () => {
    const resolveWaiter = (propagate) => {
      const active = consumeWaiter();
      if (active) {
        propagate(active);
      } else {
        // No waiter left: keep the settlement (resolve or reject) for later.
        pendingSourceEvent = propagate;
      }
    };

    const nextFromSource = source.next();
    nextFromSource.then(
      (value) => {
        sourceIsDone = value.done == true;
        resolveWaiter((w) => w.resolve(value));
      },
      (error) => {
        // Source errors are forwarded to whichever next() call is (or will be) waiting.
        resolveWaiter((w) => w.reject(error));
      }
    );
  };

  return {
    next: () => {
      return new Promise((resolve, reject) => {
        // First priority: Dispatch ready upstream events.
        if (sourceIsDone) {
          return resolve(doneResult);
        }
        if (pendingSourceEvent) {
          // A source settlement arrived while no next() call was waiting; deliver it now.
          pendingSourceEvent({ resolve, reject });
          pendingSourceEvent = null;
          return;
        }

        // Second priority: Dispatch injected events
        if (pendingInjectedEvents.length) {
          return resolve(valueResult(pendingInjectedEvents.shift()));
        }

        // Nothing pending? Fetch from source
        waiter = { resolve, reject };
        return fetchFromSource();
      });
    },
    inject: (event) => {
      // Resolve an outstanding next() call directly, or queue the event until
      // downstream asks for the next value.
      const pending = consumeWaiter();
      if (pending != null) {
        pending.resolve(valueResult(event));
      } else {
        pendingInjectedEvents.push(event);
      }
    }
  };
}
|
|
91
|
+
/**
 * Splits a byte stream at line endings, emitting each line as a string.
 *
 * Lines are trimmed and empty lines are skipped. A trailing line without a
 * final newline is emitted once the source completes.
 *
 * @param source Async iterator of byte chunks (e.g. Uint8Array values).
 * @param decoder A TextDecoder used to convert byte chunks to text.
 * @returns An async iterator of trimmed, non-empty lines.
 */
export function extractJsonLines(source, decoder) {
  let buffer = '';
  const pendingLines = [];
  let isFinalEvent = false;

  return {
    next: async () => {
      while (true) {
        if (isFinalEvent) {
          return { done: true, value: undefined };
        }

        {
          // Emit already-split lines before pulling more data from upstream.
          const first = pendingLines.shift();
          if (first) {
            return { done: false, value: first };
          }
        }

        const { done, value } = await source.next();
        if (done) {
          // Flush the decoder: a multi-byte character split across the final
          // chunk boundary may still be buffered inside the decoder and would
          // otherwise be dropped from the last line.
          buffer += decoder.decode();
          const remaining = buffer.trim();
          if (remaining.length != 0) {
            isFinalEvent = true;
            return { done: false, value: remaining };
          }

          return { done: true, value: undefined };
        }

        // { stream: true } lets the decoder buffer partial multi-byte sequences
        // across chunk boundaries instead of emitting replacement characters.
        const data = decoder.decode(value, { stream: true });
        buffer += data;

        const lines = buffer.split('\n');
        for (let i = 0; i < lines.length - 1; i++) {
          const l = lines[i].trim();
          if (l.length > 0) {
            pendingLines.push(l);
          }
        }

        // The last element is an incomplete line; keep it for the next chunk.
        buffer = lines[lines.length - 1];
      }
    }
  };
}
|
|
133
|
+
/**
 * Splits a concatenated stream of BSON objects by emitting individual objects.
 *
 * Each emitted value is a Uint8Array containing one complete BSON document,
 * including its 4-byte little-endian length header. Throws if the source ends
 * in the middle of a document, or if a document declares an invalid length.
 */
export function extractBsonObjects(source) {
  // Fully read but not emitted yet.
  const completedObjects = [];

  // Whether source has returned { done: true }. We do the same once completed objects have been emitted.
  let isDone = false;

  // Scratch buffer for the 4-byte document length header, which may itself
  // arrive split across chunks.
  const lengthBuffer = new DataView(new ArrayBuffer(4));
  // The document currently being assembled, or null while reading the header.
  let objectBody = null;
  // If we're parsing the length field, a number between 1 and 4 (inclusive) describing remaining bytes in the header.
  // If we're consuming a document, the bytes remaining.
  let remainingLength = 4;

  return {
    async next() {
      while (true) {
        // Before fetching new data from upstream, return completed objects.
        if (completedObjects.length) {
          return valueResult(completedObjects.shift());
        }
        if (isDone) {
          return doneResult;
        }

        const upstreamEvent = await source.next();
        if (upstreamEvent.done) {
          isDone = true;
          // A partially-read document or header at end of stream is a protocol error.
          if (objectBody || remainingLength != 4) {
            throw new Error('illegal end of stream in BSON object');
          }
          return doneResult;
        }

        const chunk = upstreamEvent.value;
        // Walk the chunk; i advances by however many bytes each state consumes.
        for (let i = 0; i < chunk.length; ) {
          const availableInData = chunk.length - i;

          if (objectBody) {
            // We're in the middle of reading a BSON document.
            const bytesToRead = Math.min(availableInData, remainingLength);
            // View (not copy) into the source chunk, honoring its byteOffset.
            const copySource = new Uint8Array(chunk.buffer, chunk.byteOffset + i, bytesToRead);
            // Write position = total length minus bytes still outstanding
            // (the first 4 bytes were pre-filled with the header below).
            objectBody.set(copySource, objectBody.length - remainingLength);
            i += bytesToRead;
            remainingLength -= bytesToRead;

            if (remainingLength == 0) {
              completedObjects.push(objectBody);

              // Prepare to read another document, starting with its length
              objectBody = null;
              remainingLength = 4;
            }
          } else {
            // Copy up to 4 bytes into lengthBuffer, depending on how many we still need.
            const bytesToRead = Math.min(availableInData, remainingLength);
            for (let j = 0; j < bytesToRead; j++) {
              lengthBuffer.setUint8(4 - remainingLength + j, chunk[i + j]);
            }
            i += bytesToRead;
            remainingLength -= bytesToRead;

            if (remainingLength == 0) {
              // Transition from reading length header to reading document. Subtracting 4 because the length of the
              // header is included in length.
              const length = lengthBuffer.getInt32(0, true /* little endian */);
              remainingLength = length - 4;
              if (remainingLength < 1) {
                throw new Error(`invalid length for bson: ${length}`);
              }

              objectBody = new Uint8Array(length);
              // Re-write the header into the output so the emitted buffer is a
              // complete, self-describing BSON document.
              new DataView(objectBody.buffer).setInt32(0, length, true);
            }
          }
        }
      }
    }
  };
}
|
|
206
|
+
//# sourceMappingURL=stream_transform.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"stream_transform.js","sourceRoot":"","sources":["../../src/utils/stream_transform.ts"],"names":[],"mappings":"AAUA,MAAM,CAAC,MAAM,UAAU,GAA8B,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;AAEtF,MAAM,UAAU,WAAW,CAAI,KAAQ;IACrC,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC;AAChC,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,GAAG,CAAS,MAA+B,EAAE,GAAuB;IAClF,OAAO;QACL,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;YAClC,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;gBACf,OAAO,KAAK,CAAC;YACf,CAAC;iBAAM,CAAC;gBACN,OAAO,EAAE,KAAK,EAAE,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC;YACrC,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC;AAMD;;;;;;;;GAQG;AACH,MAAM,UAAU,UAAU,CAAI,MAA8B;IAG1D,IAAI,YAAY,GAAG,KAAK,CAAC;IACzB,IAAI,MAAM,GAAuB,SAAS,CAAC,CAAC,kCAAkC;IAC9E,gHAAgH;IAChH,IAAI,kBAAkB,GAAiC,IAAI,CAAC;IAE5D,IAAI,qBAAqB,GAAQ,EAAE,CAAC;IAEpC,MAAM,aAAa,GAAG,GAAG,EAAE;QACzB,MAAM,OAAO,GAAG,MAAM,CAAC;QACvB,MAAM,GAAG,SAAS,CAAC;QACnB,OAAO,OAAO,CAAC;IACjB,CAAC,CAAC;IAEF,MAAM,eAAe,GAAG,GAAG,EAAE;QAC3B,MAAM,aAAa,GAAG,CAAC,SAA8B,EAAE,EAAE;YACvD,MAAM,MAAM,GAAG,aAAa,EAAE,CAAC;YAC/B,IAAI,MAAM,EAAE,CAAC;gBACX,SAAS,CAAC,MAAM,CAAC,CAAC;YACpB,CAAC;iBAAM,CAAC;gBACN,kBAAkB,GAAG,SAAS,CAAC;YACjC,CAAC;QACH,CAAC,CAAC;QAEF,MAAM,cAAc,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;QACrC,cAAc,CAAC,IAAI,CACjB,CAAC,KAAK,EAAE,EAAE;YACR,YAAY,GAAG,KAAK,CAAC,IAAI,IAAI,IAAI,CAAC;YAClC,aAAa,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;QACzC,CAAC,EACD,CAAC,KAAK,EAAE,EAAE;YACR,aAAa,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;QACxC,CAAC,CACF,CAAC;IACJ,CAAC,CAAC;IAEF,OAAO;QACL,IAAI,EAAE,GAAG,EAAE;YACT,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACrC,kDAAkD;gBAClD,IAAI,YAAY,EAAE,CAAC;oBACjB,OAAO,OAAO,CAAC,UAAU,CAAC,CAAC;gBAC7B,CAAC;gBACD,IAAI,kBAAkB,EAAE,CAAC;oBACvB,kBAAkB,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;oBACxC,kBAAkB,GAAG,IAAI,CAAC;oBAC1B,OAAO;gBACT,CAAC;gBAED,4CAA4C;gBAC5C,IAAI,qBAAqB,CAAC,MAAM,EAAE,CAAC;oBACjC,OAAO,OAAO,CAAC,WAAW,CAAC,qBAAqB,CAAC,KAAK,EAAG,CAAC,CA
AC,CAAC;gBAC9D,CAAC;gBAED,qCAAqC;gBACrC,MAAM,GAAG,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;gBAC7B,OAAO,eAAe,EAAE,CAAC;YAC3B,CAAC,CAAC,CAAC;QACL,CAAC;QACD,MAAM,EAAE,CAAC,KAAK,EAAE,EAAE;YAChB,MAAM,OAAO,GAAG,aAAa,EAAE,CAAC;YAChC,IAAI,OAAO,IAAI,IAAI,EAAE,CAAC;gBACpB,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,CAAC;YACtC,CAAC;iBAAM,CAAC;gBACN,qBAAqB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YACpC,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,gBAAgB,CAC9B,MAAuC,EACvC,OAAoB;IAEpB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,YAAY,GAAa,EAAE,CAAC;IAClC,IAAI,YAAY,GAAG,KAAK,CAAC;IAEzB,OAAO;QACL,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,OAAO,IAAI,EAAE,CAAC;gBACZ,IAAI,YAAY,EAAE,CAAC;oBACjB,OAAO,UAAU,CAAC;gBACpB,CAAC;gBAED,CAAC;oBACC,MAAM,KAAK,GAAG,YAAY,CAAC,KAAK,EAAE,CAAC;oBACnC,IAAI,KAAK,EAAE,CAAC;wBACV,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC;oBACvC,CAAC;gBACH,CAAC;gBAED,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;gBAC5C,IAAI,IAAI,EAAE,CAAC;oBACT,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;oBAChC,IAAI,SAAS,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;wBAC1B,YAAY,GAAG,IAAI,CAAC;wBACpB,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;oBAC3C,CAAC;oBAED,OAAO,UAAU,CAAC;gBACpB,CAAC;gBAED,MAAM,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC;gBACrD,MAAM,IAAI,IAAI,CAAC;gBAEf,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBACjC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;oBAC1C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;oBAC1B,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;wBACjB,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;oBACvB,CAAC;gBACH,CAAC;gBAED,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YACnC,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,kBAAkB,CAAC,MAAuC;IACxE,kCAAkC;IAClC,MAAM,gBAAgB,GAAiB,EAAE,CAAC;IAE1C,uGAAuG;IACvG,IAAI,MAAM,GAAG,KAAK,CAAC;IAEnB,MAAM,YAAY,GAAG,IAAI,QAAQ,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;IACtD,IAAI,UAAU,GAAsB,IAAI,CAAC;IACzC,oHAAoH;IACpH,sDAAsD;IACtD,IAAI,eAAe,G
AAG,CAAC,CAAC;IAExB,OAAO;QACL,KAAK,CAAC,IAAI;YACR,OAAO,IAAI,EAAE,CAAC;gBACZ,oEAAoE;gBACpE,IAAI,gBAAgB,CAAC,MAAM,EAAE,CAAC;oBAC5B,OAAO,WAAW,CAAC,gBAAgB,CAAC,KAAK,EAAG,CAAC,CAAC;gBAChD,CAAC;gBACD,IAAI,MAAM,EAAE,CAAC;oBACX,OAAO,UAAU,CAAC;gBACpB,CAAC;gBAED,MAAM,aAAa,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;gBAC1C,IAAI,aAAa,CAAC,IAAI,EAAE,CAAC;oBACvB,MAAM,GAAG,IAAI,CAAC;oBACd,IAAI,UAAU,IAAI,eAAe,IAAI,CAAC,EAAE,CAAC;wBACvC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;oBAC1D,CAAC;oBACD,OAAO,UAAU,CAAC;gBACpB,CAAC;gBAED,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC;gBAClC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,GAAI,CAAC;oBACnC,MAAM,eAAe,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;oBAEzC,IAAI,UAAU,EAAE,CAAC;wBACf,kDAAkD;wBAClD,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,eAAe,EAAE,eAAe,CAAC,CAAC;wBAC/D,MAAM,UAAU,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;wBACnF,UAAU,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,MAAM,GAAG,eAAe,CAAC,CAAC;wBAChE,CAAC,IAAI,WAAW,CAAC;wBACjB,eAAe,IAAI,WAAW,CAAC;wBAE/B,IAAI,eAAe,IAAI,CAAC,EAAE,CAAC;4BACzB,gBAAgB,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;4BAElC,6DAA6D;4BAC7D,UAAU,GAAG,IAAI,CAAC;4BAClB,eAAe,GAAG,CAAC,CAAC;wBACtB,CAAC;oBACH,CAAC;yBAAM,CAAC;wBACN,6EAA6E;wBAC7E,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,eAAe,EAAE,eAAe,CAAC,CAAC;wBAC/D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,EAAE,CAAC,EAAE,EAAE,CAAC;4BACrC,YAAY,CAAC,QAAQ,CAAC,CAAC,GAAG,eAAe,GAAG,CAAC,EAAE,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;wBAC/D,CAAC;wBACD,CAAC,IAAI,WAAW,CAAC;wBACjB,eAAe,IAAI,WAAW,CAAC;wBAE/B,IAAI,eAAe,IAAI,CAAC,EAAE,CAAC;4BACzB,qGAAqG;4BACrG,gCAAgC;4BAChC,MAAM,MAAM,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,mBAAmB,CAAC,CAAC;4BAClE,eAAe,GAAG,MAAM,GAAG,CAAC,CAAC;4BAC7B,IAAI,eAAe,GAAG,CAAC,EAAE,CAAC;gCACxB,MAAM,IAAI,KAAK,CAAC,4BAA4B,MAAM,EAAE,CAAC,CAAC;4BACxD,CAAC;4BAED,UAAU,GAAG,IAAI,UAAU,CAAC,MAAM,CAAC,CAAC;4BACpC,IAAI,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;wBAC5D,CAAC;oBACH,CAAC;gBACH,CAAC;YACH,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC"}
|
package/package.json
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@powersync/common",
|
|
3
|
-
"version": "0.0.0-dev-
|
|
3
|
+
"version": "0.0.0-dev-20260503073249",
|
|
4
4
|
"publishConfig": {
|
|
5
5
|
"registry": "https://registry.npmjs.org/",
|
|
6
6
|
"access": "public"
|
|
7
7
|
},
|
|
8
|
-
"description": "API definitions for
|
|
8
|
+
"description": "API definitions for PowerSync",
|
|
9
9
|
"type": "module",
|
|
10
10
|
"main": "dist/bundle.mjs",
|
|
11
11
|
"module": "dist/bundle.mjs",
|
|
@@ -32,7 +32,7 @@
|
|
|
32
32
|
}
|
|
33
33
|
}
|
|
34
34
|
},
|
|
35
|
-
"author": "
|
|
35
|
+
"author": "PowerSync",
|
|
36
36
|
"license": "Apache-2.0",
|
|
37
37
|
"files": [
|
|
38
38
|
"lib",
|
|
@@ -48,7 +48,6 @@
|
|
|
48
48
|
},
|
|
49
49
|
"homepage": "https://docs.powersync.com",
|
|
50
50
|
"dependencies": {
|
|
51
|
-
"async-mutex": "^0.5.0",
|
|
52
51
|
"event-iterator": "^2.0.0"
|
|
53
52
|
},
|
|
54
53
|
"devDependencies": {
|
|
@@ -58,14 +57,16 @@
|
|
|
58
57
|
"@rollup/plugin-node-resolve": "^16.0.3",
|
|
59
58
|
"@types/node": "^24.0.0",
|
|
60
59
|
"@types/uuid": "^9.0.6",
|
|
60
|
+
"bson": "^6.10.4",
|
|
61
61
|
"buffer": "^6.0.3",
|
|
62
|
-
"rollup": "^4.52.5",
|
|
63
|
-
"rollup-plugin-dts": "^6.2.1",
|
|
64
62
|
"cross-fetch": "^4.1.0",
|
|
63
|
+
"estree-walker": "^3.0.3",
|
|
65
64
|
"js-logger": "^1.6.1",
|
|
65
|
+
"magic-string": "^0.30.21",
|
|
66
|
+
"rollup": "^4.52.5",
|
|
67
|
+
"rollup-plugin-dts": "^6.2.1",
|
|
66
68
|
"rsocket-core": "1.0.0-alpha.3",
|
|
67
|
-
"rsocket-websocket-client": "1.0.0-alpha.3"
|
|
68
|
-
"bson": "^6.10.4"
|
|
69
|
+
"rsocket-websocket-client": "1.0.0-alpha.3"
|
|
69
70
|
},
|
|
70
71
|
"scripts": {
|
|
71
72
|
"build": "tsc -b && rollup -c rollup.config.mjs",
|
|
@@ -51,10 +51,16 @@ export class AttachmentQueue {
|
|
|
51
51
|
/** Logger instance for diagnostic information */
|
|
52
52
|
readonly logger: ILogger;
|
|
53
53
|
|
|
54
|
-
/** Interval in milliseconds between periodic sync operations.
|
|
54
|
+
/** Interval in milliseconds between periodic sync operations. Acts as a polling timer to retry
|
|
55
|
+
* failed uploads/downloads, especially after the app goes offline. Default: 30000 (30 seconds) */
|
|
55
56
|
readonly syncIntervalMs: number = 30 * 1000;
|
|
56
57
|
|
|
57
|
-
/**
|
|
58
|
+
/** Throttle duration in milliseconds for the reactive watch query on the attachments table.
|
|
59
|
+
* When attachment records change, a watch query detects the change and triggers a sync.
|
|
60
|
+
* This throttle prevents the sync from firing too rapidly when many changes happen in
|
|
61
|
+
* quick succession (e.g., bulk inserts). This is distinct from syncIntervalMs — it controls
|
|
62
|
+
* how quickly the queue reacts to changes, while syncIntervalMs controls how often it polls
|
|
63
|
+
* for retries. Default: 30 (from DEFAULT_WATCH_THROTTLE_MS) */
|
|
58
64
|
readonly syncThrottleDuration: number;
|
|
59
65
|
|
|
60
66
|
/** Whether to automatically download remote attachments. Default: true */
|
|
@@ -86,8 +92,8 @@ export class AttachmentQueue {
|
|
|
86
92
|
* @param options.watchAttachments - Callback for monitoring attachment changes in your data model
|
|
87
93
|
* @param options.tableName - Name of the table to store attachment records. Default: 'ps_attachment_queue'
|
|
88
94
|
* @param options.logger - Logger instance. Defaults to db.logger
|
|
89
|
-
* @param options.syncIntervalMs -
|
|
90
|
-
* @param options.syncThrottleDuration - Throttle duration for
|
|
95
|
+
* @param options.syncIntervalMs - Periodic polling interval in milliseconds for retrying failed uploads/downloads. Default: 30000
|
|
96
|
+
* @param options.syncThrottleDuration - Throttle duration in milliseconds for the reactive watch query that detects attachment changes. Prevents rapid-fire syncs during bulk changes. Default: 30
|
|
91
97
|
* @param options.downloadAttachments - Whether to automatically download remote attachments. Default: true
|
|
92
98
|
* @param options.archivedCacheLimit - Maximum archived attachments before cleanup. Default: 100
|
|
93
99
|
*/
|
|
@@ -1,8 +1,7 @@
|
|
|
1
|
-
import { Mutex } from 'async-mutex';
|
|
2
1
|
import { AbstractPowerSyncDatabase } from '../client/AbstractPowerSyncDatabase.js';
|
|
3
2
|
import { DifferentialWatchedQuery } from '../client/watched/processors/DifferentialQueryProcessor.js';
|
|
4
3
|
import { ILogger } from '../utils/Logger.js';
|
|
5
|
-
import {
|
|
4
|
+
import { Mutex } from '../utils/mutex.js';
|
|
6
5
|
import { AttachmentContext } from './AttachmentContext.js';
|
|
7
6
|
import { AttachmentRecord, AttachmentState } from './Schema.js';
|
|
8
7
|
|
|
@@ -55,7 +54,7 @@ export class AttachmentService {
|
|
|
55
54
|
* Executes a callback with exclusive access to the attachment context.
|
|
56
55
|
*/
|
|
57
56
|
async withContext<T>(callback: (context: AttachmentContext) => Promise<T>): Promise<T> {
|
|
58
|
-
return
|
|
57
|
+
return this.mutex.runExclusive(async () => {
|
|
59
58
|
return callback(this.context);
|
|
60
59
|
});
|
|
61
60
|
}
|
|
@@ -289,8 +289,8 @@ new AttachmentQueue(options: AttachmentQueueOptions)
|
|
|
289
289
|
| `watchAttachments` | `(onUpdate: (attachments: WatchedAttachmentItem[]) => Promise<void>) => void` | Yes | - | Callback to determine which attachments to handle by the queue from your user defined query |
|
|
290
290
|
| `tableName` | `string` | No | `'attachments'` | Name of the attachments table |
|
|
291
291
|
| `logger` | `ILogger` | No | `db.logger` | Logger instance for diagnostic output |
|
|
292
|
-
| `syncIntervalMs` | `number` | No | `30000` |
|
|
293
|
-
| `syncThrottleDuration` | `number` | No | `30` | Throttle duration for sync
|
|
292
|
+
| `syncIntervalMs` | `number` | No | `30000` | Periodic polling interval (in milliseconds) for retrying failed uploads/downloads. A `setInterval` timer that calls `syncStorage()` on this cadence, ensuring operations are retried even if no database changes occur (e.g., after coming back online). |
|
|
293
|
+
| `syncThrottleDuration` | `number` | No | `30` | Throttle duration (in milliseconds) for the reactive watch query on the attachments table. When attachment records change (e.g., a new file is queued), a watch query detects the change and triggers a sync. This throttle prevents the sync from firing too rapidly when many changes happen in quick succession (e.g., bulk inserts). This is distinct from `syncIntervalMs` — it controls how quickly the queue *reacts* to changes, while `syncIntervalMs` controls how often it *polls* for retries. |
|
|
294
294
|
| `downloadAttachments` | `boolean` | No | `true` | Whether to automatically download remote attachments |
|
|
295
295
|
| `archivedCacheLimit` | `number` | No | `100` | Maximum number of archived attachments before cleanup |
|
|
296
296
|
| `errorHandler` | `AttachmentErrorHandler` | No | `undefined` | Custom error handler for upload/download/delete operations |
|
|
@@ -676,11 +676,13 @@ Adjust sync frequency based on your needs:
|
|
|
676
676
|
```typescript
|
|
677
677
|
const queue = new AttachmentQueue({
|
|
678
678
|
// ... other options
|
|
679
|
-
syncIntervalMs: 60000, //
|
|
679
|
+
syncIntervalMs: 60000, // Poll for retries every 60 seconds instead of 30
|
|
680
|
+
syncThrottleDuration: 100, // React to attachment changes within 100ms (default: 30ms)
|
|
680
681
|
});
|
|
681
682
|
```
|
|
682
683
|
|
|
683
|
-
|
|
684
|
+
- **`syncIntervalMs`** controls the periodic polling timer — how often the queue retries failed operations.
|
|
685
|
+
- **`syncThrottleDuration`** controls how quickly the queue reacts to attachment table changes. The default (30ms) is fast enough for most use cases. Increase it if you see performance issues during bulk attachment operations.
|
|
684
686
|
|
|
685
687
|
### Archive and Cache Management
|
|
686
688
|
|
|
@@ -54,7 +54,7 @@ export class SyncingService {
|
|
|
54
54
|
updatedAttachments.push(downloaded);
|
|
55
55
|
break;
|
|
56
56
|
case AttachmentState.QUEUED_DELETE:
|
|
57
|
-
const deleted = await this.deleteAttachment(attachment);
|
|
57
|
+
const deleted = await this.deleteAttachment(attachment, context);
|
|
58
58
|
updatedAttachments.push(deleted);
|
|
59
59
|
break;
|
|
60
60
|
|
|
@@ -143,18 +143,17 @@ export class SyncingService {
|
|
|
143
143
|
* On failure, defers to error handler or archives.
|
|
144
144
|
*
|
|
145
145
|
* @param attachment - The attachment record to delete
|
|
146
|
+
* @param context - Attachment context for database operations
|
|
146
147
|
* @returns Updated attachment record
|
|
147
148
|
*/
|
|
148
|
-
async deleteAttachment(attachment: AttachmentRecord): Promise<AttachmentRecord> {
|
|
149
|
+
async deleteAttachment(attachment: AttachmentRecord, context: AttachmentContext): Promise<AttachmentRecord> {
|
|
149
150
|
try {
|
|
150
151
|
await this.remoteStorage.deleteFile(attachment);
|
|
151
152
|
if (attachment.localUri) {
|
|
152
153
|
await this.localStorage.deleteFile(attachment.localUri);
|
|
153
154
|
}
|
|
154
155
|
|
|
155
|
-
await
|
|
156
|
-
await ctx.deleteAttachment(attachment.id);
|
|
157
|
-
});
|
|
156
|
+
await context.deleteAttachment(attachment.id);
|
|
158
157
|
|
|
159
158
|
return {
|
|
160
159
|
...attachment,
|
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import { Mutex } from 'async-mutex';
|
|
2
1
|
import { EventIterator } from 'event-iterator';
|
|
3
2
|
import Logger, { ILogger } from 'js-logger';
|
|
4
3
|
import {
|
|
@@ -14,7 +13,7 @@ import { UploadQueueStats } from '../db/crud/UploadQueueStatus.js';
|
|
|
14
13
|
import { Schema } from '../db/schema/Schema.js';
|
|
15
14
|
import { BaseObserver } from '../utils/BaseObserver.js';
|
|
16
15
|
import { ControlledExecutor } from '../utils/ControlledExecutor.js';
|
|
17
|
-
import {
|
|
16
|
+
import { throttleTrailing } from '../utils/async.js';
|
|
18
17
|
import {
|
|
19
18
|
ConnectionManager,
|
|
20
19
|
CreateSyncImplementationOptions,
|
|
@@ -46,6 +45,7 @@ import { TriggerManagerImpl } from './triggers/TriggerManagerImpl.js';
|
|
|
46
45
|
import { DEFAULT_WATCH_THROTTLE_MS, WatchCompatibleQuery } from './watched/WatchedQuery.js';
|
|
47
46
|
import { OnChangeQueryProcessor } from './watched/processors/OnChangeQueryProcessor.js';
|
|
48
47
|
import { WatchedQueryComparator } from './watched/processors/comparators.js';
|
|
48
|
+
import { Mutex } from '../utils/mutex.js';
|
|
49
49
|
|
|
50
50
|
export interface DisconnectAndClearOptions {
|
|
51
51
|
/** When set to false, data in local-only tables is preserved. */
|
|
@@ -704,7 +704,7 @@ export abstract class AbstractPowerSyncDatabase extends BaseObserver<PowerSyncDB
|
|
|
704
704
|
* @returns A transaction of CRUD operations to upload, or null if there are none
|
|
705
705
|
*/
|
|
706
706
|
async getNextCrudTransaction(): Promise<CrudTransaction | null> {
|
|
707
|
-
const iterator = this.getCrudTransactions()[
|
|
707
|
+
const iterator = this.getCrudTransactions()[Symbol.asyncIterator]();
|
|
708
708
|
return (await iterator.next()).value;
|
|
709
709
|
}
|
|
710
710
|
|
|
@@ -741,7 +741,7 @@ export abstract class AbstractPowerSyncDatabase extends BaseObserver<PowerSyncDB
|
|
|
741
741
|
*/
|
|
742
742
|
getCrudTransactions(): AsyncIterable<CrudTransaction, null> {
|
|
743
743
|
return {
|
|
744
|
-
[
|
|
744
|
+
[Symbol.asyncIterator]: () => {
|
|
745
745
|
let lastCrudItemId = -1;
|
|
746
746
|
const sql = `
|
|
747
747
|
WITH RECURSIVE crud_entries AS (
|
|
@@ -813,6 +813,10 @@ SELECT * FROM crud_entries;
|
|
|
813
813
|
* Execute a SQL write (INSERT/UPDATE/DELETE) query
|
|
814
814
|
* and optionally return results.
|
|
815
815
|
*
|
|
816
|
+
* When using the default client-side [JSON-based view system](https://docs.powersync.com/architecture/client-architecture#client-side-schema-and-sqlite-database-structure),
|
|
817
|
+
* the returned result's `rowsAffected` may be `0` for successful `UPDATE` and `DELETE` statements.
|
|
818
|
+
* Use a `RETURNING` clause and inspect `result.rows` when you need to confirm which rows changed.
|
|
819
|
+
*
|
|
816
820
|
* @param sql The SQL query to execute
|
|
817
821
|
* @param parameters Optional array of parameters to bind to the query
|
|
818
822
|
* @returns The query result as an object with structured key-value pairs
|
|
@@ -921,7 +925,7 @@ SELECT * FROM crud_entries;
|
|
|
921
925
|
await this.waitForReady();
|
|
922
926
|
return this.database.readTransaction(
|
|
923
927
|
async (tx) => {
|
|
924
|
-
const res = await callback(
|
|
928
|
+
const res = await callback(tx);
|
|
925
929
|
await tx.rollback();
|
|
926
930
|
return res;
|
|
927
931
|
},
|