cry-synced-db-client 0.1.72 → 0.1.74
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +374 -125
- package/dist/src/db/DexieDb.d.ts +1 -0
- package/dist/src/db/RestProxy.d.ts +22 -0
- package/dist/src/db/sync/ServerUpdateHandler.d.ts +1 -0
- package/dist/src/types/I_DexieDb.d.ts +2 -0
- package/dist/src/types/I_RestInterface.d.ts +5 -0
- package/dist/src/types/I_SyncedDb.d.ts +0 -2
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -125,7 +125,7 @@ function sortItems(items, sort) {
|
|
|
125
125
|
const sortEntries = Object.entries(sort);
|
|
126
126
|
if (sortEntries.length === 0)
|
|
127
127
|
return items;
|
|
128
|
-
return
|
|
128
|
+
return items.slice().sort((a, b) => {
|
|
129
129
|
for (const [field, direction] of sortEntries) {
|
|
130
130
|
const aVal = getNestedValue(a, field);
|
|
131
131
|
const bVal = getNestedValue(b, field);
|
|
@@ -194,7 +194,7 @@ function projectItem(item, project) {
|
|
|
194
194
|
}
|
|
195
195
|
return result;
|
|
196
196
|
} else {
|
|
197
|
-
const result = {
|
|
197
|
+
const result = Object.assign({}, item);
|
|
198
198
|
for (const [field, value] of entries) {
|
|
199
199
|
if (value === false || value === 0) {
|
|
200
200
|
delete result[field];
|
|
@@ -237,7 +237,9 @@ class InMemManager {
|
|
|
237
237
|
if (operation === "upsert") {
|
|
238
238
|
this.inMemDb.saveMany(collection, items);
|
|
239
239
|
if (this.useObjectMetadata && config?.hasMetadata) {
|
|
240
|
-
const ids =
|
|
240
|
+
const ids = [];
|
|
241
|
+
for (const item of items)
|
|
242
|
+
ids.push(item._id);
|
|
241
243
|
let metadatas;
|
|
242
244
|
if (config.onObjectsUpdated) {
|
|
243
245
|
try {
|
|
@@ -259,7 +261,9 @@ class InMemManager {
|
|
|
259
261
|
this.setObjectsMetadataInternal(collection, ids, metadatas);
|
|
260
262
|
}
|
|
261
263
|
} else if (operation === "delete") {
|
|
262
|
-
const ids =
|
|
264
|
+
const ids = [];
|
|
265
|
+
for (const item of items)
|
|
266
|
+
ids.push(item._id);
|
|
263
267
|
this.inMemDb.deleteManyByIds(collection, ids);
|
|
264
268
|
if (this.useObjectMetadata && config?.hasMetadata) {
|
|
265
269
|
this.deleteObjectsMetadataInternal(collection, ids);
|
|
@@ -271,7 +275,9 @@ class InMemManager {
|
|
|
271
275
|
if (this.useObjectMetadata && items.length > 0) {
|
|
272
276
|
const config = this.collections.get(collection);
|
|
273
277
|
if (config?.hasMetadata) {
|
|
274
|
-
const ids =
|
|
278
|
+
const ids = [];
|
|
279
|
+
for (const item of items)
|
|
280
|
+
ids.push(item._id);
|
|
275
281
|
let metadatas;
|
|
276
282
|
if (config.onObjectsUpdated) {
|
|
277
283
|
try {
|
|
@@ -1722,11 +1728,12 @@ function savePendingWrite(tenant, collection, id, delta) {
|
|
|
1722
1728
|
let pending;
|
|
1723
1729
|
if (existingRaw) {
|
|
1724
1730
|
const existing = dist_default.parse(existingRaw);
|
|
1731
|
+
Object.assign(existing.data, delta);
|
|
1725
1732
|
pending = {
|
|
1726
1733
|
tenant,
|
|
1727
1734
|
collection,
|
|
1728
1735
|
id: String(id),
|
|
1729
|
-
data:
|
|
1736
|
+
data: existing.data,
|
|
1730
1737
|
timestamp: now
|
|
1731
1738
|
};
|
|
1732
1739
|
} else {
|
|
@@ -1808,7 +1815,7 @@ class PendingChangesManager {
|
|
|
1808
1815
|
}
|
|
1809
1816
|
const deltaWithId = existing ? data : { _id: id, ...data };
|
|
1810
1817
|
savePendingWrite(this.tenant, collection, id, deltaWithId);
|
|
1811
|
-
const fullData = existing ?
|
|
1818
|
+
const fullData = existing ? Object.assign(existing.data, data) : { _id: id, ...data };
|
|
1812
1819
|
const timer = setTimeout(() => {
|
|
1813
1820
|
this.executePendingChange(key);
|
|
1814
1821
|
}, this.debounceDexieWritesMs);
|
|
@@ -1879,7 +1886,8 @@ class PendingChangesManager {
|
|
|
1879
1886
|
baseMeta: { _ts: existing?._ts, _rev: existing?._rev }
|
|
1880
1887
|
});
|
|
1881
1888
|
if (existing) {
|
|
1882
|
-
|
|
1889
|
+
Object.assign(existing, write.data);
|
|
1890
|
+
saveBatch.push(existing);
|
|
1883
1891
|
} else {
|
|
1884
1892
|
insertBatch.push(write.data);
|
|
1885
1893
|
}
|
|
@@ -1887,7 +1895,7 @@ class PendingChangesManager {
|
|
|
1887
1895
|
if (dirtyChangesBatch.length > 0) {
|
|
1888
1896
|
await this.deps.dexieDb.addDirtyChangesBatch(collection, dirtyChangesBatch);
|
|
1889
1897
|
}
|
|
1890
|
-
const allToSave =
|
|
1898
|
+
const allToSave = saveBatch.concat(insertBatch);
|
|
1891
1899
|
if (allToSave.length > 0) {
|
|
1892
1900
|
await this.deps.dexieDb.saveMany(collection, allToSave);
|
|
1893
1901
|
}
|
|
@@ -2063,20 +2071,26 @@ function mergeObjects(local, external) {
|
|
|
2063
2071
|
}
|
|
2064
2072
|
function mergeArrays(local, external) {
|
|
2065
2073
|
if (local.length === 0) {
|
|
2066
|
-
return
|
|
2074
|
+
return external.slice();
|
|
2067
2075
|
}
|
|
2068
2076
|
const firstLocal = local[0];
|
|
2069
2077
|
const firstExternal = external[0];
|
|
2070
2078
|
if (typeof firstLocal === "string" || typeof firstExternal === "string") {
|
|
2071
|
-
|
|
2079
|
+
const set2 = new Set(local);
|
|
2080
|
+
for (const item of external)
|
|
2081
|
+
set2.add(item);
|
|
2082
|
+
return Array.from(set2);
|
|
2072
2083
|
}
|
|
2073
2084
|
if (isPlainObject3(firstLocal) || isPlainObject3(firstExternal)) {
|
|
2074
2085
|
return mergeObjectArrays(local, external);
|
|
2075
2086
|
}
|
|
2076
|
-
|
|
2087
|
+
const set = new Set(local);
|
|
2088
|
+
for (const item of external)
|
|
2089
|
+
set.add(item);
|
|
2090
|
+
return Array.from(set);
|
|
2077
2091
|
}
|
|
2078
2092
|
function mergeObjectArrays(local, external) {
|
|
2079
|
-
const result =
|
|
2093
|
+
const result = local.slice();
|
|
2080
2094
|
const localIds = new Map;
|
|
2081
2095
|
for (let i = 0;i < local.length; i++) {
|
|
2082
2096
|
const item = local[i];
|
|
@@ -2155,33 +2169,45 @@ class SyncEngine {
|
|
|
2155
2169
|
}
|
|
2156
2170
|
this.callOnFindNewerManyCall(syncSpecs, calledFrom);
|
|
2157
2171
|
const findNewerManyStartTime = Date.now();
|
|
2158
|
-
|
|
2172
|
+
const collectionState = new Map;
|
|
2173
|
+
for (const [name] of configMap) {
|
|
2174
|
+
collectionState.set(name, {
|
|
2175
|
+
maxTs: undefined,
|
|
2176
|
+
conflicts: 0,
|
|
2177
|
+
receivedCount: 0
|
|
2178
|
+
});
|
|
2179
|
+
}
|
|
2159
2180
|
try {
|
|
2160
|
-
|
|
2161
|
-
|
|
2181
|
+
await this.deps.withSyncTimeout(this.restInterface.findNewerManyStream(syncSpecs, async (collection, items) => {
|
|
2182
|
+
const config = configMap.get(collection);
|
|
2183
|
+
if (!config)
|
|
2184
|
+
return;
|
|
2185
|
+
const state = collectionState.get(collection);
|
|
2186
|
+
state.receivedCount += items.length;
|
|
2187
|
+
const stats = await this.processIncomingServerData(collection, config, items);
|
|
2188
|
+
state.conflicts += stats.conflictsResolved;
|
|
2189
|
+
if (stats.maxTs) {
|
|
2190
|
+
if (!state.maxTs || this.compareTimestamps(stats.maxTs, state.maxTs) > 0) {
|
|
2191
|
+
state.maxTs = stats.maxTs;
|
|
2192
|
+
}
|
|
2193
|
+
}
|
|
2194
|
+
if (stats.updatedIds.length > 0) {
|
|
2195
|
+
this.deps.broadcastUpdates({ [collection]: stats.updatedIds });
|
|
2196
|
+
}
|
|
2197
|
+
}), "findNewerManyStream");
|
|
2198
|
+
for (const [name, state] of collectionState) {
|
|
2199
|
+
receivedCount += state.receivedCount;
|
|
2200
|
+
conflictsResolved += state.conflicts;
|
|
2201
|
+
collectionStats[name] = {
|
|
2202
|
+
receivedCount: state.receivedCount,
|
|
2203
|
+
sentCount: 0
|
|
2204
|
+
};
|
|
2205
|
+
}
|
|
2206
|
+
this.callOnFindNewerManyResult(syncSpecs, {}, findNewerManyStartTime, true, calledFrom);
|
|
2162
2207
|
} catch (err) {
|
|
2163
2208
|
this.callOnFindNewerManyResult(syncSpecs, {}, findNewerManyStartTime, false, calledFrom, err);
|
|
2164
2209
|
throw err;
|
|
2165
2210
|
}
|
|
2166
|
-
const allUpdatedIds = {};
|
|
2167
|
-
for (const [collectionName, config] of configMap) {
|
|
2168
|
-
const serverData = allServerData[collectionName] || [];
|
|
2169
|
-
delete allServerData[collectionName];
|
|
2170
|
-
receivedCount += serverData.length;
|
|
2171
|
-
collectionStats[collectionName] = {
|
|
2172
|
-
receivedCount: serverData.length,
|
|
2173
|
-
sentCount: 0,
|
|
2174
|
-
receivedItems: []
|
|
2175
|
-
};
|
|
2176
|
-
const stats = await this.processIncomingServerData(collectionName, config, serverData);
|
|
2177
|
-
conflictsResolved += stats.conflictsResolved;
|
|
2178
|
-
if (stats.updatedIds.length > 0) {
|
|
2179
|
-
allUpdatedIds[collectionName] = stats.updatedIds;
|
|
2180
|
-
}
|
|
2181
|
-
}
|
|
2182
|
-
if (Object.keys(allUpdatedIds).length > 0) {
|
|
2183
|
-
this.deps.broadcastUpdates(allUpdatedIds);
|
|
2184
|
-
}
|
|
2185
2211
|
const uploadStats = await this.uploadDirtyItems(calledFrom);
|
|
2186
2212
|
sentCount = uploadStats.sentCount;
|
|
2187
2213
|
for (const [collectionName, stats] of Object.entries(uploadStats.collectionSentCounts || {})) {
|
|
@@ -2190,8 +2216,7 @@ class SyncEngine {
|
|
|
2190
2216
|
} else {
|
|
2191
2217
|
collectionStats[collectionName] = {
|
|
2192
2218
|
receivedCount: 0,
|
|
2193
|
-
sentCount: stats
|
|
2194
|
-
receivedItems: []
|
|
2219
|
+
sentCount: stats
|
|
2195
2220
|
};
|
|
2196
2221
|
}
|
|
2197
2222
|
}
|
|
@@ -2278,18 +2303,22 @@ class SyncEngine {
|
|
|
2278
2303
|
const collectionSentCounts = {};
|
|
2279
2304
|
for (const result of results) {
|
|
2280
2305
|
const { collection, results: { inserted, updated, deleted, errors } } = result;
|
|
2281
|
-
const allSuccessIds = [
|
|
2282
|
-
|
|
2283
|
-
|
|
2284
|
-
|
|
2285
|
-
|
|
2306
|
+
const allSuccessIds = [];
|
|
2307
|
+
for (const e of inserted)
|
|
2308
|
+
allSuccessIds.push(e._id);
|
|
2309
|
+
for (const e of updated)
|
|
2310
|
+
allSuccessIds.push(e._id);
|
|
2311
|
+
for (const e of deleted)
|
|
2312
|
+
allSuccessIds.push(e._id);
|
|
2286
2313
|
if (allSuccessIds.length > 0) {
|
|
2287
2314
|
await this.dexieDb.clearDirtyChangesBatch(collection, allSuccessIds);
|
|
2288
2315
|
}
|
|
2289
2316
|
let collectionSentCount = 0;
|
|
2290
|
-
const insertedAndUpdated =
|
|
2317
|
+
const insertedAndUpdated = inserted.concat(updated);
|
|
2291
2318
|
if (insertedAndUpdated.length > 0) {
|
|
2292
|
-
const idsToCheck =
|
|
2319
|
+
const idsToCheck = [];
|
|
2320
|
+
for (const e of insertedAndUpdated)
|
|
2321
|
+
idsToCheck.push(e._id);
|
|
2293
2322
|
const dexieItems = await this.dexieDb.getByIds(collection, idsToCheck);
|
|
2294
2323
|
const dexieSaveBatch = [];
|
|
2295
2324
|
const inMemUpdateBatch = [];
|
|
@@ -2297,11 +2326,9 @@ class SyncEngine {
|
|
|
2297
2326
|
const entity = insertedAndUpdated[i];
|
|
2298
2327
|
const dexieItem = dexieItems[i];
|
|
2299
2328
|
if (dexieItem) {
|
|
2300
|
-
|
|
2301
|
-
|
|
2302
|
-
|
|
2303
|
-
_ts: entity._ts
|
|
2304
|
-
});
|
|
2329
|
+
dexieItem._rev = entity._rev;
|
|
2330
|
+
dexieItem._ts = entity._ts;
|
|
2331
|
+
dexieSaveBatch.push(dexieItem);
|
|
2305
2332
|
if (!dexieItem._deleted) {
|
|
2306
2333
|
const inMemItem = this.deps.getInMemById(collection, entity._id);
|
|
2307
2334
|
if (inMemItem) {
|
|
@@ -2324,21 +2351,27 @@ class SyncEngine {
|
|
|
2324
2351
|
collectionSentCount += insertedAndUpdated.length;
|
|
2325
2352
|
}
|
|
2326
2353
|
if (deleted.length > 0) {
|
|
2327
|
-
const deleteIds =
|
|
2354
|
+
const deleteIds = [];
|
|
2355
|
+
const deleteDbEntities = [];
|
|
2356
|
+
for (const e of deleted) {
|
|
2357
|
+
deleteIds.push(e._id);
|
|
2358
|
+
deleteDbEntities.push({ _id: e._id });
|
|
2359
|
+
}
|
|
2328
2360
|
await this.dexieDb.deleteMany(collection, deleteIds);
|
|
2329
|
-
this.deps.writeToInMemBatch(collection,
|
|
2361
|
+
this.deps.writeToInMemBatch(collection, deleteDbEntities, "delete");
|
|
2330
2362
|
sentCount += deleted.length;
|
|
2331
2363
|
collectionSentCount += deleted.length;
|
|
2332
2364
|
}
|
|
2333
2365
|
if (collectionSentCount > 0) {
|
|
2334
2366
|
collectionSentCounts[collection] = collectionSentCount;
|
|
2335
2367
|
}
|
|
2336
|
-
const allItems = [...inserted, ...updated, ...deleted];
|
|
2337
2368
|
let maxTs = undefined;
|
|
2338
|
-
for (const
|
|
2339
|
-
|
|
2340
|
-
if (
|
|
2341
|
-
maxTs
|
|
2369
|
+
for (const arr of [inserted, updated, deleted]) {
|
|
2370
|
+
for (const item of arr) {
|
|
2371
|
+
if (item._ts) {
|
|
2372
|
+
if (!maxTs || this.compareTimestamps(item._ts, maxTs) > 0) {
|
|
2373
|
+
maxTs = item._ts;
|
|
2374
|
+
}
|
|
2342
2375
|
}
|
|
2343
2376
|
}
|
|
2344
2377
|
}
|
|
@@ -2399,11 +2432,13 @@ class SyncEngine {
|
|
|
2399
2432
|
let sentCount = 0;
|
|
2400
2433
|
for (const result of results) {
|
|
2401
2434
|
const { results: { inserted, updated, deleted } } = result;
|
|
2402
|
-
const allSuccessIds = [
|
|
2403
|
-
|
|
2404
|
-
|
|
2405
|
-
|
|
2406
|
-
|
|
2435
|
+
const allSuccessIds = [];
|
|
2436
|
+
for (const e of inserted)
|
|
2437
|
+
allSuccessIds.push(e._id);
|
|
2438
|
+
for (const e of updated)
|
|
2439
|
+
allSuccessIds.push(e._id);
|
|
2440
|
+
for (const e of deleted)
|
|
2441
|
+
allSuccessIds.push(e._id);
|
|
2407
2442
|
if (allSuccessIds.length > 0) {
|
|
2408
2443
|
await this.dexieDb.clearDirtyChangesBatch(collection, allSuccessIds);
|
|
2409
2444
|
}
|
|
@@ -2432,7 +2467,9 @@ class SyncEngine {
|
|
|
2432
2467
|
const BATCH = SyncEngine.SYNC_BATCH_SIZE;
|
|
2433
2468
|
for (let offset = 0;offset < serverData.length; offset += BATCH) {
|
|
2434
2469
|
const chunk = serverData.slice(offset, offset + BATCH);
|
|
2435
|
-
const chunkIds =
|
|
2470
|
+
const chunkIds = [];
|
|
2471
|
+
for (const item of chunk)
|
|
2472
|
+
chunkIds.push(item._id);
|
|
2436
2473
|
const localItems = await this.dexieDb.getByIds(collectionName, chunkIds);
|
|
2437
2474
|
const dirtyChangesMap = await this.dexieDb.getDirtyChangesBatch(collectionName, chunkIds);
|
|
2438
2475
|
const dexieBatch = [];
|
|
@@ -2442,6 +2479,7 @@ class SyncEngine {
|
|
|
2442
2479
|
const serverItem = chunk[i];
|
|
2443
2480
|
const localItem = localItems[i];
|
|
2444
2481
|
const dirtyChange = dirtyChangesMap.get(String(serverItem._id));
|
|
2482
|
+
allUpdatedIds.push(String(serverItem._id));
|
|
2445
2483
|
if (serverItem._ts) {
|
|
2446
2484
|
if (!maxTs || this.compareTimestamps(serverItem._ts, maxTs) > 0) {
|
|
2447
2485
|
maxTs = serverItem._ts;
|
|
@@ -2481,9 +2519,6 @@ class SyncEngine {
|
|
|
2481
2519
|
if (inMemDeleteIds.length > 0) {
|
|
2482
2520
|
this.deps.writeToInMemBatch(collectionName, inMemDeleteIds.map((id) => ({ _id: id })), "delete");
|
|
2483
2521
|
}
|
|
2484
|
-
for (const id of chunkIds) {
|
|
2485
|
-
allUpdatedIds.push(String(id));
|
|
2486
|
-
}
|
|
2487
2522
|
}
|
|
2488
2523
|
if (maxTs) {
|
|
2489
2524
|
await this.dexieDb.setSyncMeta(collectionName, maxTs);
|
|
@@ -2740,9 +2775,9 @@ class ServerUpdateHandler {
|
|
|
2740
2775
|
if (pendingChange) {
|
|
2741
2776
|
const newFields = this.getNewFieldsFromServer(localItem, serverDelta);
|
|
2742
2777
|
if (Object.keys(newFields).length > 0) {
|
|
2743
|
-
|
|
2778
|
+
Object.assign(pendingChange.data, newFields);
|
|
2744
2779
|
}
|
|
2745
|
-
const currentInMemState = {
|
|
2780
|
+
const currentInMemState = Object.assign({}, localItem, pendingChange.data);
|
|
2746
2781
|
const merged = this.mergeLocalWithDelta(currentInMemState, serverDelta);
|
|
2747
2782
|
if (!merged._deleted && !merged._archived) {
|
|
2748
2783
|
this.deps.writeToInMemBatch(collection, [this.stripLocalFields(merged)], "upsert");
|
|
@@ -2801,13 +2836,12 @@ class ServerUpdateHandler {
|
|
|
2801
2836
|
return false;
|
|
2802
2837
|
}
|
|
2803
2838
|
mergeLocalWithDelta(local, delta) {
|
|
2804
|
-
const result = { ...local };
|
|
2805
2839
|
for (const key of Object.keys(delta)) {
|
|
2806
2840
|
if (key === "_id" || key === "_dirty")
|
|
2807
2841
|
continue;
|
|
2808
|
-
|
|
2842
|
+
local[key] = delta[key];
|
|
2809
2843
|
}
|
|
2810
|
-
return
|
|
2844
|
+
return local;
|
|
2811
2845
|
}
|
|
2812
2846
|
getNewFieldsFromServer(local, server) {
|
|
2813
2847
|
const newFields = {};
|
|
@@ -3259,16 +3293,20 @@ class SyncedDb {
|
|
|
3259
3293
|
}
|
|
3260
3294
|
await this.pendingChanges.recoverPendingWrites();
|
|
3261
3295
|
for (const [name] of this.collections) {
|
|
3262
|
-
|
|
3263
|
-
|
|
3264
|
-
|
|
3265
|
-
|
|
3266
|
-
|
|
3267
|
-
|
|
3296
|
+
this.inMemManager.clearCollection(name);
|
|
3297
|
+
await this.dexieDb.forEachBatch(name, 2000, async (chunk) => {
|
|
3298
|
+
let writeIdx = 0;
|
|
3299
|
+
for (let i = 0;i < chunk.length; i++) {
|
|
3300
|
+
const item = chunk[i];
|
|
3301
|
+
if (!item._deleted && !item._archived) {
|
|
3302
|
+
chunk[writeIdx++] = item;
|
|
3303
|
+
}
|
|
3268
3304
|
}
|
|
3269
|
-
|
|
3270
|
-
|
|
3271
|
-
|
|
3305
|
+
chunk.length = writeIdx;
|
|
3306
|
+
if (chunk.length > 0) {
|
|
3307
|
+
this.inMemManager.writeBatch(name, chunk, "upsert");
|
|
3308
|
+
}
|
|
3309
|
+
});
|
|
3272
3310
|
const meta = await this.dexieDb.getSyncMeta(name);
|
|
3273
3311
|
if (meta) {
|
|
3274
3312
|
this.syncMetaCache.set(name, meta);
|
|
@@ -3430,26 +3468,48 @@ class SyncedDb {
|
|
|
3430
3468
|
await this.syncCollectionForFind(collection, query, opts);
|
|
3431
3469
|
}
|
|
3432
3470
|
const all = await this.dexieDb.getAll(collection);
|
|
3433
|
-
const
|
|
3434
|
-
|
|
3435
|
-
|
|
3436
|
-
|
|
3437
|
-
|
|
3438
|
-
|
|
3439
|
-
|
|
3440
|
-
|
|
3441
|
-
|
|
3442
|
-
|
|
3471
|
+
const returnDeleted = opts?.returnDeleted;
|
|
3472
|
+
const returnArchived = opts?.returnArchived;
|
|
3473
|
+
const hasSort = opts?.sort && Object.keys(opts.sort).length > 0;
|
|
3474
|
+
if (hasSort) {
|
|
3475
|
+
const filtered = [];
|
|
3476
|
+
for (const item of all) {
|
|
3477
|
+
if (!returnDeleted && item._deleted)
|
|
3478
|
+
continue;
|
|
3479
|
+
if (!returnArchived && item._archived)
|
|
3480
|
+
continue;
|
|
3481
|
+
if (!matchesQuery(item, query))
|
|
3482
|
+
continue;
|
|
3483
|
+
filtered.push(item);
|
|
3484
|
+
}
|
|
3485
|
+
if (filtered.length === 0) {
|
|
3486
|
+
if (opts?.referToServer && this.isOnline())
|
|
3487
|
+
this.referToServerSync(collection, query);
|
|
3488
|
+
return null;
|
|
3489
|
+
}
|
|
3490
|
+
const sorted = applyQueryOpts(filtered, { sort: opts.sort, project: opts?.project });
|
|
3491
|
+
if (opts?.referToServer && this.isOnline())
|
|
3443
3492
|
this.referToServerSync(collection, query);
|
|
3493
|
+
return sorted[0] ?? null;
|
|
3494
|
+
} else {
|
|
3495
|
+
let result = null;
|
|
3496
|
+
for (const item of all) {
|
|
3497
|
+
if (!returnDeleted && item._deleted)
|
|
3498
|
+
continue;
|
|
3499
|
+
if (!returnArchived && item._archived)
|
|
3500
|
+
continue;
|
|
3501
|
+
if (!matchesQuery(item, query))
|
|
3502
|
+
continue;
|
|
3503
|
+
result = item;
|
|
3504
|
+
break;
|
|
3444
3505
|
}
|
|
3445
|
-
|
|
3446
|
-
|
|
3447
|
-
|
|
3448
|
-
|
|
3449
|
-
|
|
3450
|
-
|
|
3506
|
+
if (result && opts?.project) {
|
|
3507
|
+
result = projectItem(result, opts.project);
|
|
3508
|
+
}
|
|
3509
|
+
if (opts?.referToServer && this.isOnline())
|
|
3510
|
+
this.referToServerSync(collection, query);
|
|
3511
|
+
return result;
|
|
3451
3512
|
}
|
|
3452
|
-
return result;
|
|
3453
3513
|
}
|
|
3454
3514
|
async find(collection, query, opts) {
|
|
3455
3515
|
this.assertCollection(collection);
|
|
@@ -3458,14 +3518,19 @@ class SyncedDb {
|
|
|
3458
3518
|
await this.syncCollectionForFind(collection, query, opts);
|
|
3459
3519
|
}
|
|
3460
3520
|
const all = await this.dexieDb.getAll(collection);
|
|
3461
|
-
const
|
|
3462
|
-
|
|
3463
|
-
|
|
3464
|
-
|
|
3465
|
-
|
|
3466
|
-
|
|
3467
|
-
|
|
3468
|
-
|
|
3521
|
+
const returnDeleted = opts?.returnDeleted;
|
|
3522
|
+
const returnArchived = opts?.returnArchived;
|
|
3523
|
+
const hasQuery = query && Object.keys(query).length > 0;
|
|
3524
|
+
const filtered = [];
|
|
3525
|
+
for (const item of all) {
|
|
3526
|
+
if (!returnDeleted && item._deleted)
|
|
3527
|
+
continue;
|
|
3528
|
+
if (!returnArchived && item._archived)
|
|
3529
|
+
continue;
|
|
3530
|
+
if (hasQuery && !matchesQuery(item, query))
|
|
3531
|
+
continue;
|
|
3532
|
+
filtered.push(item);
|
|
3533
|
+
}
|
|
3469
3534
|
const result = applyQueryOpts(filtered, opts);
|
|
3470
3535
|
if (opts?.referToServer && this.isOnline()) {
|
|
3471
3536
|
this.referToServerSync(collection, query);
|
|
@@ -3938,7 +4003,9 @@ class DexieDb extends Dexie {
|
|
|
3938
4003
|
if (ids.length === 0)
|
|
3939
4004
|
return;
|
|
3940
4005
|
const table = this.getTable(collection);
|
|
3941
|
-
const keys =
|
|
4006
|
+
const keys = [];
|
|
4007
|
+
for (const id of ids)
|
|
4008
|
+
keys.push(this.idToString(id));
|
|
3942
4009
|
await table.bulkDelete(keys);
|
|
3943
4010
|
}
|
|
3944
4011
|
async saveCollection(collection, data) {
|
|
@@ -3961,36 +4028,50 @@ class DexieDb extends Dexie {
|
|
|
3961
4028
|
if (ids.length === 0)
|
|
3962
4029
|
return [];
|
|
3963
4030
|
const table = this.getTable(collection);
|
|
3964
|
-
const keys =
|
|
4031
|
+
const keys = [];
|
|
4032
|
+
for (const id of ids)
|
|
4033
|
+
keys.push(this.idToString(id));
|
|
3965
4034
|
return await table.bulkGet(keys);
|
|
3966
4035
|
}
|
|
3967
4036
|
async getAll(collection) {
|
|
3968
4037
|
const table = this.getTable(collection);
|
|
3969
4038
|
return await table.toArray();
|
|
3970
4039
|
}
|
|
4040
|
+
async forEachBatch(collection, batchSize, callback) {
|
|
4041
|
+
const table = this.getTable(collection);
|
|
4042
|
+
let offset = 0;
|
|
4043
|
+
while (true) {
|
|
4044
|
+
const items = await table.offset(offset).limit(batchSize).toArray();
|
|
4045
|
+
if (items.length === 0)
|
|
4046
|
+
break;
|
|
4047
|
+
await callback(items);
|
|
4048
|
+
if (items.length < batchSize)
|
|
4049
|
+
break;
|
|
4050
|
+
offset += items.length;
|
|
4051
|
+
}
|
|
4052
|
+
}
|
|
3971
4053
|
async count(collection) {
|
|
3972
4054
|
const table = this.getTable(collection);
|
|
3973
4055
|
return await table.count();
|
|
3974
4056
|
}
|
|
3975
4057
|
async getDirty(collection) {
|
|
3976
4058
|
const dirtyEntries = await this.dirtyChanges.where("[collection+id]").between([collection, Dexie.minKey], [collection, Dexie.maxKey]).toArray();
|
|
3977
|
-
|
|
3978
|
-
|
|
3979
|
-
|
|
3980
|
-
|
|
3981
|
-
|
|
3982
|
-
}
|
|
4059
|
+
const result = [];
|
|
4060
|
+
for (const entry of dirtyEntries) {
|
|
4061
|
+
const obj = { _id: entry.id, _ts: entry.baseTs, _rev: entry.baseRev };
|
|
4062
|
+
Object.assign(obj, entry.changes);
|
|
4063
|
+
result.push(obj);
|
|
4064
|
+
}
|
|
4065
|
+
return result;
|
|
3983
4066
|
}
|
|
3984
4067
|
async addDirtyChange(collection, id, changes, baseMeta) {
|
|
3985
4068
|
const stringId = this.idToString(id);
|
|
3986
4069
|
const existing = await this.dirtyChanges.get([collection, stringId]);
|
|
3987
4070
|
const now = Date.now();
|
|
3988
4071
|
if (existing) {
|
|
3989
|
-
|
|
3990
|
-
|
|
3991
|
-
|
|
3992
|
-
updatedAt: now
|
|
3993
|
-
});
|
|
4072
|
+
Object.assign(existing.changes, changes);
|
|
4073
|
+
existing.updatedAt = now;
|
|
4074
|
+
await this.dirtyChanges.put(existing);
|
|
3994
4075
|
} else {
|
|
3995
4076
|
await this.dirtyChanges.put({
|
|
3996
4077
|
collection,
|
|
@@ -4007,7 +4088,9 @@ class DexieDb extends Dexie {
|
|
|
4007
4088
|
if (changesList.length === 0)
|
|
4008
4089
|
return;
|
|
4009
4090
|
const now = Date.now();
|
|
4010
|
-
const keys =
|
|
4091
|
+
const keys = [];
|
|
4092
|
+
for (const c of changesList)
|
|
4093
|
+
keys.push([collection, this.idToString(c.id)]);
|
|
4011
4094
|
const existingEntries = await this.dirtyChanges.bulkGet(keys);
|
|
4012
4095
|
const toWrite = [];
|
|
4013
4096
|
for (let i = 0;i < changesList.length; i++) {
|
|
@@ -4015,11 +4098,9 @@ class DexieDb extends Dexie {
|
|
|
4015
4098
|
const stringId = this.idToString(changeItem.id);
|
|
4016
4099
|
const existing = existingEntries[i];
|
|
4017
4100
|
if (existing) {
|
|
4018
|
-
|
|
4019
|
-
|
|
4020
|
-
|
|
4021
|
-
updatedAt: now
|
|
4022
|
-
});
|
|
4101
|
+
Object.assign(existing.changes, changeItem.changes);
|
|
4102
|
+
existing.updatedAt = now;
|
|
4103
|
+
toWrite.push(existing);
|
|
4023
4104
|
} else {
|
|
4024
4105
|
toWrite.push({
|
|
4025
4106
|
collection,
|
|
@@ -4042,7 +4123,9 @@ class DexieDb extends Dexie {
|
|
|
4042
4123
|
const result = new Map;
|
|
4043
4124
|
if (ids.length === 0)
|
|
4044
4125
|
return result;
|
|
4045
|
-
const keys =
|
|
4126
|
+
const keys = [];
|
|
4127
|
+
for (const id of ids)
|
|
4128
|
+
keys.push([collection, this.idToString(id)]);
|
|
4046
4129
|
const entries = await this.dirtyChanges.bulkGet(keys);
|
|
4047
4130
|
for (let i = 0;i < ids.length; i++) {
|
|
4048
4131
|
const entry = entries[i];
|
|
@@ -4059,7 +4142,9 @@ class DexieDb extends Dexie {
|
|
|
4059
4142
|
async clearDirtyChangesBatch(collection, ids) {
|
|
4060
4143
|
if (ids.length === 0)
|
|
4061
4144
|
return;
|
|
4062
|
-
const keys =
|
|
4145
|
+
const keys = [];
|
|
4146
|
+
for (const id of ids)
|
|
4147
|
+
keys.push([collection, this.idToString(id)]);
|
|
4063
4148
|
await this.dirtyChanges.bulkDelete(keys);
|
|
4064
4149
|
}
|
|
4065
4150
|
async clearDirtyChanges(collection) {
|
|
@@ -6227,6 +6312,47 @@ var unpack2 = (x) => unpackr.unpack(x);
|
|
|
6227
6312
|
var DEFAULT_TIMEOUT = 5000;
|
|
6228
6313
|
var DEFAULT_PROGRESS_CHUNK_SIZE = 16 * 1024;
|
|
6229
6314
|
|
|
6315
|
+
class StreamBuffer {
|
|
6316
|
+
buf;
|
|
6317
|
+
rPos = 0;
|
|
6318
|
+
wPos = 0;
|
|
6319
|
+
constructor(initialCapacity = 64 * 1024) {
|
|
6320
|
+
this.buf = new Uint8Array(initialCapacity);
|
|
6321
|
+
}
|
|
6322
|
+
get length() {
|
|
6323
|
+
return this.wPos - this.rPos;
|
|
6324
|
+
}
|
|
6325
|
+
append(data) {
|
|
6326
|
+
if (this.rPos > 0 && this.wPos + data.length > this.buf.length) {
|
|
6327
|
+
this.buf.copyWithin(0, this.rPos, this.wPos);
|
|
6328
|
+
this.wPos -= this.rPos;
|
|
6329
|
+
this.rPos = 0;
|
|
6330
|
+
}
|
|
6331
|
+
if (this.wPos + data.length > this.buf.length) {
|
|
6332
|
+
const newSize = Math.max(this.buf.length * 2, this.wPos + data.length);
|
|
6333
|
+
const newBuf = new Uint8Array(newSize);
|
|
6334
|
+
newBuf.set(this.buf.subarray(0, this.wPos));
|
|
6335
|
+
this.buf = newBuf;
|
|
6336
|
+
}
|
|
6337
|
+
this.buf.set(data, this.wPos);
|
|
6338
|
+
this.wPos += data.length;
|
|
6339
|
+
}
|
|
6340
|
+
subarray(start, end) {
|
|
6341
|
+
return this.buf.subarray(this.rPos + start, this.rPos + end);
|
|
6342
|
+
}
|
|
6343
|
+
at(offset) {
|
|
6344
|
+
return this.buf[this.rPos + offset];
|
|
6345
|
+
}
|
|
6346
|
+
consume(n) {
|
|
6347
|
+
this.rPos += n;
|
|
6348
|
+
if (this.rPos > this.buf.length >>> 1) {
|
|
6349
|
+
this.buf.copyWithin(0, this.rPos, this.wPos);
|
|
6350
|
+
this.wPos -= this.rPos;
|
|
6351
|
+
this.rPos = 0;
|
|
6352
|
+
}
|
|
6353
|
+
}
|
|
6354
|
+
}
|
|
6355
|
+
|
|
6230
6356
|
class RestProxy {
|
|
6231
6357
|
endpoint;
|
|
6232
6358
|
tenant;
|
|
@@ -6386,6 +6512,129 @@ class RestProxy {
|
|
|
6386
6512
|
async findNewerMany(spec) {
|
|
6387
6513
|
return await this.restCall("findNewerMany", { spec });
|
|
6388
6514
|
}
|
|
6515
|
+
async findNewerManyStream(spec, onChunk, options) {
|
|
6516
|
+
const connectTimeout = options?.timeoutMs ?? this.defaultTimeoutMs;
|
|
6517
|
+
const activityTimeout = options?.activityTimeoutMs ?? 30000;
|
|
6518
|
+
const externalSignal = options?.signal ?? this.globalSignal;
|
|
6519
|
+
const startTime = this.timeRequests ? performance.now() : 0;
|
|
6520
|
+
const data = {
|
|
6521
|
+
payload: {
|
|
6522
|
+
db: this.tenant,
|
|
6523
|
+
operation: "findNewerMany",
|
|
6524
|
+
spec
|
|
6525
|
+
},
|
|
6526
|
+
audit: {
|
|
6527
|
+
tenant: this.tenant,
|
|
6528
|
+
user: this.audit.user,
|
|
6529
|
+
naprava: this.audit.device
|
|
6530
|
+
}
|
|
6531
|
+
};
|
|
6532
|
+
const body = pack2(data);
|
|
6533
|
+
const requestUrl = this.apiKey ? `${this.endpoint}?apikey=${this.apiKey}&stream=1` : `${this.endpoint}?stream=1`;
|
|
6534
|
+
const controller = new AbortController;
|
|
6535
|
+
let timeoutId = setTimeout(() => controller.abort(), connectTimeout);
|
|
6536
|
+
const combinedSignal = externalSignal ? this.combineSignals(externalSignal, controller.signal) : controller.signal;
|
|
6537
|
+
try {
|
|
6538
|
+
const response = await fetch(requestUrl, {
|
|
6539
|
+
method: "POST",
|
|
6540
|
+
headers: { "Content-Type": "application/octet-stream" },
|
|
6541
|
+
body,
|
|
6542
|
+
signal: combinedSignal
|
|
6543
|
+
});
|
|
6544
|
+
clearTimeout(timeoutId);
|
|
6545
|
+
timeoutId = undefined;
|
|
6546
|
+
if (!response.ok) {
|
|
6547
|
+
const errorText = await response.text();
|
|
6548
|
+
throw new Error(`REST call failed: ${response.status} - ${errorText}`);
|
|
6549
|
+
}
|
|
6550
|
+
const resetActivity = () => {
|
|
6551
|
+
if (timeoutId !== undefined)
|
|
6552
|
+
clearTimeout(timeoutId);
|
|
6553
|
+
timeoutId = setTimeout(() => controller.abort(), activityTimeout);
|
|
6554
|
+
};
|
|
6555
|
+
resetActivity();
|
|
6556
|
+
await this.parseStreamingResponse(response, onChunk, resetActivity);
|
|
6557
|
+
if (timeoutId !== undefined)
|
|
6558
|
+
clearTimeout(timeoutId);
|
|
6559
|
+
timeoutId = undefined;
|
|
6560
|
+
if (this.timeRequests) {
|
|
6561
|
+
const elapsed = performance.now() - startTime;
|
|
6562
|
+
this._lastRequestMs = elapsed;
|
|
6563
|
+
this._totalRequestMs += elapsed;
|
|
6564
|
+
this._requestCount++;
|
|
6565
|
+
if (this.timeRequestsPrint) {
|
|
6566
|
+
console.log(`[RestProxy] findNewerManyStream: ${elapsed.toFixed(2)}ms (total: ${this._totalRequestMs.toFixed(2)}ms, count: ${this._requestCount})`);
|
|
6567
|
+
}
|
|
6568
|
+
}
|
|
6569
|
+
} catch (err) {
|
|
6570
|
+
if (timeoutId !== undefined)
|
|
6571
|
+
clearTimeout(timeoutId);
|
|
6572
|
+
if (err.name === "AbortError") {
|
|
6573
|
+
if (controller.signal.aborted && !externalSignal?.aborted) {
|
|
6574
|
+
throw new Error(`REST call timeout: findNewerManyStream`);
|
|
6575
|
+
}
|
|
6576
|
+
throw new Error("REST call aborted: findNewerManyStream");
|
|
6577
|
+
}
|
|
6578
|
+
throw err;
|
|
6579
|
+
}
|
|
6580
|
+
}
|
|
6581
|
+
/**
 * Parse a findNewerMany streaming response, auto-detecting the wire format.
 *
 * Streaming chunk layout (per the RestProxy declaration docs):
 *   [type:1][nameLen:2 BE][name:N][dataLen:4 BE][msgpack(items[]):dataLen]
 * where type 0x01 = data chunk and type 0x00 = end-of-stream.
 * Legacy format: the entire body is one msgpack-encoded map
 *   { collectionName: items[] } (first byte is a msgpack marker, not 0x00/0x01).
 *
 * @param response   fetch Response; its body is read incrementally via a reader
 * @param onChunk    async (collection, items) => void — awaited once per chunk
 * @param onActivity invoked each time bytes arrive (feeds the activity timeout)
 */
async parseStreamingResponse(response, onChunk, onActivity) {
  const reader = response.body.getReader();
  const buffer = new StreamBuffer;
  const decoder2 = new TextDecoder;
  // Pull the next network chunk into the buffer; false when the stream ends.
  const readMore = async () => {
    const { done, value } = await reader.read();
    if (done)
      return false;
    // Signal liveness before buffering so activity timeouts reset promptly.
    onActivity();
    buffer.append(value);
    return true;
  };
  // Need at least one byte to sniff the format.
  while (buffer.length < 1) {
    if (!await readMore())
      return;
  }
  const firstByte = buffer.at(0);
  if (firstByte !== 0 && firstByte !== 1) {
    // Legacy (non-streaming) response: drain the whole body, then unpack
    // a single msgpack map of collection -> items.
    while (await readMore()) {}
    const result = unpack2(buffer.subarray(0, buffer.length));
    for (const [collection, items] of Object.entries(result)) {
      if (items.length > 0) {
        await onChunk(collection, items);
      }
    }
    return;
  }
  // Streaming format: loop over [type][nameLen][name][dataLen][payload] chunks.
  while (true) {
    while (buffer.length < 1) {
      if (!await readMore())
        return;
    }
    // type 0x00 marks a clean end-of-stream.
    if (buffer.at(0) === 0)
      return;
    // Need type (1) + nameLen (2) before the name length can be read.
    while (buffer.length < 3) {
      if (!await readMore())
        throw new Error("Unexpected end of stream in chunk header");
    }
    // Big-endian u16 collection-name length.
    const nameLen = buffer.at(1) << 8 | buffer.at(2);
    // Full header: type(1) + nameLen(2) + name(nameLen) + dataLen(4).
    const headerSize = 1 + 2 + nameLen + 4;
    while (buffer.length < headerSize) {
      if (!await readMore())
        throw new Error("Unexpected end of stream in chunk header");
    }
    const collection = decoder2.decode(buffer.subarray(3, 3 + nameLen));
    const dataOffset = 3 + nameLen;
    // Big-endian u32 payload length. NOTE(review): `<< 24` goes negative for
    // lengths >= 2 GiB; chunks here are small (~200 docs) so it cannot trigger.
    const dataLen = buffer.at(dataOffset) << 24 | buffer.at(dataOffset + 1) << 16 | buffer.at(dataOffset + 2) << 8 | buffer.at(dataOffset + 3);
    const totalChunkSize = headerSize + dataLen;
    while (buffer.length < totalChunkSize) {
      if (!await readMore())
        throw new Error("Unexpected end of stream in chunk data");
    }
    // Decode the msgpack payload, release its bytes, then hand items to caller.
    const items = unpack2(buffer.subarray(headerSize, totalChunkSize));
    buffer.consume(totalChunkSize);
    await onChunk(collection, items);
  }
}
|
|
6389
6638
|
async deleteOne(collection, query) {
|
|
6390
6639
|
return await this.restCall("deleteOne", { collection, query });
|
|
6391
6640
|
}
|
package/dist/src/db/DexieDb.d.ts
CHANGED
|
@@ -25,6 +25,7 @@ export declare class DexieDb extends Dexie implements I_DexieDb {
|
|
|
25
25
|
getById<T extends LocalDbEntity>(collection: string, id: Id): Promise<T | undefined>;
|
|
26
26
|
getByIds<T extends LocalDbEntity>(collection: string, ids: Id[]): Promise<(T | undefined)[]>;
|
|
27
27
|
getAll<T extends LocalDbEntity>(collection: string): Promise<T[]>;
|
|
28
|
+
forEachBatch<T extends LocalDbEntity>(collection: string, batchSize: number, callback: (items: T[]) => Promise<void>): Promise<void>;
|
|
28
29
|
count(collection: string): Promise<number>;
|
|
29
30
|
getDirty<T extends LocalDbEntity>(collection: string): Promise<Partial<T>[]>;
|
|
30
31
|
addDirtyChange(collection: string, id: Id, changes: Record<string, any>, baseMeta?: {
|
|
@@ -101,6 +101,28 @@ export declare class RestProxy implements I_RestInterface {
|
|
|
101
101
|
findByIds<T>(collection: string, ids: Id[]): Promise<T[]>;
|
|
102
102
|
findNewer<T>(collection: string, timestamp: Timestamp | number | string | Date, query?: QuerySpec<T>, opts?: QueryOpts): Promise<T[]>;
|
|
103
103
|
findNewerMany<T>(spec?: GetNewerSpec<T>[]): Promise<Record<string, any[]>>;
|
|
104
|
+
/**
|
|
105
|
+
* Streaming variant of findNewerMany.
|
|
106
|
+
* Reads chunked binary response and calls onChunk for each batch.
|
|
107
|
+
* Peak memory = one chunk (~200 docs) instead of entire result set.
|
|
108
|
+
*
|
|
109
|
+
* Binary chunk format:
|
|
110
|
+
* [type:1][nameLen:2][name:N][dataLen:4][msgpack(items[]):M]
|
|
111
|
+
* type=0x01 for data, type=0x00 for end-of-stream.
|
|
112
|
+
*/
|
|
113
|
+
findNewerManyStream<T>(spec: GetNewerSpec<T>[], onChunk: (collection: string, items: T[]) => Promise<void>, options?: {
|
|
114
|
+
timeoutMs?: number;
|
|
115
|
+
signal?: AbortSignal;
|
|
116
|
+
activityTimeoutMs?: number;
|
|
117
|
+
}): Promise<void>;
|
|
118
|
+
/**
|
|
119
|
+
* Parse streaming response. Auto-detects format:
|
|
120
|
+
* - Streaming: first byte is 0x00 (end) or 0x01 (data chunk)
|
|
121
|
+
* - Legacy msgpack: first byte is msgpack type marker (0x80+ for map, etc.)
|
|
122
|
+
*
|
|
123
|
+
* Streaming chunk format: [type:1][nameLen:2][name:N][dataLen:4][msgpack(items[]):M]
|
|
124
|
+
*/
|
|
125
|
+
private parseStreamingResponse;
|
|
104
126
|
deleteOne<T>(collection: string, query: QuerySpec<T>): Promise<T>;
|
|
105
127
|
aggregate<T>(collection: string, pipeline: object[], opts?: AggregateOptions): Promise<T[]>;
|
|
106
128
|
upsertBatch<T>(collection: string, batch: BatchSpec<T>): Promise<T[]>;
|
|
@@ -38,6 +38,7 @@ export declare class ServerUpdateHandler implements I_ServerUpdateHandler {
|
|
|
38
38
|
handleServerItemDelete(collection: string, id: Id): Promise<void>;
|
|
39
39
|
private stripLocalFields;
|
|
40
40
|
private timestampsEqual;
|
|
41
|
+
/** @mutates local — mutates the input object in place instead of copying it */
|
|
41
42
|
private mergeLocalWithDelta;
|
|
42
43
|
private getNewFieldsFromServer;
|
|
43
44
|
private callOnWsNotification;
|
|
@@ -64,6 +64,8 @@ export interface I_DexieDb {
|
|
|
64
64
|
getByIds<T extends LocalDbEntity>(collection: string, ids: Id[]): Promise<(T | undefined)[]>;
|
|
65
65
|
/** Vrne vse objekte v kolekciji */
|
|
66
66
|
getAll<T extends LocalDbEntity>(collection: string): Promise<T[]>;
|
|
67
|
+
/** Iterates over the collection in batches without loading all records at once */
|
|
68
|
+
forEachBatch<T extends LocalDbEntity>(collection: string, batchSize: number, callback: (items: T[]) => Promise<void>): Promise<void>;
|
|
67
69
|
/** Vrne vse dirty objekte (z lokalnimi spremembami) - returns only changed fields + _id + metadata */
|
|
68
70
|
getDirty<T extends LocalDbEntity>(collection: string): Promise<Partial<T>[]>;
|
|
69
71
|
/** Add or accumulate changes for a record */
|
|
@@ -67,6 +67,11 @@ export interface I_RestInterface {
|
|
|
67
67
|
findByIds<T>(collection: string, ids: Id[]): Promise<T[]>;
|
|
68
68
|
findNewer<T>(collection: string, timestamp: Timestamp | number | string | Date, query?: QuerySpec<T>, opts?: QueryOpts): Promise<T[]>;
|
|
69
69
|
findNewerMany<T>(spec?: GetNewerSpec<T>[]): Promise<Record<string, any[]>>;
|
|
70
|
+
/** Streaming variant of findNewerMany. Calls onChunk for each batch of items as they arrive. */
|
|
71
|
+
findNewerManyStream<T>(spec: GetNewerSpec<T>[], onChunk: (collection: string, items: T[]) => Promise<void>, options?: {
|
|
72
|
+
timeoutMs?: number;
|
|
73
|
+
signal?: AbortSignal;
|
|
74
|
+
}): Promise<void>;
|
|
70
75
|
deleteOne<T>(collection: string, query: QuerySpec<T>): Promise<T>;
|
|
71
76
|
/** Izvede agregacijo na serverju */
|
|
72
77
|
aggregate<T>(collection: string, pipeline: object[], opts?: AggregateOptions): Promise<T[]>;
|
|
@@ -206,8 +206,6 @@ export interface CollectionSyncStats {
|
|
|
206
206
|
receivedCount: number;
|
|
207
207
|
/** Number of dirty items sent to server for this collection */
|
|
208
208
|
sentCount: number;
|
|
209
|
-
/** @deprecated Use receivedCount instead. Will be empty array in future streaming mode. */
|
|
210
|
-
receivedItems: LocalDbEntity[];
|
|
211
209
|
}
|
|
212
210
|
/**
|
|
213
211
|
* Informacije o sinhronizaciji za debugging/logging
|