@powersync/service-core 0.4.2 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/entry/cli-entry.js +2 -1
- package/dist/entry/cli-entry.js.map +1 -1
- package/dist/entry/commands/compact-action.d.ts +2 -0
- package/dist/entry/commands/compact-action.js +48 -0
- package/dist/entry/commands/compact-action.js.map +1 -0
- package/dist/entry/entry-index.d.ts +1 -0
- package/dist/entry/entry-index.js +1 -0
- package/dist/entry/entry-index.js.map +1 -1
- package/dist/storage/BucketStorage.d.ts +31 -1
- package/dist/storage/BucketStorage.js.map +1 -1
- package/dist/storage/mongo/MongoCompactor.d.ts +40 -0
- package/dist/storage/mongo/MongoCompactor.js +292 -0
- package/dist/storage/mongo/MongoCompactor.js.map +1 -0
- package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +3 -2
- package/dist/storage/mongo/MongoSyncBucketStorage.js +19 -13
- package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/mongo/models.d.ts +5 -4
- package/dist/storage/mongo/models.js.map +1 -1
- package/dist/storage/mongo/util.d.ts +3 -0
- package/dist/storage/mongo/util.js +22 -0
- package/dist/storage/mongo/util.js.map +1 -1
- package/dist/sync/sync.js +20 -7
- package/dist/sync/sync.js.map +1 -1
- package/package.json +4 -4
- package/src/entry/cli-entry.ts +2 -1
- package/src/entry/commands/compact-action.ts +54 -0
- package/src/entry/entry-index.ts +1 -0
- package/src/storage/BucketStorage.ts +36 -1
- package/src/storage/mongo/MongoCompactor.ts +371 -0
- package/src/storage/mongo/MongoSyncBucketStorage.ts +25 -14
- package/src/storage/mongo/models.ts +5 -4
- package/src/storage/mongo/util.ts +25 -0
- package/src/sync/sync.ts +20 -7
- package/test/src/__snapshots__/sync.test.ts.snap +85 -0
- package/test/src/bucket_validation.test.ts +142 -0
- package/test/src/bucket_validation.ts +116 -0
- package/test/src/compacting.test.ts +207 -0
- package/test/src/data_storage.test.ts +19 -60
- package/test/src/slow_tests.test.ts +144 -102
- package/test/src/sync.test.ts +169 -29
- package/test/src/util.ts +65 -1
- package/test/src/wal_stream_utils.ts +13 -4
- package/tsconfig.tsbuildinfo +1 -1
package/CHANGELOG.md
CHANGED
package/dist/entry/cli-entry.js
CHANGED
```diff
@@ -1,7 +1,7 @@
 import { Command } from 'commander';
 import { registerMigrationAction } from './commands/migrate-action.js';
 import { registerTearDownAction } from './commands/teardown-action.js';
-import { registerStartAction } from './entry-index.js';
+import { registerCompactAction, registerStartAction } from './entry-index.js';
 import { logger } from '@powersync/lib-services-framework';
 /**
  * Generates a Commander program which serves as the entry point
@@ -14,6 +14,7 @@ export function generateEntryProgram(startHandlers) {
     entryProgram.name('powersync-runner').description('CLI to initiate a PowerSync service runner');
     registerTearDownAction(entryProgram);
     registerMigrationAction(entryProgram);
+    registerCompactAction(entryProgram);
     if (startHandlers) {
         registerStartAction(entryProgram, startHandlers);
     }
```
package/dist/entry/cli-entry.js.map
CHANGED
package/dist/entry/commands/compact-action.js
ADDED
```diff
@@ -0,0 +1,48 @@
+import { logger } from '@powersync/lib-services-framework';
+import * as v8 from 'v8';
+import { createPowerSyncMongo, MongoBucketStorage } from '../../storage/storage-index.js';
+import { loadConfig } from '../../util/config.js';
+import { extractRunnerOptions, wrapConfigCommand } from './config-command.js';
+const COMMAND_NAME = 'compact';
+/**
+ * Approximately max-old-space-size + 64MB.
+ */
+const HEAP_LIMIT = v8.getHeapStatistics().heap_size_limit;
+/**
+ * Subtract 128MB for process overhead.
+ *
+ * Limit to 1024MB overall.
+ */
+const COMPACT_MEMORY_LIMIT_MB = Math.min(HEAP_LIMIT / 1024 / 1024 - 128, 1024);
+export function registerCompactAction(program) {
+    const compactCommand = program.command(COMMAND_NAME);
+    wrapConfigCommand(compactCommand);
+    return compactCommand.description('Compact storage').action(async (options) => {
+        const runnerConfig = extractRunnerOptions(options);
+        const config = await loadConfig(runnerConfig);
+        const { storage } = config;
+        const psdb = createPowerSyncMongo(storage);
+        const client = psdb.client;
+        await client.connect();
+        try {
+            const bucketStorage = new MongoBucketStorage(psdb, { slot_name_prefix: config.slot_name_prefix });
+            const active = await bucketStorage.getActiveSyncRules();
+            if (active == null) {
+                logger.info('No active instance to compact');
+                return;
+            }
+            const p = bucketStorage.getInstance(active);
+            await p.compact({ memoryLimitMB: COMPACT_MEMORY_LIMIT_MB });
+            logger.info('done');
+        }
+        catch (e) {
+            logger.error(`Failed to compact: ${e.toString()}`);
+            process.exit(1);
+        }
+        finally {
+            await client.close();
+            process.exit(0);
+        }
+    });
+}
+//# sourceMappingURL=compact-action.js.map
```
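The `COMPACT_MEMORY_LIMIT_MB` calculation above ties the compact memory budget to the Node.js heap limit rather than a fixed setting. A rough worked sketch of the same arithmetic, assuming the process was started with `--max-old-space-size=512` (the 512 figure is only an illustrative input, not something the package sets):

```ts
import * as v8 from 'v8';

// heap_size_limit is approximately --max-old-space-size + 64MB (per the comment above),
// so with --max-old-space-size=512 this is roughly 576MB.
const heapLimitMB = v8.getHeapStatistics().heap_size_limit / 1024 / 1024;

// Subtract 128MB for process overhead and cap at 1024MB:
// min(576 - 128, 1024) = roughly 448MB passed as memoryLimitMB to compact().
const compactMemoryLimitMB = Math.min(heapLimitMB - 128, 1024);

console.log({ heapLimitMB, compactMemoryLimitMB });
```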
package/dist/entry/commands/compact-action.js.map
ADDED
package/dist/entry/entry-index.js
CHANGED
```diff
@@ -3,4 +3,5 @@ export * from './commands/config-command.js';
 export * from './commands/migrate-action.js';
 export * from './commands/start-action.js';
 export * from './commands/teardown-action.js';
+export * from './commands/compact-action.js';
 //# sourceMappingURL=entry-index.js.map
```
package/dist/entry/entry-index.js.map
CHANGED
package/dist/storage/BucketStorage.d.ts
CHANGED
```diff
@@ -175,7 +175,7 @@ export interface SyncRulesBucketStorage {
      * @param dataBuckets current bucket states
      * @param options batch size options
      */
-    getBucketDataBatch(checkpoint: util.OpId, dataBuckets: Map<string, string>, options?: BucketDataBatchOptions): AsyncIterable<
+    getBucketDataBatch(checkpoint: util.OpId, dataBuckets: Map<string, string>, options?: BucketDataBatchOptions): AsyncIterable<SyncBucketDataBatch>;
     /**
      * Compute checksums for a given list of buckets.
      *
@@ -206,6 +206,7 @@ export interface SyncRulesBucketStorage {
      * Errors are cleared on commit.
      */
     reportError(e: any): Promise<void>;
+    compact(options?: CompactOptions): Promise<void>;
 }
 export interface SyncRuleStatus {
     checkpoint_lsn: string | null;
@@ -307,4 +308,33 @@ export interface SaveDelete {
     before: SqliteRow;
     after?: undefined;
 }
+export interface SyncBucketDataBatch {
+    batch: util.SyncBucketData;
+    targetOp: bigint | null;
+}
 export declare function mergeToast(record: ToastableSqliteRow, persisted: ToastableSqliteRow): ToastableSqliteRow;
+export interface CompactOptions {
+    /**
+     * Heap memory limit for the compact process.
+     *
+     * Add around 64MB to this to determine the "--max-old-space-size" argument.
+     * Add another 80MB to get RSS usage / memory limits.
+     */
+    memoryLimitMB?: number;
+    /**
+     * If specified, ignore any operations newer than this when compacting.
+     *
+     * This is primarily for tests, where we want to test compacting at a specific
+     * point.
+     *
+     * This can also be used to create a "safe buffer" of recent operations that should
+     * not be compacted, to avoid invalidating checkpoints in use.
+     */
+    maxOpId?: bigint;
+    /**
+     * If specified, compact only the specific buckets.
+     *
+     * If not specified, compacts all buckets.
+     */
+    compactBuckets?: string[];
+}
```
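Together, the new `compact()` method on `SyncRulesBucketStorage` and the `CompactOptions` interface expose compaction programmatically as well as through the CLI. A minimal sketch of a caller, assuming the interface is re-exported from the package root and that `storage` is an already-initialized instance; the option values are illustrative only:

```ts
import { SyncRulesBucketStorage } from '@powersync/service-core';

async function compactStorage(storage: SyncRulesBucketStorage) {
  await storage.compact({
    // Rough heap budget for the op-id tracking map used while compacting.
    memoryLimitMB: 512,
    // Leave operations newer than this untouched, as a "safe buffer"
    // against invalidating checkpoints currently in use (illustrative value).
    maxOpId: 100000n,
    // Only compact these buckets; omit to compact all buckets (illustrative name).
    compactBuckets: ['global[]']
  });
}
```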
package/dist/storage/BucketStorage.js.map
CHANGED
package/dist/storage/mongo/MongoCompactor.d.ts
ADDED
```diff
@@ -0,0 +1,40 @@
+import { PowerSyncMongo } from './db.js';
+import { CompactOptions } from '../BucketStorage.js';
+/**
+ * Additional options, primarily for testing.
+ */
+export interface MongoCompactOptions extends CompactOptions {
+    /** Minimum of 2 */
+    clearBatchLimit?: number;
+    /** Minimum of 1 */
+    moveBatchLimit?: number;
+    /** Minimum of 1 */
+    moveBatchQueryLimit?: number;
+}
+export declare class MongoCompactor {
+    private db;
+    private group_id;
+    private updates;
+    private idLimitBytes;
+    private moveBatchLimit;
+    private moveBatchQueryLimit;
+    private clearBatchLimit;
+    private maxOpId;
+    private buckets;
+    constructor(db: PowerSyncMongo, group_id: number, options?: MongoCompactOptions);
+    /**
+     * Compact buckets by converting operations into MOVE and/or CLEAR operations.
+     *
+     * See /docs/compacting-operations.md for details.
+     */
+    compact(): Promise<void>;
+    compactInternal(bucket: string | undefined): Promise<void>;
+    private flush;
+    /**
+     * Perform a CLEAR compact for a bucket.
+     *
+     * @param bucket bucket name
+     * @param op op_id of the last non-PUT operation, which will be converted to CLEAR.
+     */
+    private clearBucket;
+}
```
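The extra fields on `MongoCompactOptions` are batching knobs, mainly so tests can force multiple query/flush/clear batches on small datasets. A sketch of how they might be passed, assuming a module that sits alongside the mongo storage files, with `db` a connected `PowerSyncMongo` instance and the group id a placeholder:

```ts
import { PowerSyncMongo } from './db.js';
import { MongoCompactor, MongoCompactOptions } from './MongoCompactor.js';

// Placeholder values chosen to exercise the batching paths on tiny datasets.
const options: MongoCompactOptions = {
  memoryLimitMB: 1, // minimal tracking budget
  moveBatchLimit: 1, // flush MOVE updates after every converted op
  moveBatchQueryLimit: 2, // fetch two documents per query batch
  clearBatchLimit: 2 // CLEAR in batches of two ops per transaction
};

export async function compactForTest(db: PowerSyncMongo, groupId = 1) {
  await new MongoCompactor(db, groupId, options).compact();
}
```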
package/dist/storage/mongo/MongoCompactor.js
ADDED
```diff
@@ -0,0 +1,292 @@
+import { logger } from '@powersync/lib-services-framework';
+import { MaxKey, MinKey } from 'mongodb';
+import { addChecksums } from '../../util/utils.js';
+const DEFAULT_CLEAR_BATCH_LIMIT = 5000;
+const DEFAULT_MOVE_BATCH_LIMIT = 2000;
+const DEFAULT_MOVE_BATCH_QUERY_LIMIT = 10000;
+/** This default is primarily for tests. */
+const DEFAULT_MEMORY_LIMIT_MB = 64;
+export class MongoCompactor {
+    constructor(db, group_id, options) {
+        this.db = db;
+        this.group_id = group_id;
+        this.updates = [];
+        this.idLimitBytes = (options?.memoryLimitMB ?? DEFAULT_MEMORY_LIMIT_MB) * 1024 * 1024;
+        this.moveBatchLimit = options?.moveBatchLimit ?? DEFAULT_MOVE_BATCH_LIMIT;
+        this.moveBatchQueryLimit = options?.moveBatchQueryLimit ?? DEFAULT_MOVE_BATCH_QUERY_LIMIT;
+        this.clearBatchLimit = options?.clearBatchLimit ?? DEFAULT_CLEAR_BATCH_LIMIT;
+        this.maxOpId = options?.maxOpId;
+        this.buckets = options?.compactBuckets;
+    }
+    /**
+     * Compact buckets by converting operations into MOVE and/or CLEAR operations.
+     *
+     * See /docs/compacting-operations.md for details.
+     */
+    async compact() {
+        if (this.buckets) {
+            for (let bucket of this.buckets) {
+                // We can make this more efficient later on by iterating
+                // through the buckets in a single query.
+                // That makes batching more tricky, so we leave for later.
+                await this.compactInternal(bucket);
+            }
+        }
+        else {
+            await this.compactInternal(undefined);
+        }
+    }
+    async compactInternal(bucket) {
+        const idLimitBytes = this.idLimitBytes;
+        let currentState = null;
+        // Constant lower bound
+        const lowerBound = {
+            g: this.group_id,
+            b: bucket ?? new MinKey(),
+            o: new MinKey()
+        };
+        // Upper bound is adjusted for each batch
+        let upperBound = {
+            g: this.group_id,
+            b: bucket ?? new MaxKey(),
+            o: new MaxKey()
+        };
+        while (true) {
+            // Query one batch at a time, to avoid cursor timeouts
+            const batch = await this.db.bucket_data
+                .find({
+                    _id: {
+                        $gte: lowerBound,
+                        $lt: upperBound
+                    }
+                }, {
+                    projection: {
+                        _id: 1,
+                        op: 1,
+                        table: 1,
+                        row_id: 1,
+                        source_table: 1,
+                        source_key: 1
+                    },
+                    limit: this.moveBatchQueryLimit,
+                    sort: { _id: -1 },
+                    singleBatch: true
+                })
+                .toArray();
+            if (batch.length == 0) {
+                // We've reached the end
+                break;
+            }
+            // Set upperBound for the next batch
+            upperBound = batch[batch.length - 1]._id;
+            for (let doc of batch) {
+                if (currentState == null || doc._id.b != currentState.bucket) {
+                    if (currentState != null && currentState.lastNotPut != null && currentState.opsSincePut >= 1) {
+                        // Important to flush before clearBucket()
+                        await this.flush();
+                        logger.info(`Inserting CLEAR at ${this.group_id}:${currentState.bucket}:${currentState.lastNotPut} to remove ${currentState.opsSincePut} operations`);
+                        const bucket = currentState.bucket;
+                        const clearOp = currentState.lastNotPut;
+                        // Free memory before clearing bucket
+                        currentState = null;
+                        await this.clearBucket(bucket, clearOp);
+                    }
+                    currentState = {
+                        bucket: doc._id.b,
+                        seen: new Map(),
+                        trackingSize: 0,
+                        lastNotPut: null,
+                        opsSincePut: 0
+                    };
+                }
+                if (this.maxOpId != null && doc._id.o > this.maxOpId) {
+                    continue;
+                }
+                let isPersistentPut = doc.op == 'PUT';
+                if (doc.op == 'REMOVE' || doc.op == 'PUT') {
+                    const key = `${doc.table}/${doc.row_id}/${doc.source_table}/${doc.source_key?.toHexString()}`;
+                    const targetOp = currentState.seen.get(key);
+                    if (targetOp) {
+                        // Will convert to MOVE, so don't count as PUT
+                        isPersistentPut = false;
+                        this.updates.push({
+                            updateOne: {
+                                filter: {
+                                    _id: doc._id
+                                },
+                                update: {
+                                    $set: {
+                                        op: 'MOVE',
+                                        target_op: targetOp
+                                    },
+                                    $unset: {
+                                        source_table: 1,
+                                        source_key: 1,
+                                        table: 1,
+                                        row_id: 1,
+                                        data: 1
+                                    }
+                                }
+                            }
+                        });
+                    }
+                    else {
+                        if (currentState.trackingSize >= idLimitBytes) {
+                            // Reached memory limit.
+                            // Keep the highest seen values in this case.
+                        }
+                        else {
+                            // flatstr reduces the memory usage by flattening the string
+                            currentState.seen.set(flatstr(key), doc._id.o);
+                            // length + 16 for the string
+                            // 24 for the bigint
+                            // 50 for map overhead
+                            // 50 for additional overhead
+                            currentState.trackingSize += key.length + 140;
+                        }
+                    }
+                }
+                if (isPersistentPut) {
+                    currentState.lastNotPut = null;
+                    currentState.opsSincePut = 0;
+                }
+                else if (doc.op != 'CLEAR') {
+                    if (currentState.lastNotPut == null) {
+                        currentState.lastNotPut = doc._id.o;
+                    }
+                    currentState.opsSincePut += 1;
+                }
+                if (this.updates.length >= this.moveBatchLimit) {
+                    await this.flush();
+                }
+            }
+        }
+        await this.flush();
+        currentState?.seen.clear();
+        if (currentState?.lastNotPut != null && currentState?.opsSincePut > 1) {
+            logger.info(`Inserting CLEAR at ${this.group_id}:${currentState.bucket}:${currentState.lastNotPut} to remove ${currentState.opsSincePut} operations`);
+            const bucket = currentState.bucket;
+            const clearOp = currentState.lastNotPut;
+            // Free memory before clearing bucket
+            currentState = null;
+            await this.clearBucket(bucket, clearOp);
+        }
+    }
+    async flush() {
+        if (this.updates.length > 0) {
+            logger.info(`Compacting ${this.updates.length} ops`);
+            await this.db.bucket_data.bulkWrite(this.updates, {
+                // Order is not important.
+                // Since checksums are not affected, these operations can happen in any order,
+                // and it's fine if the operations are partially applied.
+                // Each individual operation is atomic.
+                ordered: false
+            });
+            this.updates = [];
+        }
+    }
+    /**
+     * Perform a CLEAR compact for a bucket.
+     *
+     * @param bucket bucket name
+     * @param op op_id of the last non-PUT operation, which will be converted to CLEAR.
+     */
+    async clearBucket(bucket, op) {
+        const opFilter = {
+            _id: {
+                $gte: {
+                    g: this.group_id,
+                    b: bucket,
+                    o: new MinKey()
+                },
+                $lte: {
+                    g: this.group_id,
+                    b: bucket,
+                    o: op
+                }
+            }
+        };
+        const session = this.db.client.startSession();
+        try {
+            let done = false;
+            while (!done) {
+                // Do the CLEAR operation in batches, with each batch a separate transaction.
+                // The state after each batch is fully consistent.
+                // We need a transaction per batch to make sure checksums stay consistent.
+                await session.withTransaction(async () => {
+                    const query = this.db.bucket_data.find(opFilter, {
+                        session,
+                        sort: { _id: 1 },
+                        projection: {
+                            _id: 1,
+                            op: 1,
+                            checksum: 1,
+                            target_op: 1
+                        },
+                        limit: this.clearBatchLimit
+                    });
+                    let checksum = 0;
+                    let lastOpId = null;
+                    let targetOp = null;
+                    let gotAnOp = false;
+                    for await (let op of query.stream()) {
+                        if (op.op == 'MOVE' || op.op == 'REMOVE' || op.op == 'CLEAR') {
+                            checksum = addChecksums(checksum, op.checksum);
+                            lastOpId = op._id;
+                            if (op.op != 'CLEAR') {
+                                gotAnOp = true;
+                            }
+                            if (op.target_op != null) {
+                                if (targetOp == null || op.target_op > targetOp) {
+                                    targetOp = op.target_op;
+                                }
+                            }
+                        }
+                        else {
+                            throw new Error(`Unexpected ${op.op} operation at ${op._id.g}:${op._id.b}:${op._id.o}`);
+                        }
+                    }
+                    if (!gotAnOp) {
+                        done = true;
+                        return;
+                    }
+                    logger.info(`Flushing CLEAR at ${lastOpId?.o}`);
+                    await this.db.bucket_data.deleteMany({
+                        _id: {
+                            $gte: {
+                                g: this.group_id,
+                                b: bucket,
+                                o: new MinKey()
+                            },
+                            $lte: lastOpId
+                        }
+                    }, { session });
+                    await this.db.bucket_data.insertOne({
+                        _id: lastOpId,
+                        op: 'CLEAR',
+                        checksum: checksum,
+                        data: null,
+                        target_op: targetOp
+                    }, { session });
+                }, {
+                    writeConcern: { w: 'majority' },
+                    readConcern: { level: 'snapshot' }
+                });
+            }
+        }
+        finally {
+            await session.endSession();
+        }
+    }
+}
+/**
+ * Flattens string to reduce memory usage (around 320 bytes -> 120 bytes),
+ * at the cost of some upfront CPU usage.
+ *
+ * From: https://github.com/davidmarkclements/flatstr/issues/8
+ */
+function flatstr(s) {
+    s.match(/\n/g);
+    return s;
+}
+//# sourceMappingURL=MongoCompactor.js.map
```
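To make the effect of the implementation concrete, here is a hypothetical single-bucket operation log before and after compacting. Superseded PUT/REMOVE operations become MOVE operations whose `target_op` points at the operation that superseded them, and the run of non-PUT operations below the lowest remaining PUT is replaced by a single CLEAR carrying the combined checksum. Field names and row keys are simplified for illustration:

```ts
// Illustrative bucket contents before compacting (op_id ascending):
const before = [
  { op_id: 1n, op: 'PUT', row: 'todos/t1' }, // superseded by op 3
  { op_id: 2n, op: 'PUT', row: 'todos/t2' }, // superseded by op 4
  { op_id: 3n, op: 'PUT', row: 'todos/t1' },
  { op_id: 4n, op: 'REMOVE', row: 'todos/t2' }
];

// After compacting: ops 1 and 2 first become MOVEs; since no PUT remains at or
// below op 2, that range is then deleted and replaced by one CLEAR at op 2,
// with a checksum equal to the sum of the removed ops' checksums.
const after = [
  { op_id: 2n, op: 'CLEAR', target_op: 4n },
  { op_id: 3n, op: 'PUT', row: 'todos/t1' },
  { op_id: 4n, op: 'REMOVE', row: 'todos/t2' } // cannot be cleared: a PUT (op 3) sits below it
];
```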
package/dist/storage/mongo/MongoCompactor.js.map
ADDED
package/dist/storage/mongo/MongoSyncBucketStorage.d.ts
CHANGED
```diff
@@ -1,6 +1,6 @@
 import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
 import * as util from '../../util/util-index.js';
-import { BucketDataBatchOptions, BucketStorageBatch, FlushedResult, ResolveTableOptions, ResolveTableResult, SyncRulesBucketStorage, SyncRuleStatus } from '../BucketStorage.js';
+import { BucketDataBatchOptions, BucketStorageBatch, CompactOptions, FlushedResult, ResolveTableOptions, ResolveTableResult, SyncBucketDataBatch, SyncRulesBucketStorage, SyncRuleStatus } from '../BucketStorage.js';
 import { MongoBucketStorage } from '../MongoBucketStorage.js';
 export declare class MongoSyncBucketStorage implements SyncRulesBucketStorage {
     readonly factory: MongoBucketStorage;
@@ -17,7 +17,7 @@ export declare class MongoSyncBucketStorage implements SyncRulesBucketStorage {
     startBatch(options: {}, callback: (batch: BucketStorageBatch) => Promise<void>): Promise<FlushedResult | null>;
     resolveTable(options: ResolveTableOptions): Promise<ResolveTableResult>;
     getParameterSets(checkpoint: util.OpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]>;
-    getBucketDataBatch(checkpoint: util.OpId, dataBuckets: Map<string, string>, options?: BucketDataBatchOptions): AsyncIterable<
+    getBucketDataBatch(checkpoint: util.OpId, dataBuckets: Map<string, string>, options?: BucketDataBatchOptions): AsyncIterable<SyncBucketDataBatch>;
     getChecksums(checkpoint: util.OpId, buckets: string[]): Promise<util.ChecksumMap>;
     private getChecksumsInternal;
     terminate(): Promise<void>;
@@ -26,4 +26,5 @@ export declare class MongoSyncBucketStorage implements SyncRulesBucketStorage {
     setSnapshotDone(lsn: string): Promise<void>;
     autoActivate(): Promise<void>;
     reportError(e: any): Promise<void>;
+    compact(options?: CompactOptions): Promise<void>;
 }
```
package/dist/storage/mongo/MongoSyncBucketStorage.js
CHANGED
```diff
@@ -3,11 +3,12 @@ import * as db from '../../db/db-index.js';
 import * as replication from '../../replication/WalStream.js';
 import * as util from '../../util/util-index.js';
 import { DEFAULT_DOCUMENT_BATCH_LIMIT, DEFAULT_DOCUMENT_CHUNK_LIMIT_BYTES } from '../BucketStorage.js';
+import { ChecksumCache } from '../ChecksumCache.js';
 import { SourceTable } from '../SourceTable.js';
 import { SyncRuleState } from './models.js';
 import { MongoBucketBatch } from './MongoBucketBatch.js';
-import {
-import {
+import { MongoCompactor } from './MongoCompactor.js';
+import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, mapOpEntry, readSingleBatch, serializeLookup } from './util.js';
 export class MongoSyncBucketStorage {
     constructor(factory, group_id, sync_rules, slot_name) {
         this.factory = factory;
@@ -189,6 +190,7 @@ export class MongoSyncBucketStorage {
         }
         let batchSize = 0;
         let currentBatch = null;
+        let targetOp = null;
         // Ordered by _id, meaning buckets are grouped together
         for (let rawData of data) {
             const row = bson.deserialize(rawData, BSON_DESERIALIZE_OPTIONS);
@@ -203,7 +205,8 @@ export class MongoSyncBucketStorage {
                 start = currentBatch.after;
                 currentBatch = null;
                 batchSize = 0;
-                yield yieldBatch;
+                yield { batch: yieldBatch, targetOp: targetOp };
+                targetOp = null;
             }
             start ?? (start = dataBuckets.get(bucket));
             if (start == null) {
@@ -216,16 +219,15 @@ export class MongoSyncBucketStorage {
                     data: [],
                     next_after: start
                 };
+                targetOp = null;
+            }
+            const entry = mapOpEntry(row);
+            if (row.target_op != null) {
+                // MOVE, CLEAR
+                if (targetOp == null || row.target_op > targetOp) {
+                    targetOp = row.target_op;
+                }
             }
-            const entry = {
-                op_id: util.timestampToOpId(row._id.o),
-                op: row.op,
-                object_type: row.table,
-                object_id: row.row_id,
-                checksum: Number(row.checksum),
-                subkey: `${row.source_table}/${row.source_key.toHexString()}`,
-                data: row.data
-            };
             currentBatch.data.push(entry);
             currentBatch.next_after = entry.op_id;
             batchSize += rawData.byteLength;
@@ -233,7 +235,8 @@ export class MongoSyncBucketStorage {
         if (currentBatch != null) {
             const yieldBatch = currentBatch;
             currentBatch = null;
-            yield yieldBatch;
+            yield { batch: yieldBatch, targetOp: targetOp };
+            targetOp = null;
         }
     }
     async getChecksums(checkpoint, buckets) {
@@ -387,5 +390,8 @@ export class MongoSyncBucketStorage {
             }
         });
     }
+    async compact(options) {
+        return new MongoCompactor(this.db, this.group_id, options).compact();
+    }
 }
 //# sourceMappingURL=MongoSyncBucketStorage.js.map
```
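With this change, `getBucketDataBatch` yields `{ batch, targetOp }` pairs (the new `SyncBucketDataBatch` shape) instead of bare `SyncBucketData` values. A minimal consumption sketch, assuming the interface is re-exported from the package root and treating `util.OpId` as its string form; how the sync layer actually uses `targetOp` lives in `sync.ts`, which is not shown above:

```ts
import { SyncRulesBucketStorage } from '@powersync/service-core';

async function readBucket(storage: SyncRulesBucketStorage, checkpoint: string) {
  // Bucket name and starting op id are placeholders.
  const dataBuckets = new Map<string, string>([['global[]', '0']]);

  for await (const { batch, targetOp } of storage.getBucketDataBatch(checkpoint, dataBuckets)) {
    // `batch` is the same SyncBucketData payload as before.
    console.log(`${batch.data.length} ops, next_after=${batch.next_after}`);

    // `targetOp` is the highest target_op seen on MOVE/CLEAR rows in this batch,
    // or null if there were none; a consumer might compare it against its current
    // checkpoint to notice that compaction has rewritten operations it had not yet synced.
    if (targetOp != null && targetOp > BigInt(checkpoint)) {
      // e.g. treat the current checkpoint as stale and restart the iteration.
    }
  }
}
```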