@powersync/service-core 0.0.0-dev-20241015210820 → 0.0.0-dev-20241016143203
This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only.
- package/CHANGELOG.md +16 -8
- package/dist/api/RouteAPI.d.ts +4 -6
- package/dist/api/diagnostics.js +1 -3
- package/dist/api/diagnostics.js.map +1 -1
- package/dist/api/schema.js +2 -2
- package/dist/api/schema.js.map +1 -1
- package/dist/db/mongo.d.ts +6 -0
- package/dist/db/mongo.js +6 -0
- package/dist/db/mongo.js.map +1 -1
- package/dist/replication/AbstractReplicationJob.js +2 -2
- package/dist/replication/AbstractReplicationJob.js.map +1 -1
- package/dist/replication/ReplicationModule.js +0 -3
- package/dist/replication/ReplicationModule.js.map +1 -1
- package/dist/routes/configure-fastify.js +12 -12
- package/dist/routes/configure-fastify.js.map +1 -1
- package/dist/routes/configure-rsocket.js +1 -4
- package/dist/routes/configure-rsocket.js.map +1 -1
- package/dist/routes/endpoints/admin.js.map +1 -1
- package/dist/routes/endpoints/sync-rules.js.map +1 -1
- package/dist/routes/router.d.ts +1 -8
- package/dist/routes/router.js.map +1 -1
- package/dist/storage/BucketStorage.d.ts +11 -17
- package/dist/storage/BucketStorage.js +0 -6
- package/dist/storage/BucketStorage.js.map +1 -1
- package/dist/storage/MongoBucketStorage.js +27 -9
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/mongo/MongoBucketBatch.js +0 -1
- package/dist/storage/mongo/MongoBucketBatch.js.map +1 -1
- package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +5 -3
- package/dist/storage/mongo/MongoSyncBucketStorage.js +28 -20
- package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/mongo/db.d.ts +9 -0
- package/dist/storage/mongo/db.js +11 -0
- package/dist/storage/mongo/db.js.map +1 -1
- package/dist/util/protocol-types.d.ts +1 -2
- package/package.json +5 -5
- package/src/api/RouteAPI.ts +4 -7
- package/src/api/diagnostics.ts +1 -3
- package/src/api/schema.ts +3 -3
- package/src/db/mongo.ts +7 -0
- package/src/replication/AbstractReplicationJob.ts +2 -2
- package/src/replication/ReplicationModule.ts +0 -4
- package/src/routes/configure-fastify.ts +17 -16
- package/src/routes/configure-rsocket.ts +2 -7
- package/src/routes/endpoints/admin.ts +2 -2
- package/src/routes/endpoints/sync-rules.ts +0 -1
- package/src/routes/router.ts +1 -7
- package/src/storage/BucketStorage.ts +10 -20
- package/src/storage/MongoBucketStorage.ts +26 -9
- package/src/storage/mongo/MongoBucketBatch.ts +0 -1
- package/src/storage/mongo/MongoSyncBucketStorage.ts +29 -26
- package/src/storage/mongo/db.ts +12 -0
- package/src/util/protocol-types.ts +1 -1
- package/test/src/compacting.test.ts +15 -13
- package/test/src/data_storage.test.ts +78 -56
- package/test/src/sync.test.ts +11 -26
- package/test/src/util.ts +14 -3
- package/tsconfig.tsbuildinfo +1 -1
package/src/routes/configure-rsocket.ts
CHANGED

@@ -8,7 +8,7 @@ import { ServiceContext } from '../system/ServiceContext.js';
 import { generateContext, getTokenFromHeader } from './auth.js';
 import { syncStreamReactive } from './endpoints/socket-route.js';
 import { RSocketContextMeta, SocketRouteGenerator } from './router-socket.js';
-import { Context, RouterServiceContext } from './router.js';
+import { Context } from './router.js';
 
 export type RSockerRouterConfig = {
   service_context: ServiceContext;
@@ -36,17 +36,12 @@ export function configureRSocket(router: ReactiveSocketRouter<Context>, options:
       if (context?.token_payload == null) {
         throw new errors.AuthorizationError(token_errors ?? 'Authentication required');
       }
-
-      if (!service_context.routerEngine) {
-        throw new Error(`RouterEngine has not been registered`);
-      }
-
       return {
         token,
         user_agent,
         ...context,
         token_errors: token_errors,
-        service_context
+        service_context
       };
     } else {
       throw new errors.AuthorizationError('No token provided');
package/src/routes/endpoints/admin.ts
CHANGED

@@ -137,8 +137,8 @@ export const reprocess = routeDefinition({
         connections: [
           {
             // Previously the connection was asserted with `!`
-            tag: baseConfig
-            id: baseConfig
+            tag: baseConfig!.tag!,
+            id: baseConfig!.id,
             slot_name: new_rules.slot_name
           }
         ]
package/src/routes/endpoints/sync-rules.ts
CHANGED

@@ -3,7 +3,6 @@ import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules';
 import type { FastifyPluginAsync } from 'fastify';
 import * as t from 'ts-codec';
 
-import * as system from '../../system/system-index.js';
 import { authApi } from '../auth.js';
 import { routeDefinition } from '../router.js';
 import { RouteAPI } from '../../api/RouteAPI.js';
package/src/routes/router.ts
CHANGED

@@ -1,19 +1,13 @@
 import { router } from '@powersync/lib-services-framework';
 import * as auth from '../auth/auth-index.js';
 import { ServiceContext } from '../system/ServiceContext.js';
-import { RouterEngine } from './RouterEngine.js';
 
-/**
- * The {@link RouterEngine} must be provided for these routes
- */
-export type RouterServiceContext = ServiceContext & { routerEngine: RouterEngine };
 /**
  * Common context for routes
  */
 export type Context = {
   user_id?: string;
-
-  service_context: RouterServiceContext;
+  service_context: ServiceContext;
 
   token_payload?: auth.JwtPayload;
   token_errors?: string[];
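With `RouterServiceContext` removed, the type system no longer guarantees that a `RouterEngine` is registered on the context; code that needs it must check at runtime. A minimal sketch of that guard (the helper name is illustrative; the check itself mirrors the one deleted from configure-rsocket.ts above):

```ts
import { ServiceContext } from '../system/ServiceContext.js';

// Illustrative helper: with the RouterServiceContext type gone, presence of
// the engine is a runtime concern rather than a compile-time one.
function requireRouterEngine(service_context: ServiceContext) {
  if (!service_context.routerEngine) {
    throw new Error('RouterEngine has not been registered');
  }
  return service_context.routerEngine;
}
```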
package/src/storage/BucketStorage.ts
CHANGED

@@ -106,22 +106,20 @@ export interface BucketStorageFactory
   getPowerSyncInstanceId(): Promise<string>;
 }
 
-export interface Checkpoint {
+export interface WriteCheckpoint {
+  base: ActiveCheckpoint;
+  writeCheckpoint: bigint | null;
+}
+
+export interface ActiveCheckpoint {
   readonly checkpoint: util.OpId;
   readonly lsn: string | null;
-}
 
-export interface ActiveCheckpoint extends Checkpoint {
   hasSyncRules(): boolean;
 
   getBucketStorage(): Promise<SyncRulesBucketStorage | null>;
 }
 
-export interface WriteCheckpoint {
-  base: ActiveCheckpoint;
-  writeCheckpoint: bigint | null;
-}
-
 export interface StorageMetrics {
   /**
    * Size of operations (bucket_data)
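After this change, `ActiveCheckpoint` carries the checkpoint and LSN fields directly instead of inheriting them from the removed `Checkpoint` interface, and `WriteCheckpoint` pairs an `ActiveCheckpoint` with an optional client write checkpoint. A minimal consumer sketch, assuming both interfaces are re-exported from the package root:

```ts
import type { ActiveCheckpoint, WriteCheckpoint } from '@powersync/service-core';

// Illustrative only: format the reshaped checkpoint types for logging.
function describeCheckpoint(cp: WriteCheckpoint): string {
  const base: ActiveCheckpoint = cp.base;
  const write = cp.writeCheckpoint?.toString() ?? 'none';
  return `op=${base.checkpoint} lsn=${base.lsn ?? 'none'} write=${write}`;
}
```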
@@ -221,7 +219,7 @@ export interface SyncRulesBucketStorage extends DisposableObserverClient<SyncRul
     callback: (batch: BucketStorageBatch) => Promise<void>
   ): Promise<FlushedResult | null>;
 
-  getCheckpoint(): Promise<Checkpoint>;
+  getCheckpoint(): Promise<{ checkpoint: util.OpId }>;
 
   getParsedSyncRules(options: ParseSyncRulesOptions): SqlSyncRules;
 
@@ -267,8 +265,6 @@ export interface SyncRulesBucketStorage extends DisposableObserverClient<SyncRul
    */
  clear(): Promise<void>;
 
-  setSnapshotDone(lsn: string): Promise<void>;
-
   autoActivate(): Promise<void>;
 
   /**
@@ -389,14 +385,8 @@ export type SaveOp = 'insert' | 'update' | 'delete';
 
 export type SaveOptions = SaveInsert | SaveUpdate | SaveDelete;
 
-export enum SaveOperationTag {
-  INSERT = 'insert',
-  UPDATE = 'update',
-  DELETE = 'delete'
-}
-
 export interface SaveInsert {
-  tag: SaveOperationTag.INSERT;
+  tag: 'insert';
   sourceTable: SourceTable;
   before?: undefined;
   beforeReplicaId?: undefined;
@@ -405,7 +395,7 @@ export interface SaveInsert {
 }
 
 export interface SaveUpdate {
-  tag: SaveOperationTag.UPDATE;
+  tag: 'update';
   sourceTable: SourceTable;
 
   /**
@@ -424,7 +414,7 @@ export interface SaveUpdate {
 }
 
 export interface SaveDelete {
-  tag: SaveOperationTag.DELETE;
+  tag: 'delete';
   sourceTable: SourceTable;
   before?: SqliteRow;
   beforeReplicaId: ReplicaId;
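Dropping the `SaveOperationTag` enum in favour of the string literals already declared by `SaveOp` makes `SaveOptions` a conventional discriminated union: narrowing on `tag` selects the matching member without an extra enum import. A sketch, assuming `SaveOptions` is re-exported from the package root:

```ts
import type { SaveOptions } from '@powersync/service-core';

// The literal `tag` field discriminates the union, so TypeScript narrows
// each branch to SaveInsert, SaveUpdate or SaveDelete automatically.
function describeSave(op: SaveOptions): string {
  if (op.tag === 'delete') {
    // Narrowed to SaveDelete, where beforeReplicaId is required.
    return `delete replica ${String(op.beforeReplicaId)}`;
  }
  // Narrowed to SaveInsert | SaveUpdate.
  return `${op.tag} row`;
}
```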
package/src/storage/MongoBucketStorage.ts
CHANGED

@@ -331,39 +331,56 @@ export class MongoBucketStorage
   }
 
   async getStorageMetrics(): Promise<StorageMetrics> {
+    const ignoreNotExiting = (e: unknown) => {
+      if (e instanceof mongo.MongoServerError && e.codeName == 'NamespaceNotFound') {
+        // Collection doesn't exist - return 0
+        return [{ storageStats: { size: 0 } }];
+      } else {
+        return Promise.reject(e);
+      }
+    };
+
+    const active_sync_rules = await this.getActiveSyncRules({ defaultSchema: 'public' });
+    if (active_sync_rules == null) {
+      return {
+        operations_size_bytes: 0,
+        parameters_size_bytes: 0,
+        replication_size_bytes: 0
+      };
+    }
     const operations_aggregate = await this.db.bucket_data
 
       .aggregate([
         {
           $collStats: {
-            storageStats: {},
-            count: {}
+            storageStats: {}
           }
         }
       ])
-      .toArray();
+      .toArray()
+      .catch(ignoreNotExiting);
 
     const parameters_aggregate = await this.db.bucket_parameters
       .aggregate([
         {
           $collStats: {
-            storageStats: {},
-            count: {}
+            storageStats: {}
           }
         }
       ])
-      .toArray();
+      .toArray()
+      .catch(ignoreNotExiting);
 
     const replication_aggregate = await this.db.current_data
       .aggregate([
         {
           $collStats: {
-            storageStats: {},
-            count: {}
+            storageStats: {}
           }
         }
       ])
-      .toArray();
+      .toArray()
+      .catch(ignoreNotExiting);
 
     return {
       operations_size_bytes: operations_aggregate[0].storageStats.size,
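The `$collStats` stage fails with a `NamespaceNotFound` server error when the collection has never been created, so `getStorageMetrics` now maps that case to a zero-size result instead of failing the whole call. The same pattern as a standalone sketch (the helper is illustrative, not part of the package):

```ts
import * as mongo from 'mongodb';

// Illustrative: storage size of a collection in bytes, treating a
// collection that does not exist yet as size 0.
async function collectionSizeBytes(collection: mongo.Collection): Promise<number> {
  const stats = await collection
    .aggregate([{ $collStats: { storageStats: {} } }])
    .toArray()
    .catch((e: unknown) => {
      if (e instanceof mongo.MongoServerError && e.codeName == 'NamespaceNotFound') {
        return [{ storageStats: { size: 0 } }];
      }
      return Promise.reject(e);
    });
  return stats[0].storageStats.size;
}
```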
package/src/storage/mongo/MongoBucketBatch.ts
CHANGED

@@ -81,7 +81,6 @@ export class MongoBucketBatch extends DisposableObserver<BucketBatchStorageListe
     this.session = this.client.startSession();
     this.slot_name = slot_name;
     this.sync_rules = sync_rules;
-    this.batch = new OperationBatch();
   }
 
   addCustomWriteCheckpoint(checkpoint: CustomWriteCheckpointOptions): void {
package/src/storage/mongo/MongoSyncBucketStorage.ts
CHANGED

@@ -8,7 +8,6 @@ import * as util from '../../util/util-index.js';
 import {
   BucketDataBatchOptions,
   BucketStorageBatch,
-  Checkpoint,
   CompactOptions,
   DEFAULT_DOCUMENT_BATCH_LIMIT,
   DEFAULT_DOCUMENT_CHUNK_LIMIT_BYTES,
@@ -32,6 +31,8 @@ import { BucketDataDocument, BucketDataKey, SourceKey, SyncRuleState } from './m
 import { MongoBucketBatch } from './MongoBucketBatch.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, mapOpEntry, readSingleBatch, serializeLookup } from './util.js';
+import { logger } from '@powersync/lib-services-framework';
+import * as timers from 'timers/promises';
 
 export class MongoSyncBucketStorage
   extends DisposableObserver<SyncRulesBucketStorageListener>
@@ -61,16 +62,15 @@ export class MongoSyncBucketStorage
     return this.parsedSyncRulesCache;
   }
 
-  async getCheckpoint(): Promise<Checkpoint> {
+  async getCheckpoint() {
     const doc = await this.db.sync_rules.findOne(
       { _id: this.group_id },
       {
-        projection: { last_checkpoint: 1, last_checkpoint_lsn: 1 }
+        projection: { last_checkpoint: 1 }
       }
     );
     return {
-      checkpoint: util.timestampToOpId(doc?.last_checkpoint ?? 0n),
-      lsn: doc?.last_checkpoint_lsn ?? null
+      checkpoint: util.timestampToOpId(doc?.last_checkpoint ?? 0n)
     };
   }
 
@@ -461,10 +461,28 @@ export class MongoSyncBucketStorage
   }
 
   async clear(): Promise<void> {
+    while (true) {
+      try {
+        await this.clearIteration();
+        return;
+      } catch (e: unknown) {
+        if (e instanceof mongo.MongoServerError && e.codeName == 'MaxTimeMSExpired') {
+          logger.info(
+            `Clearing took longer than ${db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS}ms, waiting and triggering another iteration.`
+          );
+          await timers.setTimeout(db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS / 5);
+          continue;
+        } else {
+          throw e;
+        }
+      }
+    }
+  }
+
+  private async clearIteration(): Promise<void> {
     // Individual operations here may time out with the maxTimeMS option.
     // It is expected to still make progress, and continue on the next try.
 
-    // TODO: Transactional?
     await this.db.sync_rules.updateOne(
       {
         _id: this.group_id
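The new `clear()` trades one unbounded operation for a sequence of time-boxed ones: each `clearIteration()` runs its deletes with `maxTimeMS`, and a `MaxTimeMSExpired` error signals partial progress rather than failure, so the loop backs off briefly and tries again. The same idea generalized as a hedged sketch:

```ts
import * as mongo from 'mongodb';
import * as timers from 'timers/promises';

// Illustrative generalization of the retry loop: re-run a time-boxed
// operation that makes incremental progress until it completes.
async function runUntilComplete(iteration: () => Promise<void>, timeoutMs: number): Promise<void> {
  while (true) {
    try {
      await iteration();
      return;
    } catch (e) {
      if (e instanceof mongo.MongoServerError && e.codeName == 'MaxTimeMSExpired') {
        // Progress was likely made before the timeout; wait, then retry.
        await timers.setTimeout(timeoutMs / 5);
        continue;
      }
      throw e;
    }
  }
}
```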
@@ -478,48 +496,33 @@ export class MongoSyncBucketStorage
           no_checkpoint_before: null
         }
       },
-      { maxTimeMS: db.mongo.
+      { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
     );
     await this.db.bucket_data.deleteMany(
       {
         _id: idPrefixFilter<BucketDataKey>({ g: this.group_id }, ['b', 'o'])
       },
-      { maxTimeMS: db.mongo.
+      { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
     );
     await this.db.bucket_parameters.deleteMany(
       {
         key: idPrefixFilter<SourceKey>({ g: this.group_id }, ['t', 'k'])
       },
-      { maxTimeMS: db.mongo.
+      { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
     );
 
     await this.db.current_data.deleteMany(
       {
         _id: idPrefixFilter<SourceKey>({ g: this.group_id }, ['t', 'k'])
       },
-      { maxTimeMS: db.mongo.
+      { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
     );
 
     await this.db.source_tables.deleteMany(
       {
         group_id: this.group_id
       },
-      { maxTimeMS: db.mongo.
-    );
-  }
-
-  async setSnapshotDone(lsn: string): Promise<void> {
-    await this.db.sync_rules.updateOne(
-      {
-        _id: this.group_id
-      },
-      {
-        $set: {
-          snapshot_done: true,
-          persisted_lsn: lsn,
-          last_checkpoint_ts: new Date()
-        }
-      }
+      { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
     );
   }
 
package/src/storage/mongo/db.ts
CHANGED

@@ -62,6 +62,9 @@ export class PowerSyncMongo {
     this.locks = this.db.collection('locks');
   }
 
+  /**
+   * Clear all collections.
+   */
   async clear() {
     await this.current_data.deleteMany({});
     await this.bucket_data.deleteMany({});

@@ -73,4 +76,13 @@ export class PowerSyncMongo {
     await this.instance.deleteOne({});
     await this.locks.deleteMany({});
   }
+
+  /**
+   * Drop the entire database.
+   *
+   * Primarily for tests.
+   */
+  async drop() {
+    await this.db.dropDatabase();
+  }
 }
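The new `drop()` gives tests a one-call reset instead of `clear()`'s per-collection deletes. A hedged vitest teardown sketch, assuming `PowerSyncMongo` is exported from the package root and that the connected instance is created by the test harness:

```ts
import { afterEach } from 'vitest';
import type { PowerSyncMongo } from '@powersync/service-core';

declare const db: PowerSyncMongo; // assumed: created in test setup

afterEach(async () => {
  // Drops the whole disposable test database in one server call.
  await db.drop();
});
```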
package/test/src/compacting.test.ts
CHANGED

@@ -1,9 +1,11 @@
-import { SaveOperationTag } from '@/storage/BucketStorage.js';
 import { MongoCompactOptions } from '@/storage/mongo/MongoCompactor.js';
+import { SqlSyncRules } from '@powersync/service-sync-rules';
 import { describe, expect, test } from 'vitest';
 import { validateCompactedBucket } from './bucket_validation.js';
 import { oneFromAsync } from './stream_utils.js';
-import { BATCH_OPTIONS, makeTestTable, MONGO_STORAGE_FACTORY, rid, testRules } from './util.js';
+import { BATCH_OPTIONS, makeTestTable, MONGO_STORAGE_FACTORY, rid, testRules, ZERO_LSN } from './util.js';
+import { ParseSyncRulesOptions, PersistedSyncRulesContent, StartBatchOptions } from '@/storage/BucketStorage.js';
+import { getUuidReplicaIdentityBson } from '@/util/util-index.js';
 
 const TEST_TABLE = makeTestTable('test', ['id']);
 

@@ -29,7 +31,7 @@ bucket_definitions:
     const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.INSERT,
+        tag: 'insert',
        after: {
           id: 't1'
         },

@@ -38,7 +40,7 @@ bucket_definitions:
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.INSERT,
+        tag: 'insert',
         after: {
           id: 't2'
         },

@@ -47,7 +49,7 @@ bucket_definitions:
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.UPDATE,
+        tag: 'update',
         after: {
           id: 't2'
         },

@@ -126,7 +128,7 @@ bucket_definitions:
     const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.INSERT,
+        tag: 'insert',
         after: {
           id: 't1'
         },

@@ -135,7 +137,7 @@ bucket_definitions:
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.INSERT,
+        tag: 'insert',
         after: {
           id: 't2'
         },

@@ -144,7 +146,7 @@ bucket_definitions:
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.DELETE,
+        tag: 'delete',
         before: {
           id: 't1'
         },

@@ -153,7 +155,7 @@ bucket_definitions:
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.UPDATE,
+        tag: 'update',
         after: {
           id: 't2'
         },

@@ -231,7 +233,7 @@ bucket_definitions:
     const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.INSERT,
+        tag: 'insert',
         after: {
           id: 't1'
         },

@@ -240,7 +242,7 @@ bucket_definitions:
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.INSERT,
+        tag: 'insert',
         after: {
           id: 't2'
         },

@@ -249,7 +251,7 @@ bucket_definitions:
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.DELETE,
+        tag: 'delete',
         before: {
           id: 't1'
         },

@@ -263,7 +265,7 @@ bucket_definitions:
     const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag: SaveOperationTag.DELETE,
+        tag: 'delete',
         before: {
           id: 't2'
         },