@powersync/service-core 0.2.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/api/diagnostics.js +2 -2
- package/dist/api/diagnostics.js.map +1 -1
- package/dist/api/schema.js.map +1 -1
- package/dist/auth/CachedKeyCollector.js.map +1 -1
- package/dist/auth/KeySpec.js.map +1 -1
- package/dist/auth/KeyStore.js +2 -2
- package/dist/auth/KeyStore.js.map +1 -1
- package/dist/auth/LeakyBucket.js.map +1 -1
- package/dist/auth/RemoteJWKSCollector.js.map +1 -1
- package/dist/auth/SupabaseKeyCollector.js.map +1 -1
- package/dist/db/mongo.js.map +1 -1
- package/dist/entry/cli-entry.js +2 -2
- package/dist/entry/cli-entry.js.map +1 -1
- package/dist/entry/commands/config-command.js.map +1 -1
- package/dist/entry/commands/migrate-action.js.map +1 -1
- package/dist/entry/commands/start-action.js.map +1 -1
- package/dist/entry/commands/teardown-action.js.map +1 -1
- package/dist/index.d.ts +3 -2
- package/dist/index.js +4 -2
- package/dist/index.js.map +1 -1
- package/dist/locks/LockManager.d.ts +10 -0
- package/dist/locks/LockManager.js +7 -0
- package/dist/locks/LockManager.js.map +1 -0
- package/dist/locks/MongoLocks.d.ts +36 -0
- package/dist/locks/MongoLocks.js +81 -0
- package/dist/locks/MongoLocks.js.map +1 -0
- package/dist/locks/locks-index.d.ts +2 -0
- package/dist/locks/locks-index.js +3 -0
- package/dist/locks/locks-index.js.map +1 -0
- package/dist/metrics/Metrics.js +6 -6
- package/dist/metrics/Metrics.js.map +1 -1
- package/dist/migrations/db/migrations/1684951997326-init.js.map +1 -1
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -1
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +1 -1
- package/dist/migrations/definitions.d.ts +18 -0
- package/dist/migrations/definitions.js +6 -0
- package/dist/migrations/definitions.js.map +1 -0
- package/dist/migrations/executor.d.ts +16 -0
- package/dist/migrations/executor.js +64 -0
- package/dist/migrations/executor.js.map +1 -0
- package/dist/migrations/migrations-index.d.ts +3 -0
- package/dist/migrations/migrations-index.js +4 -0
- package/dist/migrations/migrations-index.js.map +1 -0
- package/dist/migrations/migrations.d.ts +1 -1
- package/dist/migrations/migrations.js +4 -8
- package/dist/migrations/migrations.js.map +1 -1
- package/dist/migrations/store/migration-store.d.ts +11 -0
- package/dist/migrations/store/migration-store.js +46 -0
- package/dist/migrations/store/migration-store.js.map +1 -0
- package/dist/replication/ErrorRateLimiter.js.map +1 -1
- package/dist/replication/PgRelation.js.map +1 -1
- package/dist/replication/WalConnection.js.map +1 -1
- package/dist/replication/WalStream.d.ts +0 -1
- package/dist/replication/WalStream.js +21 -25
- package/dist/replication/WalStream.js.map +1 -1
- package/dist/replication/WalStreamManager.js +12 -13
- package/dist/replication/WalStreamManager.js.map +1 -1
- package/dist/replication/WalStreamRunner.js +8 -8
- package/dist/replication/WalStreamRunner.js.map +1 -1
- package/dist/replication/util.js.map +1 -1
- package/dist/routes/auth.d.ts +8 -10
- package/dist/routes/auth.js.map +1 -1
- package/dist/routes/endpoints/admin.d.ts +1011 -0
- package/dist/routes/{admin.js → endpoints/admin.js} +33 -18
- package/dist/routes/endpoints/admin.js.map +1 -0
- package/dist/routes/endpoints/checkpointing.d.ts +76 -0
- package/dist/routes/endpoints/checkpointing.js +36 -0
- package/dist/routes/endpoints/checkpointing.js.map +1 -0
- package/dist/routes/endpoints/dev.d.ts +312 -0
- package/dist/routes/{dev.js → endpoints/dev.js} +25 -16
- package/dist/routes/endpoints/dev.js.map +1 -0
- package/dist/routes/endpoints/route-endpoints-index.d.ts +6 -0
- package/dist/routes/endpoints/route-endpoints-index.js +7 -0
- package/dist/routes/endpoints/route-endpoints-index.js.map +1 -0
- package/dist/routes/endpoints/socket-route.d.ts +2 -0
- package/dist/routes/{socket-route.js → endpoints/socket-route.js} +10 -10
- package/dist/routes/endpoints/socket-route.js.map +1 -0
- package/dist/routes/endpoints/sync-rules.d.ts +174 -0
- package/dist/routes/{sync-rules.js → endpoints/sync-rules.js} +44 -24
- package/dist/routes/endpoints/sync-rules.js.map +1 -0
- package/dist/routes/endpoints/sync-stream.d.ts +132 -0
- package/dist/routes/{sync-stream.js → endpoints/sync-stream.js} +26 -17
- package/dist/routes/endpoints/sync-stream.js.map +1 -0
- package/dist/routes/hooks.d.ts +10 -0
- package/dist/routes/hooks.js +31 -0
- package/dist/routes/hooks.js.map +1 -0
- package/dist/routes/route-register.d.ts +10 -0
- package/dist/routes/route-register.js +87 -0
- package/dist/routes/route-register.js.map +1 -0
- package/dist/routes/router.d.ts +16 -4
- package/dist/routes/router.js +6 -1
- package/dist/routes/router.js.map +1 -1
- package/dist/routes/routes-index.d.ts +5 -3
- package/dist/routes/routes-index.js +5 -3
- package/dist/routes/routes-index.js.map +1 -1
- package/dist/runner/teardown.js +27 -12
- package/dist/runner/teardown.js.map +1 -1
- package/dist/storage/BucketStorage.d.ts +3 -0
- package/dist/storage/BucketStorage.js.map +1 -1
- package/dist/storage/ChecksumCache.js.map +1 -1
- package/dist/storage/MongoBucketStorage.js +5 -5
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/SourceTable.js.map +1 -1
- package/dist/storage/mongo/MongoBucketBatch.js +23 -18
- package/dist/storage/mongo/MongoBucketBatch.js.map +1 -1
- package/dist/storage/mongo/MongoIdSequence.js.map +1 -1
- package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/mongo/MongoSyncRulesLock.js +3 -3
- package/dist/storage/mongo/MongoSyncRulesLock.js.map +1 -1
- package/dist/storage/mongo/OperationBatch.js.map +1 -1
- package/dist/storage/mongo/PersistedBatch.js +2 -2
- package/dist/storage/mongo/PersistedBatch.js.map +1 -1
- package/dist/storage/mongo/db.d.ts +2 -2
- package/dist/storage/mongo/db.js.map +1 -1
- package/dist/storage/mongo/util.js.map +1 -1
- package/dist/sync/BroadcastIterable.js.map +1 -1
- package/dist/sync/LastValueSink.js.map +1 -1
- package/dist/sync/merge.js.map +1 -1
- package/dist/sync/safeRace.js.map +1 -1
- package/dist/sync/sync.js +4 -4
- package/dist/sync/sync.js.map +1 -1
- package/dist/sync/util.js.map +1 -1
- package/dist/system/CorePowerSyncSystem.d.ts +12 -7
- package/dist/system/CorePowerSyncSystem.js +26 -2
- package/dist/system/CorePowerSyncSystem.js.map +1 -1
- package/dist/system/system-index.d.ts +1 -0
- package/dist/system/system-index.js +2 -0
- package/dist/system/system-index.js.map +1 -0
- package/dist/util/Mutex.js.map +1 -1
- package/dist/util/PgManager.js.map +1 -1
- package/dist/util/alerting.d.ts +0 -2
- package/dist/util/alerting.js +0 -6
- package/dist/util/alerting.js.map +1 -1
- package/dist/util/config/collectors/config-collector.js +3 -3
- package/dist/util/config/collectors/config-collector.js.map +1 -1
- package/dist/util/config/collectors/impl/base64-config-collector.js.map +1 -1
- package/dist/util/config/collectors/impl/filesystem-config-collector.js +7 -5
- package/dist/util/config/collectors/impl/filesystem-config-collector.js.map +1 -1
- package/dist/util/config/compound-config-collector.js +4 -4
- package/dist/util/config/compound-config-collector.js.map +1 -1
- package/dist/util/config/sync-rules/impl/base64-sync-rules-collector.js.map +1 -1
- package/dist/util/config/sync-rules/impl/filesystem-sync-rules-collector.js.map +1 -1
- package/dist/util/config/sync-rules/impl/inline-sync-rules-collector.js.map +1 -1
- package/dist/util/config.js.map +1 -1
- package/dist/util/env.d.ts +1 -2
- package/dist/util/env.js +3 -2
- package/dist/util/env.js.map +1 -1
- package/dist/util/memory-tracking.js +2 -2
- package/dist/util/memory-tracking.js.map +1 -1
- package/dist/util/migration_lib.js.map +1 -1
- package/dist/util/pgwire_utils.js +2 -2
- package/dist/util/pgwire_utils.js.map +1 -1
- package/dist/util/populate_test_data.js.map +1 -1
- package/dist/util/secs.js.map +1 -1
- package/dist/util/utils.js +4 -4
- package/dist/util/utils.js.map +1 -1
- package/package.json +13 -10
- package/src/api/diagnostics.ts +5 -5
- package/src/api/schema.ts +1 -1
- package/src/auth/KeyStore.ts +2 -2
- package/src/entry/cli-entry.ts +3 -4
- package/src/entry/commands/config-command.ts +1 -1
- package/src/entry/commands/migrate-action.ts +2 -2
- package/src/entry/commands/start-action.ts +1 -1
- package/src/entry/commands/teardown-action.ts +1 -1
- package/src/index.ts +5 -2
- package/src/locks/LockManager.ts +16 -0
- package/src/locks/MongoLocks.ts +142 -0
- package/src/locks/locks-index.ts +2 -0
- package/src/metrics/Metrics.ts +8 -8
- package/src/migrations/db/migrations/1684951997326-init.ts +3 -3
- package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +3 -3
- package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +2 -2
- package/src/migrations/definitions.ts +21 -0
- package/src/migrations/executor.ts +87 -0
- package/src/migrations/migrations-index.ts +3 -0
- package/src/migrations/migrations.ts +7 -11
- package/src/migrations/store/migration-store.ts +63 -0
- package/src/replication/WalConnection.ts +2 -2
- package/src/replication/WalStream.ts +24 -29
- package/src/replication/WalStreamManager.ts +14 -15
- package/src/replication/WalStreamRunner.ts +10 -10
- package/src/replication/util.ts +1 -1
- package/src/routes/auth.ts +22 -16
- package/src/routes/endpoints/admin.ts +237 -0
- package/src/routes/endpoints/checkpointing.ts +41 -0
- package/src/routes/endpoints/dev.ts +199 -0
- package/src/routes/endpoints/route-endpoints-index.ts +6 -0
- package/src/routes/{socket-route.ts → endpoints/socket-route.ts} +11 -11
- package/src/routes/endpoints/sync-rules.ts +227 -0
- package/src/routes/endpoints/sync-stream.ts +101 -0
- package/src/routes/hooks.ts +45 -0
- package/src/routes/route-register.ts +104 -0
- package/src/routes/router.ts +34 -6
- package/src/routes/routes-index.ts +5 -4
- package/src/runner/teardown.ts +30 -13
- package/src/storage/BucketStorage.ts +7 -2
- package/src/storage/ChecksumCache.ts +2 -2
- package/src/storage/MongoBucketStorage.ts +8 -8
- package/src/storage/SourceTable.ts +2 -2
- package/src/storage/mongo/MongoBucketBatch.ts +29 -22
- package/src/storage/mongo/MongoSyncBucketStorage.ts +3 -3
- package/src/storage/mongo/MongoSyncRulesLock.ts +3 -3
- package/src/storage/mongo/OperationBatch.ts +1 -1
- package/src/storage/mongo/PersistedBatch.ts +3 -3
- package/src/storage/mongo/db.ts +3 -4
- package/src/sync/sync.ts +8 -8
- package/src/sync/util.ts +2 -2
- package/src/system/CorePowerSyncSystem.ts +31 -10
- package/src/system/system-index.ts +1 -0
- package/src/util/alerting.ts +0 -8
- package/src/util/config/collectors/config-collector.ts +5 -3
- package/src/util/config/collectors/impl/filesystem-config-collector.ts +8 -6
- package/src/util/config/compound-config-collector.ts +4 -4
- package/src/util/env.ts +4 -2
- package/src/util/memory-tracking.ts +2 -2
- package/src/util/pgwire_utils.ts +3 -3
- package/src/util/utils.ts +5 -5
- package/test/src/auth.test.ts +4 -2
- package/test/src/data_storage.test.ts +177 -0
- package/test/src/env.ts +6 -6
- package/test/src/pg_test.test.ts +18 -0
- package/test/src/setup.ts +7 -0
- package/test/src/slow_tests.test.ts +45 -6
- package/test/tsconfig.json +1 -1
- package/tsconfig.json +5 -6
- package/tsconfig.tsbuildinfo +1 -1
- package/vitest.config.ts +1 -3
- package/dist/migrations/db/store.d.ts +0 -3
- package/dist/migrations/db/store.js +0 -10
- package/dist/migrations/db/store.js.map +0 -1
- package/dist/routes/admin.d.ts +0 -7
- package/dist/routes/admin.js.map +0 -1
- package/dist/routes/checkpointing.d.ts +0 -3
- package/dist/routes/checkpointing.js +0 -30
- package/dist/routes/checkpointing.js.map +0 -1
- package/dist/routes/dev.d.ts +0 -6
- package/dist/routes/dev.js.map +0 -1
- package/dist/routes/route-generators.d.ts +0 -15
- package/dist/routes/route-generators.js +0 -32
- package/dist/routes/route-generators.js.map +0 -1
- package/dist/routes/socket-route.d.ts +0 -2
- package/dist/routes/socket-route.js.map +0 -1
- package/dist/routes/sync-rules.d.ts +0 -6
- package/dist/routes/sync-rules.js.map +0 -1
- package/dist/routes/sync-stream.d.ts +0 -5
- package/dist/routes/sync-stream.js.map +0 -1
- package/src/migrations/db/store.ts +0 -11
- package/src/routes/admin.ts +0 -229
- package/src/routes/checkpointing.ts +0 -38
- package/src/routes/dev.ts +0 -194
- package/src/routes/route-generators.ts +0 -39
- package/src/routes/sync-rules.ts +0 -210
- package/src/routes/sync-stream.ts +0 -95
package/src/system/CorePowerSyncSystem.ts
CHANGED
@@ -1,26 +1,47 @@
  import * as pgwire from '@powersync/service-jpgwire';
- import
+ import { LifeCycledSystem, container, logger } from '@powersync/lib-services-framework';

- import * as
- import * as
- import * as utils from '@/util/util-index.js';
+ import * as storage from '../storage/storage-index.js';
+ import * as utils from '../util/util-index.js';

- export abstract class CorePowerSyncSystem extends
+ export abstract class CorePowerSyncSystem extends LifeCycledSystem {
    abstract storage: storage.BucketStorageFactory;
-   abstract client_keystore: auth.KeyStore;
-   abstract dev_client_keystore: auth.KeyStore;
    abstract pgwire_pool?: pgwire.PgClient;
+   closed: boolean;

    protected stopHandlers: Set<() => void> = new Set();

-   closed: boolean;
-
    constructor(public config: utils.ResolvedPowerSyncConfig) {
      super();
      this.closed = false;
    }

-
+   get client_keystore() {
+     return this.config.client_keystore;
+   }
+
+   get dev_client_keystore() {
+     return this.config.dev_client_keystore;
+   }
+
+   /**
+    * Adds a termination handler which will call handlers registered via
+    * [addStopHandler].
+    * This should be called after the server is started and it's termination handler is added.
+    * This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit
+    */
+   addTerminationHandler() {
+     container.terminationHandler.handleTerminationSignal(async () => {
+       // Close open streams, so that they don't block the server from closing.
+       // Note: This does not work well when streaming requests are queued. In that case, the server still doesn't
+       // close in the 30-second timeout.
+       this.closed = true;
+       logger.info(`Closing ${this.stopHandlers.size} streams`);
+       for (let handler of this.stopHandlers) {
+         handler();
+       }
+     });
+   }

    addStopHandler(handler: () => void): () => void {
      if (this.closed) {
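For orientation, the new lifecycle methods replace the abstract keystore members (keystores are now read from the resolved config), and shutdown is coordinated through stop handlers that run before the HTTP server's own termination handler. The sketch below only illustrates that ordering; the names `system`, `startServer`, `streamUntilStopped` and the import path are assumptions for this illustration, not part of the package.

// Sketch: wiring the new shutdown flow (assumed names, not taken from this diff).
import { CorePowerSyncSystem } from '@powersync/service-core';

declare const system: CorePowerSyncSystem; // a concrete subclass instance
declare function startServer(): Promise<void>; // registers the server's own termination handler

async function boot() {
  await startServer();
  // Added after the server's handler so it runs earlier on a termination signal,
  // interrupting open sync streams before the server starts closing.
  system.addTerminationHandler();
}

// Inside a streaming request: abort the stream when the system shuts down.
function streamUntilStopped(run: (signal: AbortSignal) => Promise<void>) {
  const controller = new AbortController();
  const removeStopHandler = system.addStopHandler(() => controller.abort());
  return run(controller.signal).finally(removeStopHandler);
}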
package/src/system/system-index.ts
CHANGED
@@ -0,0 +1 @@
+ export * from './CorePowerSyncSystem.js';
package/src/util/alerting.ts
CHANGED
@@ -1,5 +1,3 @@
- import * as micro from '@journeyapps-platform/micro';
-
  let globalTags: Record<string, string> = {};

  export function setTags(tags: Record<string, string>) {
@@ -9,9 +7,3 @@ export function setTags(tags: Record<string, string>) {
  export function getGlobalTags() {
    return globalTags;
  }
-
- export function captureException(error: any, options?: micro.alerts.CaptureOptions) {
-   micro.alerts.captureException(error, {
-     ...options
-   });
- }
package/src/util/config/collectors/config-collector.ts
CHANGED
@@ -1,8 +1,10 @@
  import * as t from 'ts-codec';
+ import * as yaml from 'yaml';
+
  import { configFile } from '@powersync/service-types';
- import
+ import { schema } from '@powersync/lib-services-framework';
+
  import { RunnerConfig } from '../types.js';
- import * as yaml from 'yaml';

  export enum ConfigFileFormat {
    YAML = 'yaml',
@@ -22,7 +24,7 @@ export enum ConfigFileFormat {
  const YAML_ENV_PREFIX = 'PS_';

  // ts-codec itself doesn't give great validation errors, so we use json schema for that
- const configSchemaValidator =
+ const configSchemaValidator = schema
    .parseJSONSchema(
      t.generateJSONSchema(configFile.powerSyncConfig, { allowAdditional: true, parsers: [configFile.portParser] })
    )
package/src/util/config/collectors/impl/filesystem-config-collector.ts
CHANGED
@@ -1,8 +1,9 @@
  import * as fs from 'fs/promises';
- import * as
+ import * as path from 'path';

  import { ConfigCollector, ConfigFileFormat } from '../config-collector.js';
  import { RunnerConfig } from '../../types.js';
+ import { logger } from '@powersync/lib-services-framework';

  export class FileSystemConfigCollector extends ConfigCollector {
    get name(): string {
@@ -15,16 +16,17 @@ export class FileSystemConfigCollector extends ConfigCollector {
      return null;
    }

+   const resolvedPath = path.resolve(process.cwd(), config_path);
+
    // Check if file exists
    try {
-     await fs.access(
+     await fs.access(resolvedPath, fs.constants.F_OK);
    } catch (ex) {
-     throw new Error(`Config file path ${
+     throw new Error(`Config file path ${resolvedPath} was specified, but the file does not exist.`);
    }

-
-
-   const content = await fs.readFile(config_path, 'utf-8');
+   logger.info(`Collecting PowerSync configuration from File: ${resolvedPath}`);
+   const content = await fs.readFile(resolvedPath, 'utf-8');

    let contentType: ConfigFileFormat | undefined;
    switch (true) {
package/src/util/config/compound-config-collector.ts
CHANGED
@@ -1,8 +1,7 @@
- import * as micro from '@journeyapps-platform/micro';
  import { configFile, normalizeConnection } from '@powersync/service-types';
  import { ConfigCollector } from './collectors/config-collector.js';
  import { ResolvedConnection, ResolvedPowerSyncConfig, RunnerConfig, SyncRulesConfig } from './types.js';
- import * as auth from '
+ import * as auth from '../../auth/auth-index.js';
  import { SyncRulesCollector } from './sync-rules/sync-collector.js';
  import { Base64ConfigCollector } from './collectors/impl/base64-config-collector.js';
  import { FileSystemConfigCollector } from './collectors/impl/filesystem-config-collector.js';
@@ -10,6 +9,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co
  import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js';
  import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js';
  import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js';
+ import { logger } from '@powersync/lib-services-framework';

  const POWERSYNC_DEV_KID = 'powersync-dev';

@@ -140,7 +140,7 @@ export class CompoundConfigCollector {
        if (baseConfig) {
          return baseConfig;
        }
-
+       logger.debug(
          `Could not collect PowerSync config with ${collector.name} method. Moving on to next method if available.`
        );
      } catch (ex) {
@@ -161,7 +161,7 @@ export class CompoundConfigCollector {
        if (config) {
          return config;
        }
-
+       logger.debug(
          `Could not collect sync rules with ${collector.name} method. Moving on to next method if available.`
        );
      } catch (ex) {
package/src/util/env.ts
CHANGED
@@ -1,4 +1,4 @@
- import { utils } from '@
+ import { utils } from '@powersync/lib-services-framework';

  import { ServiceRunner } from './config/types.js';

@@ -22,7 +22,9 @@ export const env = utils.collectEnvironmentVariables({
    /**
     * Port for metrics
     */
-   METRICS_PORT: utils.type.number.optional()
+   METRICS_PORT: utils.type.number.optional(),
+
+   NODE_ENV: utils.type.string.optional()
  });

  export type Env = typeof env;
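Since env is built with utils.collectEnvironmentVariables, the new NODE_ENV entry is read the same way as the existing optional variables. A small hedged usage sketch follows; the production check and fallback port are illustrative assumptions, not taken from this package.

// Sketch: consuming the collected environment (assumed usage, not from this diff).
import { env } from './util/env.js';

// Optional variables may be undefined when they are not set.
const isProduction = env.NODE_ENV === 'production';
const metricsPort = env.METRICS_PORT ?? 9090; // fallback port chosen for this sketch only

console.log(`metrics on port ${metricsPort}, production=${isProduction}`);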
package/src/util/memory-tracking.ts
CHANGED
@@ -1,4 +1,4 @@
- import
+ import { logger } from '@powersync/lib-services-framework';

  /**
   * Track and log memory usage.
@@ -57,7 +57,7 @@ export function trackMemoryUsage() {
        )
      )`.replaceAll(/\s+/g, ' ');

-
+     logger.info(output);
    }
  }, 50);
}
package/src/util/pgwire_utils.ts
CHANGED
@@ -4,9 +4,9 @@ import * as bson from 'bson';
  import * as uuid from 'uuid';
  import * as pgwire from '@powersync/service-jpgwire';
  import { SqliteJsonValue, SqliteRow, ToastableSqliteRow, toSyncRulesRow } from '@powersync/service-sync-rules';
- import * as micro from '@journeyapps-platform/micro';

- import * as replication from '
+ import * as replication from '../replication/replication-index.js';
+ import { logger } from '@powersync/lib-services-framework';

  /**
   * pgwire message -> SQLite row.
@@ -133,7 +133,7 @@ export async function retriedQuery(db: pgwire.PgClient, ...args: any[]) {
      if (tries == 1) {
        throw e;
      }
-
+     logger.warn('Query error, retrying', e);
    }
  }
}
package/src/util/utils.ts
CHANGED
@@ -1,11 +1,11 @@
  import crypto from 'crypto';
  import * as pgwire from '@powersync/service-jpgwire';
  import { pgwireRows } from '@powersync/service-jpgwire';
- import * as micro from '@journeyapps-platform/micro';

- import * as storage from '
+ import * as storage from '../storage/storage-index.js';
  import { BucketChecksum, OpId } from './protocol-types.js';
  import { retriedQuery } from './pgwire_utils.js';
+ import { logger } from '@powersync/lib-services-framework';

  export type ChecksumMap = Map<string, BucketChecksum>;

@@ -90,14 +90,14 @@ export async function getClientCheckpoint(

  const timeout = options?.timeout ?? 50_000;

-
+ logger.info(`Waiting for LSN checkpoint: ${lsn}`);
  while (Date.now() - start < timeout) {
    const cp = await bucketStorage.getActiveCheckpoint();
    if (!cp.hasSyncRules()) {
      throw new Error('No sync rules available');
    }
    if (cp.lsn >= lsn) {
-
+     logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`);
      return cp.checkpoint;
    }

@@ -117,6 +117,6 @@ export async function createWriteCheckpoint(
  );

  const id = await bucketStorage.createWriteCheckpoint(user_id, { '1': lsn });
-
+ logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`);
  return id;
}
package/test/src/auth.test.ts
CHANGED
@@ -262,7 +262,8 @@ describe('JWT Auth', () => {
    expect(errors).toEqual([]);
    expect(keys.length).toBeGreaterThanOrEqual(1);

-
+   // The localhost hostname fails to resolve correctly on MacOS https://github.com/nodejs/help/issues/2163
+   const invalid = new RemoteJWKSCollector('https://127.0.0.1/.well-known/jwks.json', {
      block_local_ip: true
    });
    expect(invalid.getKeys()).rejects.toThrow('IPs in this range are not supported');
@@ -278,7 +279,8 @@ describe('JWT Auth', () => {
    expect(errors).toEqual([]);
    expect(keys.length).toBeGreaterThanOrEqual(1);

-
+   // The localhost hostname fails to resolve correctly on MacOS https://github.com/nodejs/help/issues/2163
+   const invalid = new RemoteJWKSCollector('https://127.0.0.1/.well-known/jwks.json');
    // Should try and fetch
    expect(invalid.getKeys()).rejects.toThrow('ECONNREFUSED');
  });
package/test/src/data_storage.test.ts
CHANGED
@@ -897,6 +897,183 @@
      ]);
    });

+   test('changed data with replica identity full', async () => {
+     const sync_rules = SqlSyncRules.fromYaml(`
+ bucket_definitions:
+   global:
+     data:
+       - SELECT id, description FROM "test"
+ `);
+     const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+     const sourceTable = makeTestTable('test', ['id', 'description']);
+
+     // Pre-setup
+     const result1 = await storage.startBatch({}, async (batch) => {
+       await batch.save({
+         sourceTable,
+         tag: 'insert',
+         after: {
+           id: 'test1',
+           description: 'test1a'
+         }
+       });
+     });
+
+     const checkpoint1 = result1?.flushed_op ?? '0';
+
+     const result2 = await storage.startBatch({}, async (batch) => {
+       // Unchanged, but has a before id
+       await batch.save({
+         sourceTable,
+         tag: 'update',
+         before: {
+           id: 'test1',
+           description: 'test1a'
+         },
+         after: {
+           id: 'test1',
+           description: 'test1b'
+         }
+       });
+     });
+
+     const result3 = await storage.startBatch({}, async (batch) => {
+       // Delete
+       await batch.save({
+         sourceTable,
+         tag: 'delete',
+         before: {
+           id: 'test1',
+           description: 'test1b'
+         },
+         after: undefined
+       });
+     });
+
+     const checkpoint3 = result3!.flushed_op;
+
+     const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
+     const data = batch[0].data.map((d) => {
+       return {
+         op: d.op,
+         object_id: d.object_id,
+         data: d.data,
+         subkey: d.subkey
+       };
+     });
+
+     // Operations must be in this order
+     expect(data).toEqual([
+       // 2
+       // The REMOVE is expected because the subkey changes
+       {
+         op: 'REMOVE',
+         object_id: 'test1',
+         data: null,
+         subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+       },
+       {
+         op: 'PUT',
+         object_id: 'test1',
+         data: JSON.stringify({ id: 'test1', description: 'test1b' }),
+         subkey: '6544e3899293153fa7b38331/500e9b68-a2fd-51ff-9c00-313e2fb9f562'
+       },
+       // 3
+       {
+         op: 'REMOVE',
+         object_id: 'test1',
+         data: null,
+         subkey: '6544e3899293153fa7b38331/500e9b68-a2fd-51ff-9c00-313e2fb9f562'
+       }
+     ]);
+   });
+
+   test('unchanged data with replica identity full', async () => {
+     const sync_rules = SqlSyncRules.fromYaml(`
+ bucket_definitions:
+   global:
+     data:
+       - SELECT id, description FROM "test"
+ `);
+     const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+     const sourceTable = makeTestTable('test', ['id', 'description']);
+
+     // Pre-setup
+     const result1 = await storage.startBatch({}, async (batch) => {
+       await batch.save({
+         sourceTable,
+         tag: 'insert',
+         after: {
+           id: 'test1',
+           description: 'test1a'
+         }
+       });
+     });
+
+     const checkpoint1 = result1?.flushed_op ?? '0';
+
+     const result2 = await storage.startBatch({}, async (batch) => {
+       // Unchanged, but has a before id
+       await batch.save({
+         sourceTable,
+         tag: 'update',
+         before: {
+           id: 'test1',
+           description: 'test1a'
+         },
+         after: {
+           id: 'test1',
+           description: 'test1a'
+         }
+       });
+     });
+
+     const result3 = await storage.startBatch({}, async (batch) => {
+       // Delete
+       await batch.save({
+         sourceTable,
+         tag: 'delete',
+         before: {
+           id: 'test1',
+           description: 'test1a'
+         },
+         after: undefined
+       });
+     });
+
+     const checkpoint3 = result3!.flushed_op;
+
+     const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
+     const data = batch[0].data.map((d) => {
+       return {
+         op: d.op,
+         object_id: d.object_id,
+         data: d.data,
+         subkey: d.subkey
+       };
+     });
+
+     // Operations must be in this order
+     expect(data).toEqual([
+       // 2
+       {
+         op: 'PUT',
+         object_id: 'test1',
+         data: JSON.stringify({ id: 'test1', description: 'test1a' }),
+         subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+       },
+       // 3
+       {
+         op: 'REMOVE',
+         object_id: 'test1',
+         data: null,
+         subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+       }
+     ]);
+   });
+
    test('large batch', async () => {
      // Test syncing a batch of data that is small in count,
      // but large enough in size to be split over multiple returned batches.
package/test/src/env.ts
CHANGED
@@ -1,8 +1,8 @@
- import
+ import { utils } from '@powersync/lib-services-framework';

- export const env =
-   MONGO_TEST_URL:
-   PG_TEST_URL:
-   CI:
-   SLOW_TESTS:
+ export const env = utils.collectEnvironmentVariables({
+   MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'),
+   PG_TEST_URL: utils.type.string.default('postgres://postgres:postgres@localhost:5432/powersync_test'),
+   CI: utils.type.boolean.default('false'),
+   SLOW_TESTS: utils.type.boolean.default('false')
  });
package/test/src/pg_test.test.ts
CHANGED
@@ -86,6 +86,12 @@ VALUES(6, 'epoch'::timestamp, 'epoch'::timestamptz);

  INSERT INTO test_data(id, timestamp, timestamptz)
  VALUES(7, 'infinity'::timestamp, 'infinity'::timestamptz);
+
+ INSERT INTO test_data(id, timestamptz)
+ VALUES(8, '0022-02-03 12:13:14+03'::timestamptz);
+
+ INSERT INTO test_data(id, timestamptz)
+ VALUES(9, '10022-02-03 12:13:14+03'::timestamptz);
  `);
}

@@ -186,6 +192,18 @@ VALUES(10, ARRAY['null']::TEXT[]);
    timestamp: '9999-12-31 23:59:59',
    timestamptz: '9999-12-31 23:59:59Z'
  });
+
+ expect(transformed[7]).toMatchObject({
+   id: 8n,
+   timestamptz: '0022-02-03 09:13:14Z'
+ });
+
+ expect(transformed[8]).toMatchObject({
+   id: 9n,
+   // 10022-02-03 12:13:14+03 - out of range of both our date parsing logic, and sqlite's date functions
+   // We can consider just preserving the source string as an alternative if this causes issues.
+   timestamptz: null
+ });
}

function checkResultArrays(transformed: Record<string, any>[]) {
package/test/src/slow_tests.test.ts
CHANGED
@@ -62,7 +62,7 @@ function defineSlowTests(factory: StorageFactory) {
  bucket_definitions:
    global:
      data:
-       - SELECT
+       - SELECT * FROM "test_data"
  `;
    const syncRules = await f.updateSyncRules({ content: syncRuleContent });
    const storage = f.getInstance(syncRules.parsed());
@@ -76,7 +76,10 @@ bucket_definitions:
    walStream = new WalStream(options);

    await pool.query(`DROP TABLE IF EXISTS test_data`);
-   await pool.query(
+   await pool.query(
+     `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text, num decimal)`
+   );
+   await pool.query(`ALTER TABLE test_data REPLICA IDENTITY FULL`);

    await walStream.initReplication(replicationConnection);
    await storage.autoActivate();
@@ -88,14 +91,17 @@ bucket_definitions:

    while (!abort && Date.now() - start < TEST_DURATION_MS) {
      const bg = async () => {
-       for (let j = 0; j <
-       const n =
+       for (let j = 0; j < 1 && !abort; j++) {
+         const n = 1;
          let statements: pgwire.Statement[] = [];
          for (let i = 0; i < n; i++) {
            const description = `test${i}`;
            statements.push({
-             statement: `INSERT INTO test_data(description) VALUES($1) returning id as test_id`,
-             params: [
+             statement: `INSERT INTO test_data(description, num) VALUES($1, $2) returning id as test_id`,
+             params: [
+               { type: 'varchar', value: description },
+               { type: 'float8', value: Math.random() }
+             ]
            });
          }
          const results = await pool.query(...statements);
@@ -104,6 +110,24 @@ bucket_definitions:
          });
          await new Promise((resolve) => setTimeout(resolve, Math.random() * 30));

+         if (Math.random() > 0.5) {
+           const updateStatements: pgwire.Statement[] = ids.map((id) => {
+             return {
+               statement: `UPDATE test_data SET num = $2 WHERE id = $1`,
+               params: [
+                 { type: 'uuid', value: id },
+                 { type: 'float8', value: Math.random() }
+               ]
+             };
+           });
+
+           await pool.query(...updateStatements);
+           if (Math.random() > 0.5) {
+             // Special case - an update that doesn't change data
+             await pool.query(...updateStatements);
+           }
+         }
+
          const deleteStatements: pgwire.Statement[] = ids.map((id) => {
            return {
              statement: `DELETE FROM test_data WHERE id = $1`,
@@ -129,6 +153,21 @@ bucket_definitions:
        return bson.deserialize((doc.data as mongo.Binary).buffer) as SqliteRow;
      });
      expect(transformed).toEqual([]);
+
+     // Check that each PUT has a REMOVE
+     const ops = await f.db.bucket_data.find().sort({ _id: 1 }).toArray();
+     let active = new Set<string>();
+     for (let op of ops) {
+       const key = op.source_key.toHexString();
+       if (op.op == 'PUT') {
+         active.add(key);
+       } else if (op.op == 'REMOVE') {
+         active.delete(key);
+       }
+     }
+     if (active.size > 0) {
+       throw new Error(`${active.size} rows not removed`);
+     }
    }

    abortController.abort();
package/test/tsconfig.json
CHANGED
package/tsconfig.json
CHANGED
@@ -1,15 +1,11 @@
  {
-   "extends": "
+   "extends": "../../tsconfig.base.json",
    "compilerOptions": {
      "rootDir": "src",
      "outDir": "dist",
-     "baseUrl": ".",
      "esModuleInterop": true,
      "skipLibCheck": true,
-     "sourceMap": true
-     "paths": {
-       "@/*": ["./src/*"]
-     }
+     "sourceMap": true
    },
    "include": ["src"],
    "references": [
@@ -27,6 +23,9 @@
    },
    {
      "path": "../sync-rules"
+   },
+   {
+     "path": "../../libs/lib-services"
    }
  ]
}