@powersync/service-core 1.18.2 → 1.19.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/CHANGELOG.md +35 -0
  2. package/dist/api/RouteAPI.d.ts +2 -2
  3. package/dist/api/diagnostics.js +4 -3
  4. package/dist/api/diagnostics.js.map +1 -1
  5. package/dist/auth/JwtPayload.d.ts +7 -8
  6. package/dist/auth/JwtPayload.js +19 -1
  7. package/dist/auth/JwtPayload.js.map +1 -1
  8. package/dist/auth/KeyStore.js +2 -1
  9. package/dist/auth/KeyStore.js.map +1 -1
  10. package/dist/metrics/open-telemetry/util.js +3 -1
  11. package/dist/metrics/open-telemetry/util.js.map +1 -1
  12. package/dist/replication/AbstractReplicator.d.ts +1 -0
  13. package/dist/replication/AbstractReplicator.js +16 -6
  14. package/dist/replication/AbstractReplicator.js.map +1 -1
  15. package/dist/routes/auth.d.ts +0 -1
  16. package/dist/routes/auth.js +2 -4
  17. package/dist/routes/auth.js.map +1 -1
  18. package/dist/routes/configure-fastify.js +1 -1
  19. package/dist/routes/configure-fastify.js.map +1 -1
  20. package/dist/routes/endpoints/admin.d.ts +3 -0
  21. package/dist/routes/endpoints/admin.js +8 -2
  22. package/dist/routes/endpoints/admin.js.map +1 -1
  23. package/dist/routes/endpoints/checkpointing.js +3 -3
  24. package/dist/routes/endpoints/checkpointing.js.map +1 -1
  25. package/dist/routes/endpoints/socket-route.js +3 -6
  26. package/dist/routes/endpoints/socket-route.js.map +1 -1
  27. package/dist/routes/endpoints/sync-rules.js +5 -5
  28. package/dist/routes/endpoints/sync-rules.js.map +1 -1
  29. package/dist/routes/endpoints/sync-stream.js +3 -6
  30. package/dist/routes/endpoints/sync-stream.js.map +1 -1
  31. package/dist/routes/router.d.ts +0 -1
  32. package/dist/routes/router.js.map +1 -1
  33. package/dist/storage/PersistedSyncRulesContent.d.ts +3 -2
  34. package/dist/storage/SyncRulesBucketStorage.d.ts +12 -4
  35. package/dist/storage/SyncRulesBucketStorage.js.map +1 -1
  36. package/dist/storage/bson.d.ts +3 -3
  37. package/dist/storage/bson.js.map +1 -1
  38. package/dist/sync/BucketChecksumState.d.ts +7 -10
  39. package/dist/sync/BucketChecksumState.js +16 -15
  40. package/dist/sync/BucketChecksumState.js.map +1 -1
  41. package/dist/sync/sync.d.ts +2 -2
  42. package/dist/sync/sync.js +5 -7
  43. package/dist/sync/sync.js.map +1 -1
  44. package/dist/util/config/collectors/config-collector.js +5 -2
  45. package/dist/util/config/collectors/config-collector.js.map +1 -1
  46. package/dist/util/config.d.ts +2 -0
  47. package/dist/util/config.js +15 -2
  48. package/dist/util/config.js.map +1 -1
  49. package/package.json +5 -5
  50. package/src/api/RouteAPI.ts +2 -2
  51. package/src/api/diagnostics.ts +5 -4
  52. package/src/auth/JwtPayload.ts +16 -8
  53. package/src/auth/KeyStore.ts +1 -1
  54. package/src/metrics/open-telemetry/util.ts +3 -1
  55. package/src/replication/AbstractReplicator.ts +15 -7
  56. package/src/routes/auth.ts +2 -4
  57. package/src/routes/configure-fastify.ts +1 -1
  58. package/src/routes/endpoints/admin.ts +8 -2
  59. package/src/routes/endpoints/checkpointing.ts +5 -3
  60. package/src/routes/endpoints/socket-route.ts +3 -6
  61. package/src/routes/endpoints/sync-rules.ts +5 -5
  62. package/src/routes/endpoints/sync-stream.ts +3 -6
  63. package/src/routes/router.ts +0 -2
  64. package/src/storage/PersistedSyncRulesContent.ts +4 -2
  65. package/src/storage/SyncRulesBucketStorage.ts +13 -4
  66. package/src/storage/bson.ts +3 -3
  67. package/src/sync/BucketChecksumState.ts +26 -28
  68. package/src/sync/sync.ts +12 -14
  69. package/src/util/config/collectors/config-collector.ts +9 -2
  70. package/src/util/config.ts +20 -2
  71. package/test/src/auth.test.ts +76 -20
  72. package/test/src/config.test.ts +17 -0
  73. package/test/src/routes/stream.test.ts +9 -9
  74. package/test/src/sync/BucketChecksumState.test.ts +23 -52
  75. package/tsconfig.json +0 -3
  76. package/tsconfig.tsbuildinfo +1 -1
  77. package/vitest.config.ts +6 -7
@@ -1,12 +1,25 @@
1
1
  import * as fs from 'fs/promises';
2
- import { container } from '@powersync/lib-services-framework';
2
+ import winston from 'winston';
3
+ import { container, logger, LogFormat, DEFAULT_LOG_LEVEL, DEFAULT_LOG_FORMAT } from '@powersync/lib-services-framework';
3
4
  import { CompoundConfigCollector } from './util-index.js';
5
+ export function configureLogger(config) {
6
+ const level = process.env.PS_LOG_LEVEL ?? config?.level ?? DEFAULT_LOG_LEVEL;
7
+ const format = process.env.PS_LOG_FORMAT ?? config?.format ?? DEFAULT_LOG_FORMAT;
8
+ const winstonFormat = format === 'json' ? LogFormat.production : LogFormat.development;
9
+ // We want the user to always be aware that a log level was configured (they might forget they set it in the config and wonder why they aren't seeing logs)
10
+ // We log this using the configured format, but before we configure the level.
11
+ logger.configure({ level: DEFAULT_LOG_LEVEL, format: winstonFormat, transports: [new winston.transports.Console()] });
12
+ logger.info(`Configured logger with level "${level}" and format "${format}"`);
13
+ logger.configure({ level, format: winstonFormat, transports: [new winston.transports.Console()] });
14
+ }
4
15
  /**
5
16
  * Loads the resolved config using the registered config collector
6
17
  */
7
18
  export async function loadConfig(runnerConfig) {
8
19
  const collector = container.getImplementation(CompoundConfigCollector);
9
- return collector.collectConfig(runnerConfig);
20
+ const config = await collector.collectConfig(runnerConfig);
21
+ configureLogger(config.base_config.system?.logging);
22
+ return config;
10
23
  }
11
24
  export async function loadSyncRules(config) {
12
25
  const sync_rules = config.sync_rules;
@@ -1 +1 @@
1
- {"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/util/config.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,aAAa,CAAC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,mCAAmC,CAAC;AAE9D,OAAO,EAAE,uBAAuB,EAAE,MAAM,iBAAiB,CAAC;AAE1D;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,UAAU,CAAC,YAA0B;IACzD,MAAM,SAAS,GAAG,SAAS,CAAC,iBAAiB,CAAC,uBAAuB,CAAC,CAAC;IACvE,OAAO,SAAS,CAAC,aAAa,CAAC,YAAY,CAAC,CAAC;AAC/C,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,aAAa,CAAC,MAA+B;IACjE,MAAM,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC;IACrC,IAAI,UAAU,CAAC,OAAO,EAAE,CAAC;QACvB,OAAO,UAAU,CAAC,OAAO,CAAC;IAC5B,CAAC;SAAM,IAAI,UAAU,CAAC,IAAI,EAAE,CAAC;QAC3B,OAAO,MAAM,EAAE,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IACrD,CAAC;AACH,CAAC"}
1
+ {"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/util/config.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,aAAa,CAAC;AAClC,OAAO,OAAO,MAAM,SAAS,CAAC;AAE9B,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,mCAAmC,CAAC;AAGxH,OAAO,EAAE,uBAAuB,EAAE,MAAM,iBAAiB,CAAC;AAE1D,MAAM,UAAU,eAAe,CAAC,MAAiC;IAC/D,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,IAAI,MAAM,EAAE,KAAK,IAAI,iBAAiB,CAAC;IAC7E,MAAM,MAAM,GACT,OAAO,CAAC,GAAG,CAAC,aAAoD,IAAI,MAAM,EAAE,MAAM,IAAI,kBAAkB,CAAC;IAC5G,MAAM,aAAa,GAAG,MAAM,KAAK,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC,WAAW,CAAC;IAEvF,2JAA2J;IAC3J,8EAA8E;IAC9E,MAAM,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,iBAAiB,EAAE,MAAM,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC,IAAI,OAAO,CAAC,UAAU,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC;IACtH,MAAM,CAAC,IAAI,CAAC,iCAAiC,KAAK,iBAAiB,MAAM,GAAG,CAAC,CAAC;IAE9E,MAAM,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC,IAAI,OAAO,CAAC,UAAU,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC;AACrG,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,UAAU,CAAC,YAA0B;IACzD,MAAM,SAAS,GAAG,SAAS,CAAC,iBAAiB,CAAC,uBAAuB,CAAC,CAAC;IACvE,MAAM,MAAM,GAAG,MAAM,SAAS,CAAC,aAAa,CAAC,YAAY,CAAC,CAAC;IAC3D,eAAe,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACpD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,aAAa,CAAC,MAA+B;IACjE,MAAM,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC;IACrC,IAAI,UAAU,CAAC,OAAO,EAAE,CAAC;QACvB,OAAO,UAAU,CAAC,OAAO,CAAC;IAC5B,CAAC;SAAM,IAAI,UAAU,CAAC,IAAI,EAAE,CAAC;QAC3B,OAAO,MAAM,EAAE,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IACrD,CAAC;AACH,CAAC"}
package/package.json CHANGED
@@ -5,7 +5,7 @@
5
5
  "publishConfig": {
6
6
  "access": "public"
7
7
  },
8
- "version": "1.18.2",
8
+ "version": "1.19.1",
9
9
  "main": "dist/index.js",
10
10
  "license": "FSL-1.1-ALv2",
11
11
  "type": "module",
@@ -33,11 +33,11 @@
33
33
  "uuid": "^11.1.0",
34
34
  "winston": "^3.13.0",
35
35
  "yaml": "^2.3.2",
36
- "@powersync/lib-services-framework": "0.7.14",
36
+ "@powersync/lib-services-framework": "0.8.1",
37
37
  "@powersync/service-jsonbig": "0.17.12",
38
- "@powersync/service-rsocket-router": "0.2.11",
39
- "@powersync/service-sync-rules": "0.29.10",
40
- "@powersync/service-types": "0.13.3"
38
+ "@powersync/service-rsocket-router": "0.2.13",
39
+ "@powersync/service-sync-rules": "0.31.0",
40
+ "@powersync/service-types": "0.14.0"
41
41
  },
42
42
  "devDependencies": {
43
43
  "@types/async": "^3.2.24",
@@ -1,4 +1,4 @@
1
- import { SqlSyncRules, TablePattern } from '@powersync/service-sync-rules';
1
+ import { SyncConfig, TablePattern } from '@powersync/service-sync-rules';
2
2
  import * as types from '@powersync/service-types';
3
3
  import { ParseSyncRulesOptions, SyncRulesBucketStorage } from '../storage/storage-index.js';
4
4
 
@@ -41,7 +41,7 @@ export interface RouteAPI {
41
41
  * tables to ensure syncing should function according to the input
42
42
  * pattern. Debug errors and warnings are reported per table.
43
43
  */
44
- getDebugTablesInfo(tablePatterns: TablePattern[], sqlSyncRules: SqlSyncRules): Promise<PatternResult[]>;
44
+ getDebugTablesInfo(tablePatterns: TablePattern[], sqlSyncRules: SyncConfig): Promise<PatternResult[]>;
45
45
 
46
46
  /**
47
47
  * @returns The replication lag: that is the amount of data which has not been
@@ -1,5 +1,5 @@
1
1
  import { logger } from '@powersync/lib-services-framework';
2
- import { DEFAULT_TAG, SourceTableInterface, SqlSyncRules } from '@powersync/service-sync-rules';
2
+ import { DEFAULT_TAG, SourceTableInterface, SqlSyncRules, SyncConfigWithErrors } from '@powersync/service-sync-rules';
3
3
  import { ReplicationError, SyncRulesStatus, TableInfo } from '@powersync/service-types';
4
4
 
5
5
  import * as storage from '../storage/storage-index.js';
@@ -41,11 +41,11 @@ export async function getSyncRulesStatus(
41
41
  const check_connection = options.check_connection ?? false;
42
42
  const now = new Date().toISOString();
43
43
 
44
- let rules: SqlSyncRules;
44
+ let parsed: SyncConfigWithErrors;
45
45
  let persisted: storage.PersistedSyncRules;
46
46
  try {
47
47
  persisted = sync_rules.parsed(apiHandler.getParseSyncRulesOptions());
48
- rules = persisted.sync_rules;
48
+ parsed = persisted.sync_rules;
49
49
  } catch (e) {
50
50
  return {
51
51
  content: include_content ? sync_rules.sync_rules_content : undefined,
@@ -54,6 +54,7 @@ export async function getSyncRulesStatus(
54
54
  };
55
55
  }
56
56
 
57
+ const { config: rules, errors: syncRuleErrors } = parsed;
57
58
  const sourceConfig = await apiHandler.getSourceConfig();
58
59
  // This method can run under some situations if no connection is configured yet.
59
60
  // It will return a default tag in such a case. This default tag is not module specific.
@@ -131,7 +132,7 @@ export async function getSyncRulesStatus(
131
132
  });
132
133
  }
133
134
  errors.push(
134
- ...rules.errors.map((e) => {
135
+ ...syncRuleErrors.map((e) => {
135
136
  return {
136
137
  level: e.type,
137
138
  message: e.message,
@@ -1,15 +1,23 @@
1
+ import { BaseJwtPayload } from '@powersync/service-sync-rules';
2
+
1
3
  /**
2
4
  * Payload from a JWT, always signed.
3
- *
4
- * May have arbitrary additional parameters.
5
5
  */
6
- export interface JwtPayload {
6
+ export class JwtPayload extends BaseJwtPayload {
7
7
  /**
8
- * token_parameters.user_id
8
+ * Stringified version of sub. Used where we need a string user identifier, such as in write checkpoints
9
+ * and per-user metrics.
9
10
  */
10
- sub: string;
11
+ public readonly userIdString: string;
12
+
13
+ constructor(parsedPayload: Record<string, any>) {
14
+ super(parsedPayload);
15
+
16
+ this.userIdString = this.userIdJson?.toString() ?? 'null';
17
+ }
11
18
 
12
- iss?: string | undefined;
13
- exp: number;
14
- iat: number;
19
+ get exp(): number {
20
+ // Verified to be a number when parsing the token.
21
+ return this.parsedPayload.exp;
22
+ }
15
23
  }
@@ -122,7 +122,7 @@ export class KeyStore<Collector extends KeyCollector = KeyCollector> {
122
122
  throw new AuthorizationError(ErrorCode.PSYNC_S2101, `Payload parameters must be an object`);
123
123
  }
124
124
 
125
- return tokenPayload as JwtPayload;
125
+ return new JwtPayload(tokenPayload);
126
126
  }
127
127
 
128
128
  private async verifyInternal(token: string, options: jose.JWTVerifyOptions) {
@@ -51,7 +51,7 @@ export function createOpenTelemetryMetricsFactory(context: ServiceContext): Metr
51
51
  const instanceId = await bucketStorage.getPowerSyncInstanceId();
52
52
  resolvedInstanceId(instanceId);
53
53
  } catch (err) {
54
- resolvedInstanceId('Unknown');
54
+ resolvedInstanceId('unknown');
55
55
  }
56
56
  }
57
57
  });
@@ -59,6 +59,8 @@ export function createOpenTelemetryMetricsFactory(context: ServiceContext): Metr
59
59
  const resource = resourceFromAttributes({
60
60
  ['service']: 'PowerSync',
61
61
  ['service.version']: pkg.version,
62
+ ['source_type']: configuration.connections?.[0]?.type ?? 'unknown',
63
+ ['storage_type']: configuration.storage.type ?? 'unknown',
62
64
  ['instance_id']: instanceIdPromise
63
65
  });
64
66
 
@@ -1,4 +1,4 @@
1
- import { container, logger } from '@powersync/lib-services-framework';
1
+ import { container, ErrorCode, logger } from '@powersync/lib-services-framework';
2
2
  import { ReplicationMetric } from '@powersync/service-types';
3
3
  import { hrtime } from 'node:process';
4
4
  import winston from 'winston';
@@ -36,7 +36,7 @@ export interface AbstractReplicatorOptions {
36
36
  */
37
37
  export abstract class AbstractReplicator<T extends AbstractReplicationJob = AbstractReplicationJob> {
38
38
  protected logger: winston.Logger;
39
-
39
+ private lockAlerted: boolean = false;
40
40
  /**
41
41
  * Map of replication jobs by sync rule id. Usually there is only one running job, but there could be two when
42
42
  * transitioning to a new set of sync rules.
@@ -225,12 +225,20 @@ export abstract class AbstractReplicator<T extends AbstractReplicationJob = Abst
225
225
  if (syncRules.active) {
226
226
  activeJob = newJob;
227
227
  }
228
+ this.lockAlerted = false;
228
229
  } catch (e) {
229
- // Could be a sync rules parse error,
230
- // for example from stricter validation that was added.
231
- // This will be retried every couple of seconds.
232
- // When new (valid) sync rules are deployed and processed, this one will be disabled.
233
- this.logger.error('Failed to start replication for new sync rules', e);
230
+ if (e?.errorData?.code === ErrorCode.PSYNC_S1003) {
231
+ if (!this.lockAlerted) {
232
+ this.logger.info(`[${e.errorData.code}] ${e.errorData.description}`);
233
+ this.lockAlerted = true;
234
+ }
235
+ } else {
236
+ // Could be a sync rules parse error,
237
+ // for example from stricter validation that was added.
238
+ // This will be retried every couple of seconds.
239
+ // When new (valid) sync rules are deployed and processed, this one will be disabled.
240
+ this.logger.error('Failed to start replication for new sync rules', e);
241
+ }
234
242
  }
235
243
  }
236
244
  }
@@ -47,17 +47,15 @@ export async function authorizeUser(context: Context, authHeader: string = ''):
47
47
  export async function generateContext(serviceContext: ServiceContext, token: string) {
48
48
  const { configuration } = serviceContext;
49
49
 
50
- let tokenPayload: auth.JwtPayload;
51
50
  try {
52
51
  const maxAge = configuration.token_max_expiration;
53
- tokenPayload = await configuration.client_keystore.verifyJwt(token, {
52
+ const parsedToken = await configuration.client_keystore.verifyJwt(token, {
54
53
  defaultAudiences: configuration.jwt_audiences,
55
54
  maxAge: maxAge
56
55
  });
57
56
  return {
58
57
  context: {
59
- user_id: tokenPayload.sub,
60
- token_payload: tokenPayload
58
+ token_payload: parsedToken
61
59
  }
62
60
  };
63
61
  } catch (err) {
@@ -61,7 +61,7 @@ export function configureFastifyServer(server: fastify.FastifyInstance, options:
61
61
 
62
62
  const generateContext: ContextProvider = async (request, options) => {
63
63
  return {
64
- user_id: undefined,
64
+ user_id_string: undefined,
65
65
  service_context: service_context,
66
66
  logger: options.logger
67
67
  };
@@ -7,6 +7,9 @@ import * as storage from '../../storage/storage-index.js';
7
7
  import { authApi } from '../auth.js';
8
8
  import { routeDefinition } from '../router.js';
9
9
 
10
+ /**
11
+ * @deprecated This will be removed in a future release
12
+ */
10
13
  export const executeSql = routeDefinition({
11
14
  path: '/api/admin/v1/execute-sql',
12
15
  method: router.HTTPMethod.POST,
@@ -128,7 +131,7 @@ export const reprocess = routeDefinition({
128
131
  }
129
132
 
130
133
  const new_rules = await activeBucketStorage.updateSyncRules({
131
- content: active.sync_rules.content,
134
+ content: active.sync_rules.config.content,
132
135
  // These sync rules already passed validation. But if the rules are not valid anymore due
133
136
  // to a service change, we do want to report the error here.
134
137
  validate: true
@@ -177,7 +180,10 @@ export const validate = routeDefinition({
177
180
  sync_rules: SqlSyncRules.fromYaml(content, {
178
181
  ...apiHandler.getParseSyncRulesOptions(),
179
182
  schema
180
- })
183
+ }),
184
+ hydratedSyncRules() {
185
+ return this.sync_rules.config.hydrate();
186
+ }
181
187
  };
182
188
  },
183
189
  sync_rules_content: content,
@@ -52,18 +52,20 @@ export const writeCheckpoint2 = routeDefinition({
52
52
  authorize: authUser,
53
53
  validator: schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }),
54
54
  handler: async (payload) => {
55
- const { user_id, service_context } = payload.context;
55
+ const { token_payload, service_context } = payload.context;
56
56
 
57
57
  const apiHandler = service_context.routerEngine.getAPI();
58
58
 
59
59
  const { replicationHead, writeCheckpoint } = await util.createWriteCheckpoint({
60
- userId: user_id,
60
+ userId: token_payload!.userIdString,
61
61
  clientId: payload.params.client_id,
62
62
  api: apiHandler,
63
63
  storage: service_context.storageEngine.activeBucketStorage
64
64
  });
65
65
 
66
- logger.info(`Write checkpoint for ${user_id}/${payload.params.client_id}: ${writeCheckpoint} | ${replicationHead}`);
66
+ logger.info(
67
+ `Write checkpoint for ${token_payload!.userIdString}/${payload.params.client_id}: ${writeCheckpoint} | ${replicationHead}`
68
+ );
67
69
 
68
70
  return {
69
71
  write_checkpoint: String(writeCheckpoint)
@@ -18,14 +18,14 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
18
18
 
19
19
  logger.defaultMeta = {
20
20
  ...logger.defaultMeta,
21
- user_id: context.token_payload?.sub,
21
+ user_id: context.token_payload!.userIdJson,
22
22
  client_id: params.client_id,
23
23
  user_agent: context.user_agent
24
24
  };
25
25
 
26
26
  const sdkData: event_types.ConnectedUserData & event_types.ClientConnectionEventData = {
27
27
  client_id: params.client_id ?? '',
28
- user_id: context.user_id!,
28
+ user_id: context.token_payload!.userIdString,
29
29
  user_agent: context.user_agent,
30
30
  // At this point the token_payload is guaranteed to be present
31
31
  jwt_exp: new Date(context.token_payload!.exp * 1000),
@@ -109,10 +109,7 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
109
109
  for await (const data of sync.streamResponse({
110
110
  syncContext: syncContext,
111
111
  bucketStorage: bucketStorage,
112
- syncRules: {
113
- syncRules,
114
- version: bucketStorage.group_id
115
- },
112
+ syncRules,
116
113
  params: {
117
114
  ...params
118
115
  },
@@ -169,7 +169,7 @@ export const reprocessSyncRules = routeDefinition({
169
169
  }
170
170
 
171
171
  const new_rules = await activeBucketStorage.updateSyncRules({
172
- content: sync_rules.sync_rules.content,
172
+ content: sync_rules.sync_rules.config.content,
173
173
  // These sync rules already passed validation. But if the rules are not valid anymore due
174
174
  // to a service change, we do want to report the error here.
175
175
  validate: true
@@ -197,14 +197,14 @@ async function debugSyncRules(apiHandler: RouteAPI, sync_rules: string) {
197
197
  // No schema-based validation at this point
198
198
  schema: undefined
199
199
  });
200
- const source_table_patterns = rules.getSourceTables();
201
- const resolved_tables = await apiHandler.getDebugTablesInfo(source_table_patterns, rules);
200
+ const source_table_patterns = rules.config.getSourceTables();
201
+ const resolved_tables = await apiHandler.getDebugTablesInfo(source_table_patterns, rules.config);
202
202
 
203
203
  return {
204
204
  valid: true,
205
- bucket_definitions: rules.bucketSources.map((source) => source.debugRepresentation()),
205
+ bucket_definitions: rules.config.debugRepresentation(),
206
206
  source_tables: resolved_tables,
207
- data_tables: rules.debugGetOutputTables()
207
+ data_tables: rules.config.debugGetOutputTables()
208
208
  };
209
209
  } catch (e) {
210
210
  if (e instanceof SyncRulesErrors) {
@@ -42,12 +42,12 @@ export const syncStreamed = routeDefinition({
42
42
  ...logger.defaultMeta,
43
43
  user_agent: userAgent,
44
44
  client_id: clientId,
45
- user_id: payload.context.user_id,
45
+ user_id: payload.context.token_payload!.userIdJson,
46
46
  bson: useBson
47
47
  };
48
48
  const sdkData: event_types.ConnectedUserData & event_types.ClientConnectionEventData = {
49
49
  client_id: clientId ?? '',
50
- user_id: payload.context.user_id!,
50
+ user_id: payload.context.token_payload!.userIdString,
51
51
  user_agent: userAgent as string,
52
52
  // At this point the token_payload is guaranteed to be present
53
53
  jwt_exp: new Date(token_payload!.exp * 1000),
@@ -92,10 +92,7 @@ export const syncStreamed = routeDefinition({
92
92
  const syncLines = sync.streamResponse({
93
93
  syncContext: syncContext,
94
94
  bucketStorage,
95
- syncRules: {
96
- syncRules,
97
- version: bucketStorage.group_id
98
- },
95
+ syncRules,
99
96
  params: payload.params,
100
97
  token: payload.context.token_payload!,
101
98
  tracker,
@@ -12,8 +12,6 @@ export type RouterServiceContext = ServiceContext;
12
12
  * Common context for routes
13
13
  */
14
14
  export type Context = {
15
- user_id?: string;
16
-
17
15
  service_context: RouterServiceContext;
18
16
 
19
17
  token_payload?: JwtPayload;
@@ -1,4 +1,4 @@
1
- import { SqlSyncRules } from '@powersync/service-sync-rules';
1
+ import { HydratedSyncRules, SyncConfig, SyncConfigWithErrors } from '@powersync/service-sync-rules';
2
2
  import { ReplicationLock } from './ReplicationLock.js';
3
3
 
4
4
  export interface ParseSyncRulesOptions {
@@ -28,6 +28,8 @@ export interface PersistedSyncRulesContent {
28
28
 
29
29
  export interface PersistedSyncRules {
30
30
  readonly id: number;
31
- readonly sync_rules: SqlSyncRules;
31
+ readonly sync_rules: SyncConfigWithErrors;
32
32
  readonly slot_name: string;
33
+
34
+ hydratedSyncRules(): HydratedSyncRules;
33
35
  }
@@ -1,5 +1,5 @@
1
1
  import { Logger, ObserverClient } from '@powersync/lib-services-framework';
2
- import { ParameterLookup, SqlSyncRules, SqliteJsonRow } from '@powersync/service-sync-rules';
2
+ import { HydratedSyncRules, ScopedParameterLookup, SqliteJsonRow } from '@powersync/service-sync-rules';
3
3
  import * as util from '../util/util-index.js';
4
4
  import { BucketStorageBatch, FlushedResult, SaveUpdate } from './BucketStorageBatch.js';
5
5
  import { BucketStorageFactory } from './BucketStorageFactory.js';
@@ -32,7 +32,7 @@ export interface SyncRulesBucketStorage
32
32
  callback: (batch: BucketStorageBatch) => Promise<void>
33
33
  ): Promise<FlushedResult | null>;
34
34
 
35
- getParsedSyncRules(options: ParseSyncRulesOptions): SqlSyncRules;
35
+ getParsedSyncRules(options: ParseSyncRulesOptions): HydratedSyncRules;
36
36
 
37
37
  /**
38
38
  * Terminate the sync rules.
@@ -139,7 +139,7 @@ export interface ResolveTableOptions {
139
139
  connection_tag: string;
140
140
  entity_descriptor: SourceEntityDescriptor;
141
141
 
142
- sync_rules: SqlSyncRules;
142
+ sync_rules: HydratedSyncRules;
143
143
  }
144
144
 
145
145
  export interface ResolveTableResult {
@@ -218,10 +218,19 @@ export interface CompactOptions {
218
218
  moveBatchQueryLimit?: number;
219
219
 
220
220
  /**
221
+ * Minimum number of new operations in a bucket to trigger compaction of that bucket.
222
+ *
221
223
  * Minimum of 1, default of 10.
222
224
  */
223
225
  minBucketChanges?: number;
224
226
 
227
+ /**
228
+ * Minimum ratio of new operations to existing operations in a bucket to trigger compaction of that bucket.
229
+ *
230
+ * Number between 0 and 1, default of 0.1.
231
+ */
232
+ minChangeRatio?: number;
233
+
225
234
  /**
226
235
  * Internal/testing use: Cache size for compacting parameters.
227
236
  */
@@ -284,7 +293,7 @@ export interface ReplicationCheckpoint {
284
293
  *
285
294
  * This gets parameter sets specific to this checkpoint.
286
295
  */
287
- getParameterSets(lookups: ParameterLookup[]): Promise<SqliteJsonRow[]>;
296
+ getParameterSets(lookups: ScopedParameterLookup[]): Promise<SqliteJsonRow[]>;
288
297
  }
289
298
 
290
299
  export interface WatchWriteCheckpointOptions {
@@ -1,6 +1,6 @@
1
1
  import * as bson from 'bson';
2
2
 
3
- import { ParameterLookup, SqliteJsonValue } from '@powersync/service-sync-rules';
3
+ import { ScopedParameterLookup, SqliteJsonValue } from '@powersync/service-sync-rules';
4
4
  import { ReplicaId } from './BucketStorageBatch.js';
5
5
 
6
6
  type NodeBuffer = Buffer<ArrayBuffer>;
@@ -27,11 +27,11 @@ export const BSON_DESERIALIZE_DATA_OPTIONS: bson.DeserializeOptions = {
27
27
  * Lookup serialization must be number-agnostic. I.e. normalize numbers, instead of preserving numbers.
28
28
  * @param lookup
29
29
  */
30
- export const serializeLookupBuffer = (lookup: ParameterLookup): NodeBuffer => {
30
+ export const serializeLookupBuffer = (lookup: ScopedParameterLookup): NodeBuffer => {
31
31
  return bson.serialize({ l: lookup.values }) as NodeBuffer;
32
32
  };
33
33
 
34
- export const serializeLookup = (lookup: ParameterLookup) => {
34
+ export const serializeLookup = (lookup: ScopedParameterLookup) => {
35
35
  return new bson.Binary(serializeLookupBuffer(lookup));
36
36
  };
37
37