@powersync/service-core 1.13.4 → 1.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/CHANGELOG.md +29 -0
  2. package/dist/api/diagnostics.js +31 -1
  3. package/dist/api/diagnostics.js.map +1 -1
  4. package/dist/auth/KeyStore.d.ts +19 -0
  5. package/dist/auth/KeyStore.js +16 -4
  6. package/dist/auth/KeyStore.js.map +1 -1
  7. package/dist/auth/RemoteJWKSCollector.d.ts +3 -0
  8. package/dist/auth/RemoteJWKSCollector.js +3 -1
  9. package/dist/auth/RemoteJWKSCollector.js.map +1 -1
  10. package/dist/auth/StaticSupabaseKeyCollector.d.ts +2 -1
  11. package/dist/auth/StaticSupabaseKeyCollector.js +1 -1
  12. package/dist/auth/StaticSupabaseKeyCollector.js.map +1 -1
  13. package/dist/auth/utils.d.ts +19 -0
  14. package/dist/auth/utils.js +106 -3
  15. package/dist/auth/utils.js.map +1 -1
  16. package/dist/entry/commands/compact-action.js +10 -1
  17. package/dist/entry/commands/compact-action.js.map +1 -1
  18. package/dist/metrics/open-telemetry/util.js +3 -1
  19. package/dist/metrics/open-telemetry/util.js.map +1 -1
  20. package/dist/replication/AbstractReplicator.js +2 -2
  21. package/dist/replication/AbstractReplicator.js.map +1 -1
  22. package/dist/routes/configure-fastify.js +2 -1
  23. package/dist/routes/configure-fastify.js.map +1 -1
  24. package/dist/routes/endpoints/socket-route.js +1 -8
  25. package/dist/routes/endpoints/socket-route.js.map +1 -1
  26. package/dist/routes/endpoints/sync-stream.js +17 -4
  27. package/dist/routes/endpoints/sync-stream.js.map +1 -1
  28. package/dist/routes/route-register.d.ts +4 -0
  29. package/dist/routes/route-register.js +29 -15
  30. package/dist/routes/route-register.js.map +1 -1
  31. package/dist/storage/BucketStorageBatch.d.ts +12 -2
  32. package/dist/storage/BucketStorageBatch.js.map +1 -1
  33. package/dist/storage/SourceEntity.d.ts +5 -4
  34. package/dist/storage/SourceTable.d.ts +22 -20
  35. package/dist/storage/SourceTable.js +34 -30
  36. package/dist/storage/SourceTable.js.map +1 -1
  37. package/dist/storage/SyncRulesBucketStorage.d.ts +11 -5
  38. package/dist/storage/SyncRulesBucketStorage.js.map +1 -1
  39. package/dist/sync/BucketChecksumState.d.ts +1 -1
  40. package/dist/sync/BucketChecksumState.js +1 -1
  41. package/dist/sync/BucketChecksumState.js.map +1 -1
  42. package/dist/sync/util.d.ts +3 -1
  43. package/dist/sync/util.js +29 -1
  44. package/dist/sync/util.js.map +1 -1
  45. package/dist/util/config/compound-config-collector.js +23 -0
  46. package/dist/util/config/compound-config-collector.js.map +1 -1
  47. package/dist/util/lsn.d.ts +4 -0
  48. package/dist/util/lsn.js +11 -0
  49. package/dist/util/lsn.js.map +1 -0
  50. package/dist/util/util-index.d.ts +1 -0
  51. package/dist/util/util-index.js +1 -0
  52. package/dist/util/util-index.js.map +1 -1
  53. package/package.json +6 -4
  54. package/src/api/diagnostics.ts +33 -1
  55. package/src/auth/KeyStore.ts +28 -4
  56. package/src/auth/RemoteJWKSCollector.ts +5 -2
  57. package/src/auth/StaticSupabaseKeyCollector.ts +1 -1
  58. package/src/auth/utils.ts +123 -3
  59. package/src/entry/commands/compact-action.ts +9 -1
  60. package/src/metrics/open-telemetry/util.ts +4 -1
  61. package/src/replication/AbstractReplicator.ts +2 -2
  62. package/src/routes/configure-fastify.ts +3 -1
  63. package/src/routes/endpoints/socket-route.ts +1 -7
  64. package/src/routes/endpoints/sync-stream.ts +29 -21
  65. package/src/routes/route-register.ts +41 -15
  66. package/src/storage/BucketStorageBatch.ts +13 -2
  67. package/src/storage/SourceEntity.ts +5 -5
  68. package/src/storage/SourceTable.ts +48 -34
  69. package/src/storage/SyncRulesBucketStorage.ts +14 -7
  70. package/src/sync/BucketChecksumState.ts +2 -2
  71. package/src/sync/util.ts +31 -2
  72. package/src/util/config/compound-config-collector.ts +24 -0
  73. package/src/util/lsn.ts +8 -0
  74. package/src/util/util-index.ts +1 -0
  75. package/test/src/auth.test.ts +323 -1
  76. package/test/src/sync/BucketChecksumState.test.ts +36 -35
  77. package/tsconfig.tsbuildinfo +1 -1
@@ -1,6 +1,7 @@
1
1
  import { ErrorCode, errors, logger, router, schema } from '@powersync/lib-services-framework';
2
2
  import { RequestParameters } from '@powersync/service-sync-rules';
3
3
  import { Readable } from 'stream';
4
+ import Negotiator from 'negotiator';
4
5
 
5
6
  import * as sync from '../../sync/sync-index.js';
6
7
  import * as util from '../../util/util-index.js';
@@ -14,6 +15,10 @@ export enum SyncRoutes {
14
15
  STREAM = '/sync/stream'
15
16
  }
16
17
 
18
+ const ndJsonContentType = 'application/x-ndjson';
19
+ const concatenatedBsonContentType = 'application/vnd.powersync.bson-stream';
20
+ const supportedContentTypes = [ndJsonContentType, concatenatedBsonContentType];
21
+
17
22
  export const syncStreamed = routeDefinition({
18
23
  path: SyncRoutes.STREAM,
19
24
  method: router.HTTPMethod.POST,
@@ -26,12 +31,17 @@ export const syncStreamed = routeDefinition({
26
31
  const userAgent = headers['x-user-agent'] ?? headers['user-agent'];
27
32
  const clientId = payload.params.client_id;
28
33
  const streamStart = Date.now();
34
+ // This falls back to JSON unless there's preference for the bson-stream in the Accept header.
35
+ const useBson =
36
+ payload.request.headers.accept &&
37
+ new Negotiator(payload.request).mediaType(supportedContentTypes) == concatenatedBsonContentType;
29
38
 
30
39
  logger.defaultMeta = {
31
40
  ...logger.defaultMeta,
32
41
  user_agent: userAgent,
33
42
  client_id: clientId,
34
- user_id: payload.context.user_id
43
+ user_id: payload.context.user_id,
44
+ bson: useBson
35
45
  };
36
46
 
37
47
  if (routerEngine.closed) {
@@ -61,25 +71,23 @@ export const syncStreamed = routeDefinition({
61
71
  const tracker = new sync.RequestTracker(metricsEngine);
62
72
  try {
63
73
  metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(1);
64
- const stream = Readable.from(
65
- sync.transformToBytesTracked(
66
- sync.ndjson(
67
- sync.streamResponse({
68
- syncContext: syncContext,
69
- bucketStorage,
70
- syncRules: syncRules,
71
- params,
72
- syncParams,
73
- token: payload.context.token_payload!,
74
- tracker,
75
- signal: controller.signal,
76
- logger
77
- })
78
- ),
79
- tracker
80
- ),
81
- { objectMode: false, highWaterMark: 16 * 1024 }
82
- );
74
+ const syncLines = sync.streamResponse({
75
+ syncContext: syncContext,
76
+ bucketStorage,
77
+ syncRules: syncRules,
78
+ params,
79
+ syncParams,
80
+ token: payload.context.token_payload!,
81
+ tracker,
82
+ signal: controller.signal,
83
+ logger
84
+ });
85
+
86
+ const byteContents = useBson ? sync.bsonLines(syncLines) : sync.ndjson(syncLines);
87
+ const stream = Readable.from(sync.transformToBytesTracked(byteContents, tracker), {
88
+ objectMode: false,
89
+ highWaterMark: 16 * 1024
90
+ });
83
91
 
84
92
  // Best effort guess on why the stream was closed.
85
93
  // We use the `??=` operator everywhere, so that we catch the first relevant
@@ -114,7 +122,7 @@ export const syncStreamed = routeDefinition({
114
122
  return new router.RouterResponse({
115
123
  status: 200,
116
124
  headers: {
117
- 'Content-Type': 'application/x-ndjson'
125
+ 'Content-Type': useBson ? concatenatedBsonContentType : ndJsonContentType
118
126
  },
119
127
  data: stream,
120
128
  afterSend: async (details) => {
@@ -1,8 +1,17 @@
1
1
  import type fastify from 'fastify';
2
2
  import * as uuid from 'uuid';
3
3
 
4
- import { errors, HTTPMethod, logger, router } from '@powersync/lib-services-framework';
4
+ import {
5
+ ErrorCode,
6
+ errors,
7
+ HTTPMethod,
8
+ logger,
9
+ RouteNotFound,
10
+ router,
11
+ ServiceError
12
+ } from '@powersync/lib-services-framework';
5
13
  import { Context, ContextProvider, RequestEndpoint, RequestEndpointHandlerPayload } from './router.js';
14
+ import { FastifyReply } from 'fastify';
6
15
 
7
16
  export type FastifyEndpoint<I, O, C> = RequestEndpoint<I, O, C> & {
8
17
  parse?: boolean;
@@ -69,23 +78,11 @@ export function registerFastifyRoutes(
69
78
  const serviceError = errors.asServiceError(ex);
70
79
  requestLogger.error(`Request failed`, serviceError);
71
80
 
72
- response = new router.RouterResponse({
73
- status: serviceError.errorData.status || 500,
74
- headers: {
75
- 'Content-Type': 'application/json'
76
- },
77
- data: {
78
- error: serviceError.errorData
79
- }
80
- });
81
+ response = serviceErrorToResponse(serviceError);
81
82
  }
82
83
 
83
- Object.keys(response.headers).forEach((key) => {
84
- reply.header(key, response.headers[key]);
85
- });
86
- reply.status(response.status);
87
84
  try {
88
- await reply.send(response.data);
85
+ await respond(reply, response);
89
86
  } finally {
90
87
  await response.afterSend?.({ clientClosed: request.socket.closed });
91
88
  requestLogger.info(`${e.method} ${request.url}`, {
@@ -106,3 +103,32 @@ export function registerFastifyRoutes(
106
103
  });
107
104
  }
108
105
  }
106
+
107
+ /**
108
+ * Registers a custom not-found handler to ensure 404 error responses have the same schema as other service errors.
109
+ */
110
+ export function registerFastifyNotFoundHandler(app: fastify.FastifyInstance) {
111
+ app.setNotFoundHandler(async (request, reply) => {
112
+ await respond(reply, serviceErrorToResponse(new RouteNotFound(request.originalUrl, request.method)));
113
+ });
114
+ }
115
+
116
+ function serviceErrorToResponse(error: ServiceError): router.RouterResponse {
117
+ return new router.RouterResponse({
118
+ status: error.errorData.status || 500,
119
+ headers: {
120
+ 'Content-Type': 'application/json'
121
+ },
122
+ data: {
123
+ error: error.errorData
124
+ }
125
+ });
126
+ }
127
+
128
+ async function respond(reply: FastifyReply, response: router.RouterResponse) {
129
+ Object.keys(response.headers).forEach((key) => {
130
+ reply.header(key, response.headers[key]);
131
+ });
132
+ reply.status(response.status);
133
+ await reply.send(response.data);
134
+ }
@@ -60,18 +60,29 @@ export interface BucketStorageBatch extends ObserverClient<BucketBatchStorageLis
60
60
  keepalive(lsn: string): Promise<boolean>;
61
61
 
62
62
  /**
63
- * Set the LSN for a snapshot, before starting replication.
63
+ * Set the LSN that replication should resume from.
64
+ *
65
+ * This can be used for:
66
+ * 1. Setting the LSN for a snapshot, before starting replication.
67
+ * 2. Setting the LSN to resume from after a replication restart, without advancing the checkpoint LSN via a commit.
64
68
  *
65
69
  * Not required if the source database keeps track of this, for example with
66
70
  * PostgreSQL logical replication slots.
67
71
  */
68
- setSnapshotLsn(lsn: string): Promise<void>;
72
+ setResumeLsn(lsn: string): Promise<void>;
69
73
 
70
74
  /**
71
75
  * Get the last checkpoint LSN, from either commit or keepalive.
72
76
  */
73
77
  lastCheckpointLsn: string | null;
74
78
 
79
+ /**
80
+ * LSN to resume from.
81
+ *
82
+ * Not relevant for streams where the source keeps track of replication progress, such as Postgres.
83
+ */
84
+ resumeFromLsn: string | null;
85
+
75
86
  markSnapshotDone(tables: SourceTable[], no_checkpoint_before_lsn: string): Promise<SourceTable[]>;
76
87
 
77
88
  updateTableProgress(table: SourceTable, progress: Partial<TableSnapshotStatus>): Promise<SourceTable>;
@@ -10,17 +10,17 @@ export interface ColumnDescriptor {
10
10
  typeId?: number;
11
11
  }
12
12
 
13
- // TODO: This needs to be consolidated with SourceTable into something new.
14
13
  export interface SourceEntityDescriptor {
15
14
  /**
16
- * The internal id of the data source structure in the database.
17
- *
15
+ * The internal id of the source entity structure in the database.
18
16
  * If undefined, the schema and name are used as the identifier.
19
- *
20
17
  * If specified, this is specifically used to detect renames.
21
18
  */
22
19
  objectId: number | string | undefined;
23
20
  schema: string;
24
21
  name: string;
25
- replicationColumns: ColumnDescriptor[];
22
+ /**
23
+ * The columns that are used to uniquely identify a record in the source entity.
24
+ */
25
+ replicaIdColumns: ColumnDescriptor[];
26
26
  }
@@ -1,6 +1,16 @@
1
1
  import { DEFAULT_TAG } from '@powersync/service-sync-rules';
2
2
  import * as util from '../util/util-index.js';
3
- import { ColumnDescriptor } from './SourceEntity.js';
3
+ import { ColumnDescriptor, SourceEntityDescriptor } from './SourceEntity.js';
4
+
5
+ export interface SourceTableOptions {
6
+ id: any;
7
+ connectionTag: string;
8
+ objectId: number | string | undefined;
9
+ schema: string;
10
+ name: string;
11
+ replicaIdColumns: ColumnDescriptor[];
12
+ snapshotComplete: boolean;
13
+ }
4
14
 
5
15
  export interface TableSnapshotStatus {
6
16
  totalEstimatedCount: number;
@@ -8,7 +18,7 @@ export interface TableSnapshotStatus {
8
18
  lastKey: Uint8Array | null;
9
19
  }
10
20
 
11
- export class SourceTable {
21
+ export class SourceTable implements SourceEntityDescriptor {
12
22
  static readonly DEFAULT_TAG = DEFAULT_TAG;
13
23
 
14
24
  /**
@@ -45,37 +55,41 @@ export class SourceTable {
45
55
  */
46
56
  public snapshotStatus: TableSnapshotStatus | undefined = undefined;
47
57
 
48
- constructor(
49
- public readonly id: any,
50
- public readonly connectionTag: string,
51
- public readonly objectId: number | string | undefined,
52
- public readonly schema: string,
53
- public readonly table: string,
58
+ public snapshotComplete: boolean;
54
59
 
55
- public readonly replicaIdColumns: ColumnDescriptor[],
56
- public snapshotComplete: boolean
57
- ) {}
60
+ constructor(public readonly options: SourceTableOptions) {
61
+ this.snapshotComplete = options.snapshotComplete;
62
+ }
58
63
 
59
- get hasReplicaIdentity() {
60
- return this.replicaIdColumns.length > 0;
64
+ get id() {
65
+ return this.options.id;
61
66
  }
62
67
 
63
- /**
64
- * Use for postgres only.
65
- *
66
- * Usage: db.query({statement: `SELECT $1::regclass`, params: [{type: 'varchar', value: table.qualifiedName}]})
67
- */
68
- get qualifiedName() {
69
- return this.escapedIdentifier;
68
+ get connectionTag() {
69
+ return this.options.connectionTag;
70
+ }
71
+
72
+ get objectId() {
73
+ return this.options.objectId;
74
+ }
75
+
76
+ get schema() {
77
+ return this.options.schema;
78
+ }
79
+ get name() {
80
+ return this.options.name;
81
+ }
82
+
83
+ get replicaIdColumns() {
84
+ return this.options.replicaIdColumns;
70
85
  }
71
86
 
72
87
  /**
73
- * Use for postgres and logs only.
74
- *
75
- * Usage: db.query(`SELECT * FROM ${table.escapedIdentifier}`)
88
+ * Sanitized name of the entity in the format of "{schema}.{entity name}"
89
+ * Suitable for safe use in Postgres queries.
76
90
  */
77
- get escapedIdentifier() {
78
- return `${util.escapeIdentifier(this.schema)}.${util.escapeIdentifier(this.table)}`;
91
+ get qualifiedName() {
92
+ return `${util.escapeIdentifier(this.schema)}.${util.escapeIdentifier(this.name)}`;
79
93
  }
80
94
 
81
95
  get syncAny() {
@@ -86,15 +100,15 @@ export class SourceTable {
86
100
  * In-memory clone of the table status.
87
101
  */
88
102
  clone() {
89
- const copy = new SourceTable(
90
- this.id,
91
- this.connectionTag,
92
- this.objectId,
93
- this.schema,
94
- this.table,
95
- this.replicaIdColumns,
96
- this.snapshotComplete
97
- );
103
+ const copy = new SourceTable({
104
+ id: this.id,
105
+ connectionTag: this.connectionTag,
106
+ objectId: this.objectId,
107
+ schema: this.schema,
108
+ name: this.name,
109
+ replicaIdColumns: this.replicaIdColumns,
110
+ snapshotComplete: this.snapshotComplete
111
+ });
98
112
  copy.syncData = this.syncData;
99
113
  copy.syncParameters = this.syncParameters;
100
114
  copy.snapshotStatus = this.snapshotStatus;
@@ -50,8 +50,6 @@ export interface SyncRulesBucketStorage
50
50
  */
51
51
  clear(options?: ClearStorageOptions): Promise<void>;
52
52
 
53
- autoActivate(): Promise<void>;
54
-
55
53
  /**
56
54
  * Record a replication error.
57
55
  *
@@ -68,11 +66,6 @@ export interface SyncRulesBucketStorage
68
66
 
69
67
  getCheckpoint(): Promise<ReplicationCheckpoint>;
70
68
 
71
- /**
72
- * Used to resolve "dynamic" parameter queries.
73
- */
74
- getParameterSets(checkpoint: util.InternalOpId, lookups: ParameterLookup[]): Promise<SqliteJsonRow[]>;
75
-
76
69
  /**
77
70
  * Given two checkpoints, return the changes in bucket data and parameters that may have occurred
78
71
  * in that period.
@@ -200,6 +193,8 @@ export interface CompactOptions {
200
193
  */
201
194
  compactBuckets?: string[];
202
195
 
196
+ compactParameterData?: boolean;
197
+
203
198
  /** Minimum of 2 */
204
199
  clearBatchLimit?: number;
205
200
 
@@ -208,6 +203,11 @@ export interface CompactOptions {
208
203
 
209
204
  /** Minimum of 1 */
210
205
  moveBatchQueryLimit?: number;
206
+
207
+ /**
208
+ * Internal/testing use: Cache size for compacting parameters.
209
+ */
210
+ compactParameterCacheLimit?: number;
211
211
  }
212
212
 
213
213
  export interface ClearStorageOptions {
@@ -245,6 +245,13 @@ export interface SyncBucketDataChunk {
245
245
  export interface ReplicationCheckpoint {
246
246
  readonly checkpoint: util.InternalOpId;
247
247
  readonly lsn: string | null;
248
+
249
+ /**
250
+ * Used to resolve "dynamic" parameter queries.
251
+ *
252
+ * This gets parameter sets specific to this checkpoint.
253
+ */
254
+ getParameterSets(lookups: ParameterLookup[]): Promise<SqliteJsonRow[]>;
248
255
  }
249
256
 
250
257
  export interface WatchWriteCheckpointOptions {
@@ -440,7 +440,7 @@ export class BucketParameterState {
440
440
  if (hasParameterChange || this.cachedDynamicBuckets == null || this.cachedDynamicBucketSet == null) {
441
441
  dynamicBuckets = await querier.queryDynamicBucketDescriptions({
442
442
  getParameterSets(lookups) {
443
- return storage.getParameterSets(checkpoint.base.checkpoint, lookups);
443
+ return checkpoint.base.getParameterSets(lookups);
444
444
  }
445
445
  });
446
446
  this.cachedDynamicBuckets = dynamicBuckets;
@@ -501,7 +501,7 @@ export interface CheckpointLine {
501
501
  }
502
502
 
503
503
  // Use a more specific type to simplify testing
504
- export type BucketChecksumStateStorage = Pick<storage.SyncRulesBucketStorage, 'getChecksums' | 'getParameterSets'>;
504
+ export type BucketChecksumStateStorage = Pick<storage.SyncRulesBucketStorage, 'getChecksums'>;
505
505
 
506
506
  function limitedBuckets(buckets: string[] | { bucket: string }[], limit: number) {
507
507
  buckets = buckets.map((b) => {
package/src/sync/util.ts CHANGED
@@ -3,6 +3,7 @@ import * as timers from 'timers/promises';
3
3
  import { SemaphoreInterface } from 'async-mutex';
4
4
  import * as util from '../util/util-index.js';
5
5
  import { RequestTracker } from './RequestTracker.js';
6
+ import { serialize } from 'bson';
6
7
 
7
8
  export type TokenStreamOptions = {
8
9
  /**
@@ -76,6 +77,27 @@ export async function* tokenStream(
76
77
  }
77
78
  }
78
79
 
80
+ export function syncLineToBson(line: string | Record<string, any>): Buffer {
81
+ if (typeof line == 'string') {
82
+ // Should not happen with binary_data: true
83
+ throw new Error(`Unexpected string data: ${line}`);
84
+ } else {
85
+ // On NodeJS, serialize always returns a Buffer
86
+ return serialize(line) as Buffer;
87
+ }
88
+ }
89
+
90
+ export async function* bsonLines(iterator: AsyncIterable<string | null | Record<string, any>>): AsyncGenerator<Buffer> {
91
+ for await (let line of iterator) {
92
+ if (line == null) {
93
+ // Empty value just to flush iterator memory
94
+ continue;
95
+ } else {
96
+ yield syncLineToBson(line);
97
+ }
98
+ }
99
+ }
100
+
79
101
  export async function* ndjson(iterator: AsyncIterable<string | null | Record<string, any>>): AsyncGenerator<string> {
80
102
  for await (let data of iterator) {
81
103
  if (data == null) {
@@ -91,11 +113,18 @@ export async function* ndjson(iterator: AsyncIterable<string | null | Record<str
91
113
  }
92
114
 
93
115
  export async function* transformToBytesTracked(
94
- iterator: AsyncIterable<string>,
116
+ iterator: AsyncIterable<string | Buffer>,
95
117
  tracker: RequestTracker
96
118
  ): AsyncGenerator<Buffer> {
97
119
  for await (let data of iterator) {
98
- const encoded = Buffer.from(data, 'utf8');
120
+ let encoded: Buffer;
121
+
122
+ if (typeof data == 'string') {
123
+ encoded = Buffer.from(data, 'utf8');
124
+ } else {
125
+ encoded = data;
126
+ }
127
+
99
128
  tracker.addDataSynced(encoded.length);
100
129
  yield encoded;
101
130
  }
@@ -89,6 +89,7 @@ export class CompoundConfigCollector {
89
89
  }
90
90
  ])
91
91
  );
92
+ keyStore.supabaseAuthDebug.sharedSecretEnabled = true;
92
93
  }
93
94
 
94
95
  let jwks_uris = baseConfig.client_auth?.jwks_uri ?? [];
@@ -114,6 +115,29 @@ export class CompoundConfigCollector {
114
115
  for (let uri of jwks_uris) {
115
116
  collectors.add(new auth.CachedKeyCollector(new auth.RemoteJWKSCollector(uri, { lookupOptions: jwksLookup })));
116
117
  }
118
+ const supabaseAuthDetails = auth.getSupabaseJwksUrl(baseConfig.replication?.connections?.[0]);
119
+ keyStore.supabaseAuthDebug.jwksDetails = supabaseAuthDetails;
120
+
121
+ if (baseConfig.client_auth?.supabase) {
122
+ // Automatic support for Supabase signing keys:
123
+ // https://supabase.com/docs/guides/auth/signing-keys
124
+ if (supabaseAuthDetails != null) {
125
+ const collector = new auth.RemoteJWKSCollector(supabaseAuthDetails.url, {
126
+ lookupOptions: jwksLookup,
127
+ // Special case aud and max lifetime for Supabase keys
128
+ keyOptions: auth.SUPABASE_KEY_OPTIONS
129
+ });
130
+ collectors.add(new auth.CachedKeyCollector(collector));
131
+ keyStore.supabaseAuthDebug.jwksEnabled = true;
132
+ logger.info(`Configured Supabase Auth with ${supabaseAuthDetails.url}`);
133
+ } else {
134
+ logger.warn(
135
+ 'Supabase Auth is enabled, but no Supabase connection string found. Skipping Supabase JWKS URL configuration.'
136
+ );
137
+ }
138
+ } else if (supabaseAuthDetails != null) {
139
+ logger.warn(`Supabase connection string found, but Supabase Auth is not enabled in the config.`);
140
+ }
117
141
 
118
142
  const sync_rules = await this.collectSyncRules(baseConfig, runnerConfig);
119
143
 
@@ -0,0 +1,8 @@
1
+ /**
2
+ * Return the larger of two LSNs.
3
+ */
4
+ export function maxLsn(a: string | null | undefined, b: string | null | undefined): string | null {
5
+ if (a == null) return b ?? null;
6
+ if (b == null) return a;
7
+ return a > b ? a : b;
8
+ }
@@ -1,5 +1,6 @@
1
1
  export * from './alerting.js';
2
2
  export * from './env.js';
3
+ export * from './lsn.js';
3
4
  export * from './memory-tracking.js';
4
5
  export * from './Mutex.js';
5
6
  export * from './protocol-types.js';