@powersync/service-core 0.13.0 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (181)
  1. package/CHANGELOG.md +31 -0
  2. package/dist/entry/commands/compact-action.js +14 -14
  3. package/dist/entry/commands/compact-action.js.map +1 -1
  4. package/dist/entry/commands/migrate-action.js +15 -4
  5. package/dist/entry/commands/migrate-action.js.map +1 -1
  6. package/dist/index.d.ts +1 -3
  7. package/dist/index.js +1 -3
  8. package/dist/index.js.map +1 -1
  9. package/dist/migrations/PowerSyncMigrationManager.d.ts +17 -0
  10. package/dist/migrations/PowerSyncMigrationManager.js +21 -0
  11. package/dist/migrations/PowerSyncMigrationManager.js.map +1 -0
  12. package/dist/migrations/ensure-automatic-migrations.d.ts +4 -0
  13. package/dist/migrations/ensure-automatic-migrations.js +14 -0
  14. package/dist/migrations/ensure-automatic-migrations.js.map +1 -0
  15. package/dist/migrations/migrations-index.d.ts +2 -3
  16. package/dist/migrations/migrations-index.js +2 -3
  17. package/dist/migrations/migrations-index.js.map +1 -1
  18. package/dist/routes/configure-fastify.d.ts +12 -12
  19. package/dist/routes/endpoints/admin.d.ts +24 -24
  20. package/dist/storage/BucketStorage.d.ts +51 -3
  21. package/dist/storage/BucketStorage.js +26 -0
  22. package/dist/storage/BucketStorage.js.map +1 -1
  23. package/dist/storage/bson.d.ts +24 -0
  24. package/dist/storage/bson.js +73 -0
  25. package/dist/storage/bson.js.map +1 -0
  26. package/dist/storage/storage-index.d.ts +3 -14
  27. package/dist/storage/storage-index.js +3 -14
  28. package/dist/storage/storage-index.js.map +1 -1
  29. package/dist/sync/sync.js +3 -1
  30. package/dist/sync/sync.js.map +1 -1
  31. package/dist/system/ServiceContext.d.ts +3 -0
  32. package/dist/system/ServiceContext.js +11 -3
  33. package/dist/system/ServiceContext.js.map +1 -1
  34. package/dist/util/config/types.d.ts +2 -2
  35. package/dist/util/utils.d.ts +17 -1
  36. package/dist/util/utils.js +49 -1
  37. package/dist/util/utils.js.map +1 -1
  38. package/package.json +7 -8
  39. package/src/entry/commands/compact-action.ts +19 -14
  40. package/src/entry/commands/migrate-action.ts +17 -4
  41. package/src/index.ts +1 -4
  42. package/src/migrations/PowerSyncMigrationManager.ts +42 -0
  43. package/src/migrations/ensure-automatic-migrations.ts +15 -0
  44. package/src/migrations/migrations-index.ts +2 -3
  45. package/src/storage/BucketStorage.ts +59 -3
  46. package/src/storage/bson.ts +78 -0
  47. package/src/storage/storage-index.ts +3 -15
  48. package/src/sync/sync.ts +3 -1
  49. package/src/system/ServiceContext.ts +17 -4
  50. package/src/util/config/types.ts +2 -2
  51. package/src/util/utils.ts +47 -1
  52. package/test/src/env.ts +0 -1
  53. package/tsconfig.tsbuildinfo +1 -1
  54. package/dist/db/db-index.d.ts +0 -1
  55. package/dist/db/db-index.js +0 -2
  56. package/dist/db/db-index.js.map +0 -1
  57. package/dist/db/mongo.d.ts +0 -35
  58. package/dist/db/mongo.js +0 -73
  59. package/dist/db/mongo.js.map +0 -1
  60. package/dist/locks/LockManager.d.ts +0 -10
  61. package/dist/locks/LockManager.js +0 -7
  62. package/dist/locks/LockManager.js.map +0 -1
  63. package/dist/locks/MongoLocks.d.ts +0 -36
  64. package/dist/locks/MongoLocks.js +0 -81
  65. package/dist/locks/MongoLocks.js.map +0 -1
  66. package/dist/locks/locks-index.d.ts +0 -2
  67. package/dist/locks/locks-index.js +0 -3
  68. package/dist/locks/locks-index.js.map +0 -1
  69. package/dist/migrations/db/migrations/1684951997326-init.d.ts +0 -3
  70. package/dist/migrations/db/migrations/1684951997326-init.js +0 -33
  71. package/dist/migrations/db/migrations/1684951997326-init.js.map +0 -1
  72. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.d.ts +0 -2
  73. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js +0 -5
  74. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js.map +0 -1
  75. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.d.ts +0 -3
  76. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +0 -56
  77. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +0 -1
  78. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.d.ts +0 -3
  79. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js +0 -29
  80. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +0 -1
  81. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.d.ts +0 -3
  82. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js +0 -31
  83. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js.map +0 -1
  84. package/dist/migrations/definitions.d.ts +0 -18
  85. package/dist/migrations/definitions.js +0 -6
  86. package/dist/migrations/definitions.js.map +0 -1
  87. package/dist/migrations/executor.d.ts +0 -16
  88. package/dist/migrations/executor.js +0 -64
  89. package/dist/migrations/executor.js.map +0 -1
  90. package/dist/migrations/migrations.d.ts +0 -18
  91. package/dist/migrations/migrations.js +0 -110
  92. package/dist/migrations/migrations.js.map +0 -1
  93. package/dist/migrations/store/migration-store.d.ts +0 -11
  94. package/dist/migrations/store/migration-store.js +0 -46
  95. package/dist/migrations/store/migration-store.js.map +0 -1
  96. package/dist/storage/MongoBucketStorage.d.ts +0 -48
  97. package/dist/storage/MongoBucketStorage.js +0 -427
  98. package/dist/storage/MongoBucketStorage.js.map +0 -1
  99. package/dist/storage/mongo/MongoBucketBatch.d.ts +0 -74
  100. package/dist/storage/mongo/MongoBucketBatch.js +0 -683
  101. package/dist/storage/mongo/MongoBucketBatch.js.map +0 -1
  102. package/dist/storage/mongo/MongoCompactor.d.ts +0 -40
  103. package/dist/storage/mongo/MongoCompactor.js +0 -310
  104. package/dist/storage/mongo/MongoCompactor.js.map +0 -1
  105. package/dist/storage/mongo/MongoIdSequence.d.ts +0 -12
  106. package/dist/storage/mongo/MongoIdSequence.js +0 -21
  107. package/dist/storage/mongo/MongoIdSequence.js.map +0 -1
  108. package/dist/storage/mongo/MongoPersistedSyncRules.d.ts +0 -9
  109. package/dist/storage/mongo/MongoPersistedSyncRules.js +0 -9
  110. package/dist/storage/mongo/MongoPersistedSyncRules.js.map +0 -1
  111. package/dist/storage/mongo/MongoPersistedSyncRulesContent.d.ts +0 -20
  112. package/dist/storage/mongo/MongoPersistedSyncRulesContent.js +0 -26
  113. package/dist/storage/mongo/MongoPersistedSyncRulesContent.js.map +0 -1
  114. package/dist/storage/mongo/MongoStorageProvider.d.ts +0 -5
  115. package/dist/storage/mongo/MongoStorageProvider.js +0 -26
  116. package/dist/storage/mongo/MongoStorageProvider.js.map +0 -1
  117. package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +0 -38
  118. package/dist/storage/mongo/MongoSyncBucketStorage.js +0 -534
  119. package/dist/storage/mongo/MongoSyncBucketStorage.js.map +0 -1
  120. package/dist/storage/mongo/MongoSyncRulesLock.d.ts +0 -16
  121. package/dist/storage/mongo/MongoSyncRulesLock.js +0 -65
  122. package/dist/storage/mongo/MongoSyncRulesLock.js.map +0 -1
  123. package/dist/storage/mongo/MongoWriteCheckpointAPI.d.ts +0 -20
  124. package/dist/storage/mongo/MongoWriteCheckpointAPI.js +0 -104
  125. package/dist/storage/mongo/MongoWriteCheckpointAPI.js.map +0 -1
  126. package/dist/storage/mongo/OperationBatch.d.ts +0 -35
  127. package/dist/storage/mongo/OperationBatch.js +0 -119
  128. package/dist/storage/mongo/OperationBatch.js.map +0 -1
  129. package/dist/storage/mongo/PersistedBatch.d.ts +0 -46
  130. package/dist/storage/mongo/PersistedBatch.js +0 -223
  131. package/dist/storage/mongo/PersistedBatch.js.map +0 -1
  132. package/dist/storage/mongo/config.d.ts +0 -19
  133. package/dist/storage/mongo/config.js +0 -26
  134. package/dist/storage/mongo/config.js.map +0 -1
  135. package/dist/storage/mongo/db.d.ts +0 -36
  136. package/dist/storage/mongo/db.js +0 -47
  137. package/dist/storage/mongo/db.js.map +0 -1
  138. package/dist/storage/mongo/models.d.ts +0 -163
  139. package/dist/storage/mongo/models.js +0 -27
  140. package/dist/storage/mongo/models.js.map +0 -1
  141. package/dist/storage/mongo/util.d.ts +0 -54
  142. package/dist/storage/mongo/util.js +0 -190
  143. package/dist/storage/mongo/util.js.map +0 -1
  144. package/src/db/db-index.ts +0 -1
  145. package/src/db/mongo.ts +0 -81
  146. package/src/locks/LockManager.ts +0 -16
  147. package/src/locks/MongoLocks.ts +0 -142
  148. package/src/locks/locks-index.ts +0 -2
  149. package/src/migrations/db/migrations/1684951997326-init.ts +0 -38
  150. package/src/migrations/db/migrations/1688556755264-initial-sync-rules.ts +0 -5
  151. package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +0 -102
  152. package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +0 -34
  153. package/src/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.ts +0 -37
  154. package/src/migrations/definitions.ts +0 -21
  155. package/src/migrations/executor.ts +0 -87
  156. package/src/migrations/migrations.ts +0 -142
  157. package/src/migrations/store/migration-store.ts +0 -63
  158. package/src/storage/MongoBucketStorage.ts +0 -541
  159. package/src/storage/mongo/MongoBucketBatch.ts +0 -900
  160. package/src/storage/mongo/MongoCompactor.ts +0 -393
  161. package/src/storage/mongo/MongoIdSequence.ts +0 -24
  162. package/src/storage/mongo/MongoPersistedSyncRules.ts +0 -16
  163. package/src/storage/mongo/MongoPersistedSyncRulesContent.ts +0 -50
  164. package/src/storage/mongo/MongoStorageProvider.ts +0 -31
  165. package/src/storage/mongo/MongoSyncBucketStorage.ts +0 -640
  166. package/src/storage/mongo/MongoSyncRulesLock.ts +0 -85
  167. package/src/storage/mongo/MongoWriteCheckpointAPI.ts +0 -154
  168. package/src/storage/mongo/OperationBatch.ts +0 -131
  169. package/src/storage/mongo/PersistedBatch.ts +0 -285
  170. package/src/storage/mongo/config.ts +0 -40
  171. package/src/storage/mongo/db.ts +0 -88
  172. package/src/storage/mongo/models.ts +0 -187
  173. package/src/storage/mongo/util.ts +0 -203
  174. package/test/src/__snapshots__/sync.test.ts.snap +0 -332
  175. package/test/src/bucket_validation.test.ts +0 -143
  176. package/test/src/bucket_validation.ts +0 -60
  177. package/test/src/compacting.test.ts +0 -295
  178. package/test/src/data_storage.test.ts +0 -1569
  179. package/test/src/stream_utils.ts +0 -42
  180. package/test/src/sync.test.ts +0 -511
  181. package/test/src/util.ts +0 -150
@@ -1,42 +0,0 @@
1
- import { OplogEntry } from '@/util/protocol-types.js';
2
- import { JSONBig } from '@powersync/service-jsonbig';
3
-
4
- export function putOp(table: string, data: Record<string, any>): Partial<OplogEntry> {
5
- return {
6
- op: 'PUT',
7
- object_type: table,
8
- object_id: data.id,
9
- data: JSONBig.stringify(data)
10
- };
11
- }
12
-
13
- export function removeOp(table: string, id: string): Partial<OplogEntry> {
14
- return {
15
- op: 'REMOVE',
16
- object_type: table,
17
- object_id: id
18
- };
19
- }
20
-
21
- export function compareIds(a: OplogEntry, b: OplogEntry) {
22
- return a.object_id!.localeCompare(b.object_id!);
23
- }
24
-
25
- export async function oneFromAsync<T>(source: Iterable<T> | AsyncIterable<T>): Promise<T> {
26
- const items: T[] = [];
27
- for await (const item of source) {
28
- items.push(item);
29
- }
30
- if (items.length != 1) {
31
- throw new Error(`One item expected, got: ${items.length}`);
32
- }
33
- return items[0];
34
- }
35
-
36
- export async function fromAsync<T>(source: Iterable<T> | AsyncIterable<T>): Promise<T[]> {
37
- const items: T[] = [];
38
- for await (const item of source) {
39
- items.push(item);
40
- }
41
- return items;
42
- }
@@ -1,511 +0,0 @@
1
- import { SaveOperationTag } from '@/storage/storage-index.js';
2
- import { RequestTracker } from '@/sync/RequestTracker.js';
3
- import { streamResponse, SyncStreamParameters } from '@/sync/sync.js';
4
- import { StreamingSyncLine } from '@/util/protocol-types.js';
5
- import { JSONBig } from '@powersync/service-jsonbig';
6
- import { RequestParameters } from '@powersync/service-sync-rules';
7
- import * as timers from 'timers/promises';
8
- import { describe, expect, test } from 'vitest';
9
- import { BATCH_OPTIONS, makeTestTable, MONGO_STORAGE_FACTORY, PARSE_OPTIONS, StorageFactory } from './util.js';
10
-
11
- describe('sync - mongodb', function () {
12
- defineTests(MONGO_STORAGE_FACTORY);
13
- });
14
-
15
- const TEST_TABLE = makeTestTable('test', ['id']);
16
-
17
- const BASIC_SYNC_RULES = `
18
- bucket_definitions:
19
- mybucket:
20
- data:
21
- - SELECT * FROM test
22
- `;
23
-
24
- function defineTests(factory: StorageFactory) {
25
- const tracker = new RequestTracker();
26
-
27
- test('sync global data', async () => {
28
- const f = await factory();
29
-
30
- const syncRules = await f.updateSyncRules({
31
- content: BASIC_SYNC_RULES
32
- });
33
-
34
- const storage = f.getInstance(syncRules);
35
- await storage.autoActivate();
36
-
37
- const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
38
- await batch.save({
39
- sourceTable: TEST_TABLE,
40
- tag: SaveOperationTag.INSERT,
41
- after: {
42
- id: 't1',
43
- description: 'Test 1'
44
- },
45
- afterReplicaId: 't1'
46
- });
47
-
48
- await batch.save({
49
- sourceTable: TEST_TABLE,
50
- tag: SaveOperationTag.INSERT,
51
- after: {
52
- id: 't2',
53
- description: 'Test 2'
54
- },
55
- afterReplicaId: 't2'
56
- });
57
-
58
- await batch.commit('0/1');
59
- });
60
-
61
- const stream = streamResponse({
62
- storage: f,
63
- params: {
64
- buckets: [],
65
- include_checksum: true,
66
- raw_data: true
67
- },
68
- parseOptions: PARSE_OPTIONS,
69
- tracker,
70
- syncParams: new RequestParameters({ sub: '' }, {}),
71
- token: { exp: Date.now() / 1000 + 10 } as any
72
- });
73
-
74
- const lines = await consumeCheckpointLines(stream);
75
- expect(lines).toMatchSnapshot();
76
- });
77
-
78
- test('sync legacy non-raw data', async () => {
79
- const f = await factory();
80
-
81
- const syncRules = await f.updateSyncRules({
82
- content: BASIC_SYNC_RULES
83
- });
84
-
85
- const storage = await f.getInstance(syncRules);
86
- await storage.autoActivate();
87
-
88
- const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
89
- await batch.save({
90
- sourceTable: TEST_TABLE,
91
- tag: SaveOperationTag.INSERT,
92
- after: {
93
- id: 't1',
94
- description: 'Test\n"string"',
95
- large_num: 12345678901234567890n
96
- },
97
- afterReplicaId: 't1'
98
- });
99
-
100
- await batch.commit('0/1');
101
- });
102
-
103
- const stream = streamResponse({
104
- storage: f,
105
- params: {
106
- buckets: [],
107
- include_checksum: true,
108
- raw_data: false
109
- },
110
- parseOptions: PARSE_OPTIONS,
111
- tracker,
112
- syncParams: new RequestParameters({ sub: '' }, {}),
113
- token: { exp: Date.now() / 1000 + 10 } as any
114
- });
115
-
116
- const lines = await consumeCheckpointLines(stream);
117
- expect(lines).toMatchSnapshot();
118
- // Specifically check the number - this is the important part of the test
119
- expect(lines[1].data.data[0].data.large_num).toEqual(12345678901234567890n);
120
- });
121
-
122
- test('expired token', async () => {
123
- const f = await factory();
124
-
125
- const syncRules = await f.updateSyncRules({
126
- content: BASIC_SYNC_RULES
127
- });
128
-
129
- const storage = await f.getInstance(syncRules);
130
- await storage.autoActivate();
131
-
132
- const stream = streamResponse({
133
- storage: f,
134
- params: {
135
- buckets: [],
136
- include_checksum: true,
137
- raw_data: true
138
- },
139
- parseOptions: PARSE_OPTIONS,
140
- tracker,
141
- syncParams: new RequestParameters({ sub: '' }, {}),
142
- token: { exp: 0 } as any
143
- });
144
-
145
- const lines = await consumeCheckpointLines(stream);
146
- expect(lines).toMatchSnapshot();
147
- });
148
-
149
- test('sync updates to global data', async () => {
150
- const f = await factory();
151
-
152
- const syncRules = await f.updateSyncRules({
153
- content: BASIC_SYNC_RULES
154
- });
155
-
156
- const storage = await f.getInstance(syncRules);
157
- await storage.autoActivate();
158
-
159
- const stream = streamResponse({
160
- storage: f,
161
- params: {
162
- buckets: [],
163
- include_checksum: true,
164
- raw_data: true
165
- },
166
- parseOptions: PARSE_OPTIONS,
167
- tracker,
168
- syncParams: new RequestParameters({ sub: '' }, {}),
169
- token: { exp: Date.now() / 1000 + 10 } as any
170
- });
171
- const iter = stream[Symbol.asyncIterator]();
172
-
173
- expect(await getCheckpointLines(iter)).toMatchSnapshot();
174
-
175
- await storage.startBatch(BATCH_OPTIONS, async (batch) => {
176
- await batch.save({
177
- sourceTable: TEST_TABLE,
178
- tag: SaveOperationTag.INSERT,
179
- after: {
180
- id: 't1',
181
- description: 'Test 1'
182
- },
183
- afterReplicaId: 't1'
184
- });
185
-
186
- await batch.commit('0/1');
187
- });
188
-
189
- expect(await getCheckpointLines(iter)).toMatchSnapshot();
190
-
191
- await storage.startBatch(BATCH_OPTIONS, async (batch) => {
192
- await batch.save({
193
- sourceTable: TEST_TABLE,
194
- tag: SaveOperationTag.INSERT,
195
- after: {
196
- id: 't2',
197
- description: 'Test 2'
198
- },
199
- afterReplicaId: 't2'
200
- });
201
-
202
- await batch.commit('0/2');
203
- });
204
-
205
- expect(await getCheckpointLines(iter)).toMatchSnapshot();
206
-
207
- iter.return?.();
208
- });
209
-
210
- test('expiring token', async () => {
211
- const f = await factory();
212
-
213
- const syncRules = await f.updateSyncRules({
214
- content: BASIC_SYNC_RULES
215
- });
216
-
217
- const storage = await f.getInstance(syncRules);
218
- await storage.autoActivate();
219
-
220
- const exp = Date.now() / 1000 + 0.1;
221
-
222
- const stream = streamResponse({
223
- storage: f,
224
- params: {
225
- buckets: [],
226
- include_checksum: true,
227
- raw_data: true
228
- },
229
- parseOptions: PARSE_OPTIONS,
230
- tracker,
231
- syncParams: new RequestParameters({ sub: '' }, {}),
232
- token: { exp: exp } as any
233
- });
234
- const iter = stream[Symbol.asyncIterator]();
235
-
236
- const checkpoint = await getCheckpointLines(iter);
237
- expect(checkpoint).toMatchSnapshot();
238
-
239
- const expLines = await getCheckpointLines(iter);
240
- expect(expLines).toMatchSnapshot();
241
- });
242
-
243
- test('compacting data - invalidate checkpoint', async () => {
244
- // This tests a case of a compact operation invalidating a checkpoint in the
245
- // middle of syncing data.
246
- // This is expected to be rare in practice, but it is important to handle
247
- // this case correctly to maintain consistency on the client.
248
-
249
- const f = await factory();
250
-
251
- const syncRules = await f.updateSyncRules({
252
- content: BASIC_SYNC_RULES
253
- });
254
-
255
- const storage = await f.getInstance(syncRules);
256
- await storage.autoActivate();
257
-
258
- await storage.startBatch(BATCH_OPTIONS, async (batch) => {
259
- await batch.save({
260
- sourceTable: TEST_TABLE,
261
- tag: SaveOperationTag.INSERT,
262
- after: {
263
- id: 't1',
264
- description: 'Test 1'
265
- },
266
- afterReplicaId: 't1'
267
- });
268
-
269
- await batch.save({
270
- sourceTable: TEST_TABLE,
271
- tag: SaveOperationTag.INSERT,
272
- after: {
273
- id: 't2',
274
- description: 'Test 2'
275
- },
276
- afterReplicaId: 't2'
277
- });
278
-
279
- await batch.commit('0/1');
280
- });
281
-
282
- const stream = streamResponse({
283
- storage: f,
284
- params: {
285
- buckets: [],
286
- include_checksum: true,
287
- raw_data: true
288
- },
289
- parseOptions: PARSE_OPTIONS,
290
- tracker,
291
- syncParams: new RequestParameters({ sub: '' }, {}),
292
- token: { exp: Date.now() / 1000 + 10 } as any
293
- });
294
-
295
- const iter = stream[Symbol.asyncIterator]();
296
-
297
- // Only consume the first "checkpoint" message, and pause before receiving data.
298
- const lines = await consumeIterator(iter, { consume: false, isDone: (line) => (line as any)?.checkpoint != null });
299
- expect(lines).toMatchSnapshot();
300
- expect(lines[0]).toEqual({
301
- checkpoint: expect.objectContaining({
302
- last_op_id: '2'
303
- })
304
- });
305
-
306
- // Now we save additional data AND compact before continuing.
307
- // This invalidates the checkpoint we've received above.
308
-
309
- await storage.startBatch(BATCH_OPTIONS, async (batch) => {
310
- await batch.save({
311
- sourceTable: TEST_TABLE,
312
- tag: SaveOperationTag.UPDATE,
313
- after: {
314
- id: 't1',
315
- description: 'Test 1b'
316
- },
317
- afterReplicaId: 't1'
318
- });
319
-
320
- await batch.save({
321
- sourceTable: TEST_TABLE,
322
- tag: SaveOperationTag.UPDATE,
323
- after: {
324
- id: 't2',
325
- description: 'Test 2b'
326
- },
327
- afterReplicaId: 't2'
328
- });
329
-
330
- await batch.commit('0/2');
331
- });
332
-
333
- await storage.compact();
334
-
335
- const lines2 = await getCheckpointLines(iter, { consume: true });
336
-
337
- // Snapshot test checks for changes in general.
338
- // The tests after that documents the specific things we're looking for
339
- // in this test.
340
- expect(lines2).toMatchSnapshot();
341
-
342
- expect(lines2[0]).toEqual({
343
- data: expect.objectContaining({
344
- has_more: false,
345
- data: [
346
- // The first two ops have been replaced by a single CLEAR op
347
- expect.objectContaining({
348
- op: 'CLEAR'
349
- })
350
- ]
351
- })
352
- });
353
-
354
- // Note: No checkpoint_complete here, since the checkpoint has been
355
- // invalidated by the CLEAR op.
356
-
357
- expect(lines2[1]).toEqual({
358
- checkpoint_diff: expect.objectContaining({
359
- last_op_id: '4'
360
- })
361
- });
362
-
363
- expect(lines2[2]).toEqual({
364
- data: expect.objectContaining({
365
- has_more: false,
366
- data: [
367
- expect.objectContaining({
368
- op: 'PUT'
369
- }),
370
- expect.objectContaining({
371
- op: 'PUT'
372
- })
373
- ]
374
- })
375
- });
376
-
377
- // Now we get a checkpoint_complete
378
- expect(lines2[3]).toEqual({
379
- checkpoint_complete: expect.objectContaining({
380
- last_op_id: '4'
381
- })
382
- });
383
- });
384
-
385
- test('write checkpoint', async () => {
386
- const f = await factory();
387
-
388
- const syncRules = await f.updateSyncRules({
389
- content: BASIC_SYNC_RULES
390
- });
391
-
392
- const storage = f.getInstance(syncRules);
393
- await storage.autoActivate();
394
-
395
- await storage.startBatch(BATCH_OPTIONS, async (batch) => {
396
- // <= the managed write checkpoint LSN below
397
- await batch.commit('0/1');
398
- });
399
-
400
- const checkpoint = await storage.createManagedWriteCheckpoint({
401
- user_id: 'test',
402
- heads: { '1': '1/0' }
403
- });
404
-
405
- const params: SyncStreamParameters = {
406
- storage: f,
407
- params: {
408
- buckets: [],
409
- include_checksum: true,
410
- raw_data: true
411
- },
412
- parseOptions: PARSE_OPTIONS,
413
- tracker,
414
- syncParams: new RequestParameters({ sub: 'test' }, {}),
415
- token: { sub: 'test', exp: Date.now() / 1000 + 10 } as any
416
- };
417
- const stream1 = streamResponse(params);
418
- const lines1 = await consumeCheckpointLines(stream1);
419
-
420
- // If write checkpoints are not correctly filtered, this may already
421
- // contain the write checkpoint.
422
- expect(lines1[0]).toMatchObject({
423
- checkpoint: expect.objectContaining({
424
- last_op_id: '0',
425
- write_checkpoint: undefined
426
- })
427
- });
428
-
429
- await storage.startBatch(BATCH_OPTIONS, async (batch) => {
430
- // must be >= the managed write checkpoint LSN
431
- await batch.commit('1/0');
432
- });
433
-
434
- // At this point the LSN has advanced, so the write checkpoint should be
435
- // included in the next checkpoint message.
436
- const stream2 = streamResponse(params);
437
- const lines2 = await consumeCheckpointLines(stream2);
438
- expect(lines2[0]).toMatchObject({
439
- checkpoint: expect.objectContaining({
440
- last_op_id: '0',
441
- write_checkpoint: `${checkpoint}`
442
- })
443
- });
444
- });
445
- }
446
-
447
- /**
448
- * Get lines on an iterator until isDone(line) == true.
449
- *
450
- * Does not stop the iterator unless options.consume is true.
451
- */
452
- async function consumeIterator<T>(
453
- iter: AsyncIterator<T>,
454
- options: { isDone: (line: T) => boolean; consume?: boolean }
455
- ) {
456
- let lines: T[] = [];
457
- try {
458
- const controller = new AbortController();
459
- const timeout = timers.setTimeout(1500, { value: null, done: 'timeout' }, { signal: controller.signal });
460
- while (true) {
461
- let { value, done } = await Promise.race([timeout, iter.next()]);
462
- if (done == 'timeout') {
463
- throw new Error('Timeout');
464
- }
465
- if (typeof value == 'string') {
466
- value = JSONBig.parse(value);
467
- }
468
- if (value) {
469
- lines.push(value);
470
- }
471
- if (done || options.isDone(value)) {
472
- break;
473
- }
474
- }
475
- controller.abort();
476
-
477
- if (options?.consume) {
478
- iter.return?.();
479
- }
480
- return lines;
481
- } catch (e) {
482
- if (options?.consume) {
483
- iter.throw?.(e);
484
- }
485
- throw e;
486
- }
487
- }
488
-
489
- /**
490
- * Get lines on an iterator until the next checkpoint_complete.
491
- *
492
- * Does not stop the iterator unless options.consume is true.
493
- */
494
- async function getCheckpointLines(
495
- iter: AsyncIterator<StreamingSyncLine | string | null>,
496
- options?: { consume?: boolean }
497
- ) {
498
- return consumeIterator(iter, {
499
- consume: options?.consume,
500
- isDone: (line) => (line as any)?.checkpoint_complete
501
- });
502
- }
503
-
504
- /**
505
- * Get lines on an iterator until the next checkpoint_complete.
506
- *
507
- * Stops the iterator afterwards.
508
- */
509
- async function consumeCheckpointLines(iterable: AsyncIterable<StreamingSyncLine | string | null>): Promise<any[]> {
510
- return getCheckpointLines(iterable[Symbol.asyncIterator](), { consume: true });
511
- }
package/test/src/util.ts DELETED
@@ -1,150 +0,0 @@
1
- import { Metrics } from '@/metrics/Metrics.js';
2
- import {
3
- BucketStorageFactory,
4
- ParseSyncRulesOptions,
5
- PersistedSyncRulesContent,
6
- StartBatchOptions,
7
- SyncBucketDataBatch
8
- } from '@/storage/BucketStorage.js';
9
- import { MongoBucketStorage } from '@/storage/MongoBucketStorage.js';
10
- import { SourceTable } from '@/storage/SourceTable.js';
11
- import { PowerSyncMongo } from '@/storage/mongo/db.js';
12
- import { SyncBucketData } from '@/util/protocol-types.js';
13
- import { getUuidReplicaIdentityBson, hashData } from '@/util/utils.js';
14
- import { SqlSyncRules } from '@powersync/service-sync-rules';
15
- import * as bson from 'bson';
16
- import * as mongo from 'mongodb';
17
- import { env } from './env.js';
18
-
19
- // The metrics need to be initialised before they can be used
20
- await Metrics.initialise({
21
- disable_telemetry_sharing: true,
22
- powersync_instance_id: 'test',
23
- internal_metrics_endpoint: 'unused.for.tests.com'
24
- });
25
- Metrics.getInstance().resetCounters();
26
-
27
- export interface StorageOptions {
28
- /**
29
- * By default, collections are only cleared/
30
- * Setting this to true will drop the collections completely.
31
- */
32
- dropAll?: boolean;
33
-
34
- doNotClear?: boolean;
35
- }
36
- export type StorageFactory = (options?: StorageOptions) => Promise<BucketStorageFactory>;
37
-
38
- export const MONGO_STORAGE_FACTORY: StorageFactory = async (options?: StorageOptions) => {
39
- const db = await connectMongo();
40
- if (options?.dropAll) {
41
- await db.drop();
42
- } else if (!options?.doNotClear) {
43
- await db.clear();
44
- }
45
- return new MongoBucketStorage(db, { slot_name_prefix: 'test_' });
46
- };
47
-
48
- export const ZERO_LSN = '0/0';
49
-
50
- export const PARSE_OPTIONS: ParseSyncRulesOptions = {
51
- defaultSchema: 'public'
52
- };
53
-
54
- export const BATCH_OPTIONS: StartBatchOptions = {
55
- ...PARSE_OPTIONS,
56
- zeroLSN: ZERO_LSN,
57
- storeCurrentData: true
58
- };
59
-
60
- export function testRules(content: string): PersistedSyncRulesContent {
61
- return {
62
- id: 1,
63
- sync_rules_content: content,
64
- slot_name: 'test',
65
- parsed(options) {
66
- return {
67
- id: 1,
68
- sync_rules: SqlSyncRules.fromYaml(content, options),
69
- slot_name: 'test'
70
- };
71
- },
72
- lock() {
73
- throw new Error('Not implemented');
74
- }
75
- };
76
- }
77
-
78
- export async function connectMongo() {
79
- // Short timeout for tests, to fail fast when the server is not available.
80
- // Slightly longer timeouts for CI, to avoid arbitrary test failures
81
- const client = new mongo.MongoClient(env.MONGO_TEST_URL, {
82
- connectTimeoutMS: env.CI ? 15_000 : 5_000,
83
- socketTimeoutMS: env.CI ? 15_000 : 5_000,
84
- serverSelectionTimeoutMS: env.CI ? 15_000 : 2_500
85
- });
86
- return new PowerSyncMongo(client);
87
- }
88
-
89
- export function makeTestTable(name: string, columns?: string[] | undefined) {
90
- const relId = hashData('table', name, (columns ?? ['id']).join(','));
91
- const id = new bson.ObjectId('6544e3899293153fa7b38331');
92
- return new SourceTable(
93
- id,
94
- SourceTable.DEFAULT_TAG,
95
- relId,
96
- 'public',
97
- name,
98
- (columns ?? ['id']).map((column) => ({ name: column, type: 'VARCHAR', typeId: 25 })),
99
- true
100
- );
101
- }
102
-
103
- export function getBatchData(batch: SyncBucketData[] | SyncBucketDataBatch[] | SyncBucketDataBatch) {
104
- const first = getFirst(batch);
105
- if (first == null) {
106
- return [];
107
- }
108
- return first.data.map((d) => {
109
- return {
110
- op_id: d.op_id,
111
- op: d.op,
112
- object_id: d.object_id,
113
- checksum: d.checksum
114
- };
115
- });
116
- }
117
-
118
- export function getBatchMeta(batch: SyncBucketData[] | SyncBucketDataBatch[] | SyncBucketDataBatch) {
119
- const first = getFirst(batch);
120
- if (first == null) {
121
- return null;
122
- }
123
- return {
124
- has_more: first.has_more,
125
- after: first.after,
126
- next_after: first.next_after
127
- };
128
- }
129
-
130
- function getFirst(batch: SyncBucketData[] | SyncBucketDataBatch[] | SyncBucketDataBatch): SyncBucketData | null {
131
- if (!Array.isArray(batch)) {
132
- return batch.batch;
133
- }
134
- if (batch.length == 0) {
135
- return null;
136
- }
137
- let first = batch[0];
138
- if ((first as SyncBucketDataBatch).batch != null) {
139
- return (first as SyncBucketDataBatch).batch;
140
- } else {
141
- return first as SyncBucketData;
142
- }
143
- }
144
-
145
- /**
146
- * Replica id in the old Postgres format, for backwards-compatible tests.
147
- */
148
- export function rid(id: string): bson.UUID {
149
- return getUuidReplicaIdentityBson({ id: id }, [{ name: 'id', type: 'VARCHAR', typeId: 25 }]);
150
- }