@powersync/common 0.0.0-dev-20240506092851

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/LICENSE +201 -0
  2. package/README.md +6 -0
  3. package/lib/client/AbstractPowerSyncDatabase.d.ts +367 -0
  4. package/lib/client/AbstractPowerSyncDatabase.js +644 -0
  5. package/lib/client/AbstractPowerSyncOpenFactory.d.ts +29 -0
  6. package/lib/client/AbstractPowerSyncOpenFactory.js +24 -0
  7. package/lib/client/connection/PowerSyncBackendConnector.d.ts +23 -0
  8. package/lib/client/connection/PowerSyncBackendConnector.js +1 -0
  9. package/lib/client/connection/PowerSyncCredentials.d.ts +5 -0
  10. package/lib/client/connection/PowerSyncCredentials.js +1 -0
  11. package/lib/client/sync/bucket/BucketStorageAdapter.d.ts +71 -0
  12. package/lib/client/sync/bucket/BucketStorageAdapter.js +7 -0
  13. package/lib/client/sync/bucket/CrudBatch.d.ts +31 -0
  14. package/lib/client/sync/bucket/CrudBatch.js +25 -0
  15. package/lib/client/sync/bucket/CrudEntry.d.ts +86 -0
  16. package/lib/client/sync/bucket/CrudEntry.js +84 -0
  17. package/lib/client/sync/bucket/CrudTransaction.d.ts +29 -0
  18. package/lib/client/sync/bucket/CrudTransaction.js +24 -0
  19. package/lib/client/sync/bucket/OpType.d.ts +16 -0
  20. package/lib/client/sync/bucket/OpType.js +22 -0
  21. package/lib/client/sync/bucket/OplogEntry.d.ts +23 -0
  22. package/lib/client/sync/bucket/OplogEntry.js +33 -0
  23. package/lib/client/sync/bucket/SqliteBucketStorage.d.ts +66 -0
  24. package/lib/client/sync/bucket/SqliteBucketStorage.js +298 -0
  25. package/lib/client/sync/bucket/SyncDataBatch.d.ts +6 -0
  26. package/lib/client/sync/bucket/SyncDataBatch.js +11 -0
  27. package/lib/client/sync/bucket/SyncDataBucket.d.ts +40 -0
  28. package/lib/client/sync/bucket/SyncDataBucket.js +40 -0
  29. package/lib/client/sync/stream/AbstractRemote.d.ts +25 -0
  30. package/lib/client/sync/stream/AbstractRemote.js +42 -0
  31. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.d.ts +101 -0
  32. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js +477 -0
  33. package/lib/client/sync/stream/streaming-sync-types.d.ts +116 -0
  34. package/lib/client/sync/stream/streaming-sync-types.js +22 -0
  35. package/lib/db/Column.d.ts +19 -0
  36. package/lib/db/Column.js +25 -0
  37. package/lib/db/DBAdapter.d.ts +95 -0
  38. package/lib/db/DBAdapter.js +19 -0
  39. package/lib/db/crud/SyncStatus.d.ts +38 -0
  40. package/lib/db/crud/SyncStatus.js +57 -0
  41. package/lib/db/crud/UploadQueueStatus.d.ts +20 -0
  42. package/lib/db/crud/UploadQueueStatus.js +24 -0
  43. package/lib/db/schema/Index.d.ts +22 -0
  44. package/lib/db/schema/Index.js +29 -0
  45. package/lib/db/schema/IndexedColumn.d.ts +19 -0
  46. package/lib/db/schema/IndexedColumn.js +29 -0
  47. package/lib/db/schema/Schema.d.ts +38 -0
  48. package/lib/db/schema/Schema.js +37 -0
  49. package/lib/db/schema/Table.d.ts +51 -0
  50. package/lib/db/schema/Table.js +113 -0
  51. package/lib/db/schema/TableV2.d.ts +30 -0
  52. package/lib/db/schema/TableV2.js +43 -0
  53. package/lib/index.d.ts +32 -0
  54. package/lib/index.js +32 -0
  55. package/lib/types/types.d.ts +8 -0
  56. package/lib/types/types.js +1 -0
  57. package/lib/utils/AbortOperation.d.ts +9 -0
  58. package/lib/utils/AbortOperation.js +18 -0
  59. package/lib/utils/BaseObserver.d.ts +20 -0
  60. package/lib/utils/BaseObserver.js +22 -0
  61. package/lib/utils/mutex.d.ts +7 -0
  62. package/lib/utils/mutex.js +28 -0
  63. package/lib/utils/parseQuery.d.ts +6 -0
  64. package/lib/utils/parseQuery.js +16 -0
  65. package/lib/utils/strings.d.ts +3 -0
  66. package/lib/utils/strings.js +9 -0
  67. package/package.json +44 -0
@@ -0,0 +1,477 @@
1
+ import throttle from 'lodash/throttle';
2
+ import Logger from 'js-logger';
3
+ import { isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, isStreamingSyncData } from './streaming-sync-types';
4
+ import ndjsonStream from 'can-ndjson-stream';
5
+ import { SyncStatus } from '../../../db/crud/SyncStatus';
6
+ import { SyncDataBucket } from '../bucket/SyncDataBucket';
7
+ import { BaseObserver } from '../../../utils/BaseObserver';
8
+ import { AbortOperation } from '../../../utils/AbortOperation';
9
/**
 * Identifiers for the two mutually exclusive locks used by the sync client:
 * one serializes CRUD uploads, the other serializes sync-stream iterations.
 */
export var LockType;
(function (LockType) {
    LockType.CRUD = 'crud';
    LockType.SYNC = 'sync';
})(LockType || (LockType = {}));
/** Default minimum interval (ms) between consecutive CRUD upload passes. */
export const DEFAULT_CRUD_UPLOAD_THROTTLE_MS = 1000;
/** Defaults merged into the options passed to the streaming sync constructor. */
export const DEFAULT_STREAMING_SYNC_OPTIONS = {
    // Delay before re-attempting a failed sync iteration.
    retryDelayMs: 5000,
    logger: Logger.get('PowerSyncStream'),
    crudUploadThrottleMs: DEFAULT_CRUD_UPLOAD_THROTTLE_MS
};
20
/**
 * Abstract streaming sync client.
 *
 * Responsibilities visible in this file:
 * - Opens an NDJSON stream via `options.remote.postStreaming('/sync/stream', ...)`
 *   and applies checkpoint / checkpoint-diff / data / keepalive lines through
 *   `options.adapter`.
 * - Uploads local CRUD changes (throttled) whenever the adapter reports a
 *   `crudUpdate`, serialized under the CRUD lock.
 * - Tracks connection/upload/download state in `syncStatus`, notifying
 *   listeners via `statusChanged` (only on change) and `statusUpdated`
 *   (on every update).
 *
 * Subclasses supply `obtainLock` (not defined here) along with the concrete
 * `options.adapter` and `options.remote` implementations.
 */
export class AbstractStreamingSyncImplementation extends BaseObserver {
    _lastSyncedAt;
    options;
    abortController;
    crudUpdateListener;
    streamingSyncPromise;
    syncStatus;
    triggerCrudUpload;
    constructor(options) {
        super();
        this.options = { ...DEFAULT_STREAMING_SYNC_OPTIONS, ...options };
        this.syncStatus = new SyncStatus({
            connected: false,
            lastSyncedAt: undefined,
            dataFlow: {
                uploading: false,
                downloading: false
            }
        });
        this.abortController = null;
        // Collapse bursts of CRUD updates into a single upload pass. Skipped
        // while disconnected or while an upload is already in progress.
        this.triggerCrudUpload = throttle(() => {
            if (!this.syncStatus.connected || this.syncStatus.dataFlowStatus.uploading) {
                return;
            }
            this._uploadAllCrud();
        }, this.options.crudUploadThrottleMs, { trailing: true });
    }
    async waitForReady() { }
    /**
     * Resolves once a `statusChanged` event deep-matches the given partial
     * status (compared against `SyncStatus.toJSON()`).
     */
    waitForStatus(status) {
        return new Promise((resolve) => {
            const l = this.registerListener({
                statusChanged: (updatedStatus) => {
                    /**
                     * Match only the partial status options provided in the
                     * matching status
                     */
                    const matchPartialObject = (compA, compB) => {
                        return Object.entries(compA).every(([key, value]) => {
                            const comparisonBValue = compB[key];
                            if (typeof value == 'object' && typeof comparisonBValue == 'object') {
                                return matchPartialObject(value, comparisonBValue);
                            }
                            return value == comparisonBValue;
                        });
                    };
                    if (matchPartialObject(status, updatedStatus.toJSON())) {
                        resolve();
                        l?.();
                    }
                }
            });
        });
    }
    /** Time of the last completed sync as a `Date`, or a falsy value if never synced. */
    get lastSyncedAt() {
        const lastSynced = this.syncStatus.lastSyncedAt;
        return lastSynced && new Date(lastSynced);
    }
    get isConnected() {
        return this.syncStatus.connected;
    }
    get logger() {
        return this.options.logger;
    }
    /** Detaches the CRUD-update listener; does not abort an active stream. */
    async dispose() {
        this.crudUpdateListener?.();
        this.crudUpdateListener = undefined;
    }
    async hasCompletedSync() {
        return this.options.adapter.hasCompletedSync();
    }
    /** Fetches a new write checkpoint from the sync service. */
    async getWriteCheckpoint() {
        const response = await this.options.remote.get('/write-checkpoint2.json');
        return response['data']['write_checkpoint'];
    }
    /**
     * Uploads all queued CRUD batches under the CRUD lock. On an upload
     * failure the client is marked disconnected and the loop stops after the
     * retry delay.
     */
    async _uploadAllCrud() {
        return this.obtainLock({
            type: LockType.CRUD,
            callback: async () => {
                this.updateSyncStatus({
                    dataFlow: {
                        uploading: true
                    }
                });
                while (true) {
                    try {
                        const done = await this.uploadCrudBatch();
                        if (done) {
                            break;
                        }
                    }
                    catch (ex) {
                        this.updateSyncStatus({
                            connected: false,
                            dataFlow: {
                                uploading: false
                            }
                        });
                        await this.delayRetry();
                        break;
                    }
                    finally {
                        this.updateSyncStatus({
                            dataFlow: {
                                uploading: false
                            }
                        });
                    }
                }
            }
        });
    }
    /**
     * Uploads one CRUD batch if any is queued.
     * @returns false while more batches may remain; true once the queue is
     * empty and the local target has been advanced to a new write checkpoint.
     */
    async uploadCrudBatch() {
        const hasCrud = await this.options.adapter.hasCrud();
        if (hasCrud) {
            await this.options.uploadCrud();
            return false;
        }
        else {
            await this.options.adapter.updateLocalTarget(() => this.getWriteCheckpoint());
            return true;
        }
    }
    /**
     * Starts (or restarts) streaming sync. Resolves after the first
     * connection-status update; it does NOT reject if the initial connection
     * attempt fails.
     */
    async connect() {
        if (this.abortController) {
            await this.disconnect();
        }
        this.abortController = new AbortController();
        this.streamingSyncPromise = this.streamingSync(this.abortController.signal);
        // Return a promise that resolves when the connection status is updated
        return new Promise((resolve) => {
            const l = this.registerListener({
                statusUpdated: (update) => {
                    // This is triggered as soon as a connection is read from
                    if (typeof update.connected == 'undefined') {
                        // only concern with connection updates
                        return;
                    }
                    if (update.connected == false) {
                        /**
                         * This function does not reject if initial connect attempt failed
                         */
                        this.logger.warn('Initial connect attempt did not successfully connect to server');
                    }
                    resolve();
                    l();
                }
            });
        });
    }
    /** Aborts the active sync stream (if any) and waits for it to wind down. */
    async disconnect() {
        if (!this.abortController) {
            return;
        }
        // This might be called multiple times
        if (!this.abortController.signal.aborted) {
            this.abortController.abort(new AbortOperation('Disconnect has been requested'));
        }
        // Await any pending operations before completing the disconnect operation
        try {
            await this.streamingSyncPromise;
        }
        catch (ex) {
            // The operation might have failed, all we care about is if it has completed
            this.logger.warn(ex);
        }
        this.streamingSyncPromise = undefined;
        this.abortController = null;
        this.updateSyncStatus({ connected: false });
    }
    /**
     * Runs the streaming sync retry loop until aborted or a non-retryable
     * error occurs.
     * @deprecated Use {@link connect} instead.
     */
    async streamingSync(signal) {
        if (!signal) {
            this.abortController = new AbortController();
            signal = this.abortController.signal;
        }
        /**
         * Listen for CRUD updates and trigger upstream uploads
         */
        this.crudUpdateListener = this.options.adapter.registerListener({
            crudUpdate: () => this.triggerCrudUpload()
        });
        /**
         * Create a new abort controller which aborts items downstream.
         * This is needed to close any previous connections on exception.
         */
        let nestedAbortController = new AbortController();
        signal.addEventListener('abort', () => {
            /**
             * A request for disconnect was received upstream. Relay the request
             * to the nested abort controller.
             */
            nestedAbortController.abort(signal?.reason ?? new AbortOperation('Received command to disconnect from upstream'));
            this.crudUpdateListener?.();
            this.crudUpdateListener = undefined;
            this.updateSyncStatus({
                connected: false,
                dataFlow: {
                    downloading: false
                }
            });
        });
        /**
         * This loop runs until [retry] is false or the abort signal is set to aborted.
         * Aborting the nestedAbortController will:
         * - Abort any pending fetch requests
         * - Close any sync stream ReadableStreams (which will also close any established network requests)
         */
        while (true) {
            try {
                if (signal?.aborted) {
                    break;
                }
                const { retry } = await this.streamingSyncIteration(nestedAbortController.signal);
                if (!retry) {
                    /**
                     * A sync error occurred that we cannot recover from here.
                     * This loop must terminate.
                     * The nestedAbortController will close any open network requests and streams below.
                     */
                    break;
                }
                // Continue immediately
            }
            catch (ex) {
                /**
                 * Either:
                 * - A network request failed with a failed connection or not OKAY response code.
                 * - There was a sync processing error.
                 * This loop will retry.
                 * The nested abort controller will cleanup any open network requests and streams.
                 * The WebRemote should only abort pending fetch requests or close active Readable streams.
                 */
                if (ex instanceof AbortOperation) {
                    this.logger.warn(ex);
                }
                else {
                    this.logger.error(ex);
                }
                await this.delayRetry();
            }
            finally {
                if (!signal.aborted) {
                    nestedAbortController.abort(new AbortOperation('Closing sync stream network requests before retry.'));
                    nestedAbortController = new AbortController();
                }
                this.updateSyncStatus({
                    connected: false
                });
                // On error, wait a little before retrying
            }
        }
        // Mark as disconnected if here
        this.updateSyncStatus({ connected: false });
    }
    /**
     * Performs one sync-stream session under the SYNC lock: requests a stream
     * for the currently known buckets and applies each received line.
     * @returns `{ retry: boolean }` — whether the outer loop should reconnect.
     */
    async streamingSyncIteration(signal, progress) {
        return await this.obtainLock({
            type: LockType.SYNC,
            signal,
            callback: async () => {
                this.logger.debug('Streaming sync iteration started');
                this.options.adapter.startSession();
                const bucketEntries = await this.options.adapter.getBucketStates();
                const initialBuckets = new Map();
                bucketEntries.forEach((entry) => {
                    initialBuckets.set(entry.bucket, entry.op_id);
                });
                const req = Array.from(initialBuckets.entries()).map(([bucket, after]) => ({
                    name: bucket,
                    after: after
                }));
                // These are compared by reference
                let targetCheckpoint = null;
                let validatedCheckpoint = null;
                let appliedCheckpoint = null;
                let bucketSet = new Set(initialBuckets.keys());
                for await (const line of this.streamingSyncRequest({
                    buckets: req,
                    include_checksum: true,
                    raw_data: true
                }, signal)) {
                    if (isStreamingSyncCheckpoint(line)) {
                        targetCheckpoint = line.checkpoint;
                        const bucketsToDelete = new Set(bucketSet);
                        const newBuckets = new Set();
                        for (const checksum of line.checkpoint.buckets) {
                            newBuckets.add(checksum.bucket);
                            bucketsToDelete.delete(checksum.bucket);
                        }
                        if (bucketsToDelete.size > 0) {
                            this.logger.debug('Removing buckets', [...bucketsToDelete]);
                        }
                        bucketSet = newBuckets;
                        await this.options.adapter.removeBuckets([...bucketsToDelete]);
                        await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
                    }
                    else if (isStreamingSyncCheckpointComplete(line)) {
                        this.logger.debug('Checkpoint complete', targetCheckpoint);
                        const result = await this.options.adapter.syncLocalDatabase(targetCheckpoint);
                        if (!result.checkpointValid) {
                            // This means checksums failed. Start again with a new checkpoint.
                            // TODO: better back-off
                            await new Promise((resolve) => setTimeout(resolve, 50));
                            return { retry: true };
                        }
                        else if (!result.ready) {
                            // Checksums valid, but need more data for a consistent checkpoint.
                            // Continue waiting.
                            // landing here the whole time
                        }
                        else {
                            appliedCheckpoint = targetCheckpoint;
                            this.logger.debug('validated checkpoint', appliedCheckpoint);
                            this.updateSyncStatus({
                                connected: true,
                                lastSyncedAt: new Date(),
                                dataFlow: {
                                    downloading: false
                                }
                            });
                        }
                        validatedCheckpoint = targetCheckpoint;
                    }
                    else if (isStreamingSyncCheckpointDiff(line)) {
                        // TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint
                        if (targetCheckpoint == null) {
                            throw new Error('Checkpoint diff without previous checkpoint');
                        }
                        const diff = line.checkpoint_diff;
                        const newBuckets = new Map();
                        for (const checksum of targetCheckpoint.buckets) {
                            newBuckets.set(checksum.bucket, checksum);
                        }
                        for (const checksum of diff.updated_buckets) {
                            newBuckets.set(checksum.bucket, checksum);
                        }
                        for (const bucket of diff.removed_buckets) {
                            newBuckets.delete(bucket);
                        }
                        const newCheckpoint = {
                            last_op_id: diff.last_op_id,
                            buckets: [...newBuckets.values()],
                            write_checkpoint: diff.write_checkpoint
                        };
                        targetCheckpoint = newCheckpoint;
                        bucketSet = new Set(newBuckets.keys());
                        const bucketsToDelete = diff.removed_buckets;
                        if (bucketsToDelete.length > 0) {
                            this.logger.debug('Remove buckets', bucketsToDelete);
                        }
                        await this.options.adapter.removeBuckets(bucketsToDelete);
                        await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
                    }
                    else if (isStreamingSyncData(line)) {
                        const { data } = line;
                        this.updateSyncStatus({
                            dataFlow: {
                                downloading: true
                            }
                        });
                        await this.options.adapter.saveSyncData({ buckets: [SyncDataBucket.fromRow(data)] });
                    }
                    else if (isStreamingKeepalive(line)) {
                        const remaining_seconds = line.token_expires_in;
                        if (remaining_seconds == 0) {
                            // Connection would be closed automatically right after this
                            this.logger.debug('Token expiring; reconnect');
                            return { retry: true };
                        }
                        this.triggerCrudUpload();
                    }
                    else {
                        this.logger.debug('Sync complete');
                        if (targetCheckpoint === appliedCheckpoint) {
                            this.updateSyncStatus({
                                connected: true,
                                lastSyncedAt: new Date()
                            });
                        }
                        else if (validatedCheckpoint === targetCheckpoint) {
                            const result = await this.options.adapter.syncLocalDatabase(targetCheckpoint);
                            if (!result.checkpointValid) {
                                // This means checksums failed. Start again with a new checkpoint.
                                // TODO: better back-off
                                await new Promise((resolve) => setTimeout(resolve, 50));
                                // FIX: previously returned { retry: false }, which terminated
                                // the outer sync loop on a checksum failure. The identical
                                // failure in the checkpoint_complete branch retries, and the
                                // comment above says "start again" — so retry here as well.
                                return { retry: true };
                            }
                            else if (!result.ready) {
                                // Checksums valid, but need more data for a consistent checkpoint.
                                // Continue waiting.
                            }
                            else {
                                appliedCheckpoint = targetCheckpoint;
                                this.updateSyncStatus({
                                    connected: true,
                                    lastSyncedAt: new Date(),
                                    dataFlow: {
                                        downloading: false
                                    }
                                });
                            }
                        }
                    }
                    progress?.();
                }
                this.logger.debug('Stream input empty');
                // Connection closed. Likely due to auth issue.
                return { retry: true };
            }
        });
    }
    /**
     * Opens the NDJSON sync stream and yields each parsed line. Marks the
     * client connected as soon as the response body is available and schedules
     * an initial CRUD upload on the microtask queue.
     */
    async *streamingSyncRequest(req, signal) {
        const body = await this.options.remote.postStreaming('/sync/stream', req, {}, signal);
        // A connection is active
        // There is a connection now
        Promise.resolve().then(() => this.triggerCrudUpload());
        this.updateSyncStatus({
            connected: true
        });
        const stream = ndjsonStream(body);
        const reader = stream.getReader();
        try {
            while (true) {
                // Read from the stream
                const { done, value } = await reader.read();
                // Exit if we're done
                if (done)
                    return;
                // Else yield the chunk
                yield value;
            }
        }
        finally {
            reader.releaseLock();
        }
    }
    /**
     * Merges the partial update into `syncStatus`. Fires `statusChanged`
     * only when the merged status differs from the previous one; fires
     * `statusUpdated` on every call with the raw partial update.
     */
    updateSyncStatus(options) {
        const updatedStatus = new SyncStatus({
            connected: options.connected ?? this.syncStatus.connected,
            lastSyncedAt: options.lastSyncedAt ?? this.syncStatus.lastSyncedAt,
            dataFlow: {
                ...this.syncStatus.dataFlowStatus,
                ...options.dataFlow
            }
        });
        if (!this.syncStatus.isEqual(updatedStatus)) {
            this.syncStatus = updatedStatus;
            // Only trigger this if there was a change
            this.iterateListeners((cb) => cb.statusChanged?.(updatedStatus));
        }
        // trigger this for all updates
        this.iterateListeners((cb) => cb.statusUpdated?.(options));
    }
    /** Waits `retryDelayMs` before the next retry attempt. */
    async delayRetry() {
        return new Promise((resolve) => setTimeout(resolve, this.options.retryDelayMs));
    }
}
@@ -0,0 +1,116 @@
1
import { BucketChecksum, Checkpoint } from '../bucket/BucketStorageAdapter';
import { CrudEntry, OpId } from '../bucket/CrudEntry';
import { SyncDataBucketJSON } from '../bucket/SyncDataBucket';
/**
 * For sync2.json
 */
export interface ContinueCheckpointRequest {
    /**
     * Existing bucket states. Only these buckets are synchronized.
     */
    buckets: BucketRequest[];
    // presumably identifies the checkpoint to continue from — verify against service docs
    checkpoint_token: string;
    limit?: number;
}
export interface SyncNewCheckpointRequest {
    /**
     * Existing bucket states. Used if include_data is specified.
     */
    buckets?: BucketRequest[];
    request_checkpoint: {
        /**
         * Whether or not to include an initial data request.
         */
        include_data: boolean;
        /**
         * Whether or not to compute a checksum.
         */
        include_checksum: boolean;
    };
    limit?: number;
}
/** Either a continuation of an existing checkpoint or a request for a new one. */
export type SyncRequest = ContinueCheckpointRequest | SyncNewCheckpointRequest;
export interface SyncResponse {
    /**
     * Data for the buckets returned. May not have an entry for each bucket in the request.
     */
    data?: SyncDataBucketJSON[];
    /**
     * True if the response limit has been reached, and another request must be made.
     */
    has_more: boolean;
    checkpoint_token?: string;
    checkpoint?: Checkpoint;
}
export interface StreamingSyncRequest {
    /**
     * Existing bucket states.
     */
    buckets?: BucketRequest[];
    /**
     * If specified, limit the response to only include these buckets.
     */
    only?: string[];
    /**
     * Whether or not to compute a checksum for each checkpoint
     */
    include_checksum: boolean;
    /**
     * Changes the response to stringified data in each OplogEntry
     */
    raw_data: boolean;
}
/** Stream line announcing a new target checkpoint. */
export interface StreamingSyncCheckpoint {
    checkpoint: Checkpoint;
}
/** Stream line describing an incremental change relative to the previous checkpoint. */
export interface StreamingSyncCheckpointDiff {
    checkpoint_diff: {
        last_op_id: OpId;
        updated_buckets: BucketChecksum[];
        removed_buckets: string[];
        write_checkpoint: string;
    };
}
/** Stream line carrying oplog data for a single bucket. */
export interface StreamingSyncDataJSON {
    data: SyncDataBucketJSON;
}
/** Stream line signalling that all data up to `last_op_id` has been sent. */
export interface StreamingSyncCheckpointComplete {
    checkpoint_complete: {
        last_op_id: OpId;
    };
}
export interface StreamingSyncKeepalive {
    /** If specified, token expires in this many seconds. */
    token_expires_in: number;
}
/** Union of all line shapes that can appear in the NDJSON sync stream. */
export type StreamingSyncLine = StreamingSyncDataJSON | StreamingSyncCheckpoint | StreamingSyncCheckpointDiff | StreamingSyncCheckpointComplete | StreamingSyncKeepalive;
export interface BucketRequest {
    name: string;
    /**
     * Base-10 number. Sync all data from this bucket with op_id > after.
     */
    after: OpId;
}
export declare function isStreamingSyncData(line: StreamingSyncLine): line is StreamingSyncDataJSON;
export declare function isStreamingKeepalive(line: StreamingSyncLine): line is StreamingSyncKeepalive;
export declare function isStreamingSyncCheckpoint(line: StreamingSyncLine): line is StreamingSyncCheckpoint;
export declare function isStreamingSyncCheckpointComplete(line: StreamingSyncLine): line is StreamingSyncCheckpointComplete;
export declare function isStreamingSyncCheckpointDiff(line: StreamingSyncLine): line is StreamingSyncCheckpointDiff;
export declare function isContinueCheckpointRequest(request: SyncRequest): request is ContinueCheckpointRequest;
export declare function isSyncNewCheckpointRequest(request: SyncRequest): request is SyncNewCheckpointRequest;
/**
 * For crud.json
 */
export interface CrudRequest {
    data: CrudEntry[];
}
export interface CrudResponse {
    /**
     * A sync response with a checkpoint >= this checkpoint would contain all the changes in this request.
     *
     * Any earlier checkpoint may or may not contain these changes.
     *
     * May be empty when the request contains no ops.
     */
    checkpoint?: OpId;
}
@@ -0,0 +1,22 @@
1
/** True when the sync line carries bucket data (`data` present, non-nullish). */
export function isStreamingSyncData(line) {
    const { data } = line;
    return data !== undefined && data !== null;
}
/** True when the sync line is a keepalive (`token_expires_in` present, non-nullish). */
export function isStreamingKeepalive(line) {
    const { token_expires_in } = line;
    return token_expires_in !== undefined && token_expires_in !== null;
}
/** True when the sync line announces a new checkpoint. */
export function isStreamingSyncCheckpoint(line) {
    const { checkpoint } = line;
    return checkpoint !== undefined && checkpoint !== null;
}
/** True when the sync line marks a checkpoint as fully transmitted. */
export function isStreamingSyncCheckpointComplete(line) {
    const { checkpoint_complete } = line;
    return checkpoint_complete !== undefined && checkpoint_complete !== null;
}
/** True when the sync line is an incremental checkpoint diff. */
export function isStreamingSyncCheckpointDiff(line) {
    const { checkpoint_diff } = line;
    return checkpoint_diff !== undefined && checkpoint_diff !== null;
}
/** True when the request continues from an existing checkpoint token. */
export function isContinueCheckpointRequest(request) {
    const hasToken = typeof request.checkpoint_token === 'string';
    return hasToken && Array.isArray(request.buckets);
}
/** True when the request asks the service for a fresh checkpoint. */
export function isSyncNewCheckpointRequest(request) {
    return typeof request.request_checkpoint === 'object';
}
@@ -0,0 +1,19 @@
1
/** SQLite storage classes usable as column types (see lang_expr.html#castexpr). */
export declare enum ColumnType {
    TEXT = "TEXT",
    INTEGER = "INTEGER",
    REAL = "REAL"
}
/** Constructor options for {@link Column}. */
export interface ColumnOptions {
    name: string;
    /** Storage type; may be omitted for an untyped column. */
    type?: ColumnType;
}
/** A single named, optionally typed column in a table schema. */
export declare class Column {
    protected options: ColumnOptions;
    constructor(options: ColumnOptions);
    get name(): string;
    get type(): ColumnType | undefined;
    /** Plain-object representation: `{ name, type }`. */
    toJSON(): {
        name: string;
        type: ColumnType | undefined;
    };
}
@@ -0,0 +1,25 @@
1
// https://www.sqlite.org/lang_expr.html#castexpr
export var ColumnType;
(function (ColumnType) {
    ColumnType.TEXT = 'TEXT';
    ColumnType.INTEGER = 'INTEGER';
    ColumnType.REAL = 'REAL';
})(ColumnType || (ColumnType = {}));
/**
 * A single named column in a table schema, optionally typed with one of the
 * SQLite storage classes in {@link ColumnType}.
 */
export class Column {
    options;
    constructor(options) {
        this.options = options;
    }
    /** The column's name, as provided at construction. */
    get name() {
        return this.options.name;
    }
    /** The column's declared type, or undefined for an untyped column. */
    get type() {
        return this.options.type;
    }
    /** Plain-object representation: `{ name, type }`. */
    toJSON() {
        const { name, type } = this;
        return { name, type };
    }
}