@powersync/service-module-mssql 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92) hide show
  1. package/LICENSE +67 -0
  2. package/README.md +3 -0
  3. package/ci/init-mssql.sql +50 -0
  4. package/dist/api/MSSQLRouteAPIAdapter.d.ts +21 -0
  5. package/dist/api/MSSQLRouteAPIAdapter.js +248 -0
  6. package/dist/api/MSSQLRouteAPIAdapter.js.map +1 -0
  7. package/dist/common/LSN.d.ts +37 -0
  8. package/dist/common/LSN.js +64 -0
  9. package/dist/common/LSN.js.map +1 -0
  10. package/dist/common/MSSQLSourceTable.d.ts +27 -0
  11. package/dist/common/MSSQLSourceTable.js +35 -0
  12. package/dist/common/MSSQLSourceTable.js.map +1 -0
  13. package/dist/common/MSSQLSourceTableCache.d.ts +14 -0
  14. package/dist/common/MSSQLSourceTableCache.js +28 -0
  15. package/dist/common/MSSQLSourceTableCache.js.map +1 -0
  16. package/dist/common/mssqls-to-sqlite.d.ts +18 -0
  17. package/dist/common/mssqls-to-sqlite.js +143 -0
  18. package/dist/common/mssqls-to-sqlite.js.map +1 -0
  19. package/dist/index.d.ts +1 -0
  20. package/dist/index.js +2 -0
  21. package/dist/index.js.map +1 -0
  22. package/dist/module/MSSQLModule.d.ts +15 -0
  23. package/dist/module/MSSQLModule.js +68 -0
  24. package/dist/module/MSSQLModule.js.map +1 -0
  25. package/dist/replication/CDCPoller.d.ts +67 -0
  26. package/dist/replication/CDCPoller.js +183 -0
  27. package/dist/replication/CDCPoller.js.map +1 -0
  28. package/dist/replication/CDCReplicationJob.d.ts +17 -0
  29. package/dist/replication/CDCReplicationJob.js +76 -0
  30. package/dist/replication/CDCReplicationJob.js.map +1 -0
  31. package/dist/replication/CDCReplicator.d.ts +18 -0
  32. package/dist/replication/CDCReplicator.js +55 -0
  33. package/dist/replication/CDCReplicator.js.map +1 -0
  34. package/dist/replication/CDCStream.d.ts +106 -0
  35. package/dist/replication/CDCStream.js +536 -0
  36. package/dist/replication/CDCStream.js.map +1 -0
  37. package/dist/replication/MSSQLConnectionManager.d.ts +23 -0
  38. package/dist/replication/MSSQLConnectionManager.js +97 -0
  39. package/dist/replication/MSSQLConnectionManager.js.map +1 -0
  40. package/dist/replication/MSSQLConnectionManagerFactory.d.ts +10 -0
  41. package/dist/replication/MSSQLConnectionManagerFactory.js +28 -0
  42. package/dist/replication/MSSQLConnectionManagerFactory.js.map +1 -0
  43. package/dist/replication/MSSQLErrorRateLimiter.d.ts +10 -0
  44. package/dist/replication/MSSQLErrorRateLimiter.js +34 -0
  45. package/dist/replication/MSSQLErrorRateLimiter.js.map +1 -0
  46. package/dist/replication/MSSQLSnapshotQuery.d.ts +71 -0
  47. package/dist/replication/MSSQLSnapshotQuery.js +190 -0
  48. package/dist/replication/MSSQLSnapshotQuery.js.map +1 -0
  49. package/dist/types/mssql-data-types.d.ts +66 -0
  50. package/dist/types/mssql-data-types.js +62 -0
  51. package/dist/types/mssql-data-types.js.map +1 -0
  52. package/dist/types/types.d.ts +177 -0
  53. package/dist/types/types.js +141 -0
  54. package/dist/types/types.js.map +1 -0
  55. package/dist/utils/mssql.d.ts +80 -0
  56. package/dist/utils/mssql.js +329 -0
  57. package/dist/utils/mssql.js.map +1 -0
  58. package/dist/utils/schema.d.ts +21 -0
  59. package/dist/utils/schema.js +131 -0
  60. package/dist/utils/schema.js.map +1 -0
  61. package/package.json +51 -0
  62. package/src/api/MSSQLRouteAPIAdapter.ts +283 -0
  63. package/src/common/LSN.ts +77 -0
  64. package/src/common/MSSQLSourceTable.ts +54 -0
  65. package/src/common/MSSQLSourceTableCache.ts +36 -0
  66. package/src/common/mssqls-to-sqlite.ts +151 -0
  67. package/src/index.ts +1 -0
  68. package/src/module/MSSQLModule.ts +82 -0
  69. package/src/replication/CDCPoller.ts +241 -0
  70. package/src/replication/CDCReplicationJob.ts +87 -0
  71. package/src/replication/CDCReplicator.ts +70 -0
  72. package/src/replication/CDCStream.ts +688 -0
  73. package/src/replication/MSSQLConnectionManager.ts +113 -0
  74. package/src/replication/MSSQLConnectionManagerFactory.ts +33 -0
  75. package/src/replication/MSSQLErrorRateLimiter.ts +36 -0
  76. package/src/replication/MSSQLSnapshotQuery.ts +230 -0
  77. package/src/types/mssql-data-types.ts +79 -0
  78. package/src/types/types.ts +224 -0
  79. package/src/utils/mssql.ts +420 -0
  80. package/src/utils/schema.ts +172 -0
  81. package/test/src/CDCStream.test.ts +206 -0
  82. package/test/src/CDCStreamTestContext.ts +212 -0
  83. package/test/src/CDCStream_resumable_snapshot.test.ts +152 -0
  84. package/test/src/env.ts +11 -0
  85. package/test/src/mssql-to-sqlite.test.ts +474 -0
  86. package/test/src/setup.ts +12 -0
  87. package/test/src/util.ts +189 -0
  88. package/test/tsconfig.json +28 -0
  89. package/test/tsconfig.tsbuildinfo +1 -0
  90. package/tsconfig.json +26 -0
  91. package/tsconfig.tsbuildinfo +1 -0
  92. package/vitest.config.ts +15 -0
@@ -0,0 +1,152 @@
1
+ import { describe, expect, test } from 'vitest';
2
+ import { env } from './env.js';
3
+ import { createTestTable, createTestTableWithBasicId, describeWithStorage, waitForPendingCDCChanges } from './util.js';
4
+ import { TestStorageFactory } from '@powersync/service-core';
5
+ import { METRICS_HELPER } from '@powersync/service-core-tests';
6
+ import { ReplicationMetric } from '@powersync/service-types';
7
+ import * as timers from 'node:timers/promises';
8
+ import { ReplicationAbortedError } from '@powersync/lib-services-framework';
9
+ import { CDCStreamTestContext } from './CDCStreamTestContext.js';
10
+ import { getLatestReplicatedLSN } from '@module/utils/mssql.js';
11
+
12
+ describe.skipIf(!(env.CI || env.SLOW_TESTS))('batch replication', function () {
13
+ describeWithStorage({ timeout: 240_000 }, function (factory) {
14
+ test('resuming initial replication (1)', async () => {
15
+ // Stop early - likely to not include deleted row in first replication attempt.
16
+ await testResumingReplication(factory, 2000);
17
+ });
18
+ test('resuming initial replication (2)', async () => {
19
+ // Stop late - likely to include deleted row in first replication attempt.
20
+ await testResumingReplication(factory, 8000);
21
+ });
22
+ });
23
+ });
24
+
25
/**
 * Tests interrupting and then resuming initial replication.
 *
 * We interrupt replication after test_data1 has fully replicated and
 * test_data2 has partially replicated.
 *
 * This test relies on behavior that is not 100% deterministic:
 * 1. We attempt to abort initial replication once a certain number of
 *    rows have been replicated, but this is not exact. Our only requirement
 *    is that we have not fully replicated test_data2 yet.
 * 2. Order of replication is not deterministic, so which specific rows
 *    have been / have not been replicated at that point is not deterministic.
 *    We allow for some variation in the test results to account for this.
 *
 * @param factory storage factory under test (Mongo or Postgres backed)
 * @param stopAfter approximate number of replicated rows after which the
 *   first replication attempt is aborted
 */
async function testResumingReplication(factory: TestStorageFactory, stopAfter: number) {
  await using context = await CDCStreamTestContext.open(factory, { cdcStreamOptions: { snapshotBatchSize: 1000 } });

  await context.updateSyncRules(`bucket_definitions:
    global:
      data:
        - SELECT * FROM test_data1
        - SELECT * FROM test_data2`);
  const { connectionManager } = context;

  await createTestTableWithBasicId(connectionManager, 'test_data1');
  await createTestTableWithBasicId(connectionManager, 'test_data2');

  // Capture the LSN before inserting so we can wait for CDC to catch up below.
  let beforeLSN = await getLatestReplicatedLSN(connectionManager);
  await connectionManager.query(`INSERT INTO test_data1(description) SELECT 'value' FROM GENERATE_SERIES(1, 1000, 1)`);
  await connectionManager.query(`INSERT INTO test_data2(description) SELECT 'value' FROM GENERATE_SERIES(1, 10000, 1)`);

  await waitForPendingCDCChanges(beforeLSN, connectionManager);

  // Kick off initial replication; intentionally not awaited yet so we can
  // abort it part-way through.
  const p = context.replicateSnapshot();

  let done = false;

  const startRowCount = (await METRICS_HELPER.getMetricValueForTests(ReplicationMetric.ROWS_REPLICATED)) ?? 0;
  try {
    // Fire-and-forget watcher: polls the replicated-rows metric and disposes
    // the context once roughly `stopAfter` rows have been replicated.
    // NOTE(review): this promise is not awaited or .catch()'d — if
    // context.dispose() rejects, it surfaces as an unhandled rejection;
    // consider attaching a catch handler. TODO confirm.
    (async () => {
      while (!done) {
        const count =
          ((await METRICS_HELPER.getMetricValueForTests(ReplicationMetric.ROWS_REPLICATED)) ?? 0) - startRowCount;

        if (count >= stopAfter) {
          break;
        }
        await timers.setTimeout(1);
      }
      // This interrupts initial replication
      await context.dispose();
    })();
    // This confirms that initial replication was interrupted
    const error = await p.catch((e) => e);
    expect(error).toBeInstanceOf(ReplicationAbortedError);
    done = true;
  } finally {
    // Always stop the watcher loop, even if the expectation above throws.
    done = true;
  }

  // Bypass the usual "clear db on factory open" step.
  await using context2 = await CDCStreamTestContext.open(factory, {
    doNotClear: true,
    cdcStreamOptions: { snapshotBatchSize: 1000 }
  });

  beforeLSN = await getLatestReplicatedLSN(context2.connectionManager);
  // This delete should be using one of the ids already replicated
  // NOTE(review): `id1`/`id2`/`id3` destructure the first result row; later
  // compared via String(...) against object_id — presumably the query layer
  // returns the raw id value here, verify it is not a row object.
  const {
    recordset: [id1]
  } = await context2.connectionManager.query(`DELETE TOP (1) FROM test_data2 OUTPUT DELETED.id`);
  // This update should also be using one of the ids already replicated
  const {
    recordset: [id2]
  } = await context2.connectionManager.query(
    `UPDATE test_data2 SET description = 'update1' OUTPUT INSERTED.id WHERE id = (SELECT TOP 1 id FROM test_data2)`
  );
  const {
    recordset: [id3]
  } = await context2.connectionManager.query(
    `INSERT INTO test_data2(description) OUTPUT INSERTED.id VALUES ('insert1')`
  );
  await waitForPendingCDCChanges(beforeLSN, context2.connectionManager);

  // Resume: load the persisted sync rules and continue the snapshot from
  // where the interrupted attempt left off.
  await context2.loadNextSyncRules();
  await context2.replicateSnapshot();

  await context2.startStreaming();
  const data = await context2.getBucketData('global[]', undefined, {});

  const deletedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === String(id1));
  const updatedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === String(id2));
  const insertedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === String(id3));

  if (deletedRowOps.length != 0) {
    // The deleted row was part of the first replication batch,
    // so it is removed by streaming replication.
    expect(deletedRowOps.length).toEqual(2);
    expect(deletedRowOps[1].op).toEqual('REMOVE');
  } else {
    // The deleted row was not part of the first replication batch,
    // so it's not in the resulting ops at all.
  }

  expect(updatedRowOps.length).toEqual(2);
  // description for the first op could be 'value' or 'update1',
  // depending on whether the row was snapshotted before or after the update.
  // We only test the final version.
  expect(JSON.parse(updatedRowOps[1].data as string).description).toEqual('update1');

  expect(insertedRowOps.length).toEqual(2);
  expect(JSON.parse(insertedRowOps[0].data as string).description).toEqual('insert1');
  expect(JSON.parse(insertedRowOps[1].data as string).description).toEqual('insert1');

  // 1000 of test_data1 during first replication attempt.
  // N >= 1000 of test_data2 during first replication attempt.
  // 10000 - N - 1 + 1 of test_data2 during second replication attempt.
  // An additional update during streaming replication (2x total for this row).
  // An additional insert during streaming replication (2x total for this row).
  // If the deleted row was part of the first replication batch, it's removed by streaming replication.
  // This adds 2 ops.
  // We expect this to be 11002 for stopAfter: 2000, and 11004 for stopAfter: 8000.
  // However, this is not deterministic.
  const expectedCount = 11002 + deletedRowOps.length;
  expect(data.length).toEqual(expectedCount);

  const replicatedCount =
    ((await METRICS_HELPER.getMetricValueForTests(ReplicationMetric.ROWS_REPLICATED)) ?? 0) - startRowCount;

  // With resumable replication, there should be no need to re-replicate anything.
  expect(replicatedCount).toEqual(expectedCount);
}
@@ -0,0 +1,11 @@
1
+ import { utils } from '@powersync/lib-services-framework';
2
+
3
/**
 * Environment variables used by the MSSQL module test suite.
 * Each entry falls back to a local-development default when unset.
 */
export const env = utils.collectEnvironmentVariables({
  // Connection string for the SQL Server instance under test.
  MSSQL_TEST_URI: utils.type.string.default(`mssql://sa:321strong_ROOT_password@localhost:1433/powersync`),
  // MongoDB instance used for Mongo-backed sync storage tests.
  MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'),
  // True when running in CI; enables the slow batch-replication tests.
  CI: utils.type.boolean.default('false'),
  // Opt-in flag to run the slow tests outside of CI.
  SLOW_TESTS: utils.type.boolean.default('false'),
  // Postgres instance used for Postgres-backed sync storage tests.
  PG_STORAGE_TEST_URL: utils.type.string.default('postgres://postgres:postgres@localhost:5431/powersync_storage_test'),
  // Toggle which storage backends the test matrix covers.
  TEST_MONGO_STORAGE: utils.type.boolean.default('true'),
  TEST_POSTGRES_STORAGE: utils.type.boolean.default('true')
});
@@ -0,0 +1,474 @@
1
+ import { SqliteInputRow } from '@powersync/service-sync-rules';
2
+ import { afterAll, beforeEach, describe, expect, test } from 'vitest';
3
+ import { clearTestDb, createUpperCaseUUID, TEST_CONNECTION_OPTIONS, waitForPendingCDCChanges } from './util.js';
4
+ import { CDCToSqliteRow, toSqliteInputRow } from '@module/common/mssqls-to-sqlite.js';
5
+ import { MSSQLConnectionManager } from '@module/replication/MSSQLConnectionManager.js';
6
+ import {
7
+ enableCDCForTable,
8
+ getCaptureInstance,
9
+ getLatestReplicatedLSN,
10
+ getMinLSN,
11
+ toQualifiedTableName
12
+ } from '@module/utils/mssql.js';
13
+ import sql from 'mssql';
14
+
15
/**
 * Verifies that each supported SQL Server data type maps to the expected
 * SQLite input value, both when read directly from the table
 * (getDatabaseRows) and when read back through the CDC change stream
 * (getReplicatedRows). The two paths must agree.
 */
describe('MSSQL Data Types Tests', () => {
  const connectionManager = new MSSQLConnectionManager(TEST_CONNECTION_OPTIONS, {});

  beforeEach(async () => {
    // Fresh table (with CDC enabled) for every test.
    await clearTestDb(connectionManager);
    await setupTestTable();
  });
  afterAll(async () => {
    await connectionManager.end();
  });

  // Creates a single table containing one column per data type under test,
  // then enables CDC capture for it.
  async function setupTestTable() {
    await connectionManager.query(`
      CREATE TABLE ${connectionManager.schema}.test_data (
        id INT IDENTITY(1,1) PRIMARY KEY,
        tinyint_col TINYINT,
        smallint_col SMALLINT,
        int_col INT,
        bigint_col BIGINT,
        float_col FLOAT,
        real_col REAL,
        decimal_col DECIMAL(10,2),
        numeric_col NUMERIC(10,2),
        money_col MONEY,
        smallmoney_col SMALLMONEY,
        bit_col BIT,

        date_col DATE,
        datetime_col DATETIME,
        datetime2_col DATETIME2(6),
        smalldatetime_col SMALLDATETIME,
        datetimeoffset_col DATETIMEOFFSET(3),
        time_col TIME(6),

        char_col CHAR(10),
        varchar_col VARCHAR(255),
        varchar_max_col VARCHAR(MAX),
        nchar_col NCHAR(15),
        nvarchar_col NVARCHAR(255),
        nvarchar_max_col NVARCHAR(MAX),
        text_col TEXT,
        ntext_col NTEXT,

        binary_col BINARY(16),
        varbinary_col VARBINARY(256),
        varbinary_max_col VARBINARY(MAX),
        image_col IMAGE,

        uniqueidentifier_col UNIQUEIDENTIFIER,
        xml_col XML,
        json_col NVARCHAR(MAX),

        hierarchyid_col HIERARCHYID,
        geometry_col GEOMETRY,
        geography_col GEOGRAPHY
      )
    `);

    await enableCDCForTable({ connectionManager, table: 'test_data' });
  }

  test('Number types mappings', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);
    await connectionManager.query(`
      INSERT INTO ${connectionManager.schema}.test_data(
        tinyint_col,
        smallint_col,
        int_col,
        bigint_col,
        float_col,
        real_col,
        decimal_col,
        numeric_col,
        money_col,
        smallmoney_col,
        bit_col
      ) VALUES (
        255, -- TINYINT maximum value
        32767, -- SMALLINT maximum value
        2147483647, -- INT maximum value
        9223372036854775807, -- BIGINT maximum value
        3.1415926535, -- FLOAT example
        3.14, -- REAL example
        12345.67, -- DECIMAL(10,2) example
        12345.67, -- NUMERIC(10,2) example
        12345.67, -- MONEY example
        123.45, -- SMALLMONEY example
        1 -- BIT value
      )
    `);
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');

    const expectedResult: SqliteInputRow = {
      tinyint_col: 255,
      smallint_col: 32767,
      int_col: 2147483647,
      // BIGINT maps to a JS bigint to preserve full 64-bit precision.
      bigint_col: 9223372036854775807n,
      float_col: 3.1415926535,
      // REAL is single-precision, so only compare approximately.
      real_col: expect.closeTo(3.14, 2),
      decimal_col: 12345.67,
      numeric_col: 12345.67,
      money_col: 12345.67,
      smallmoney_col: 123.45,
      bit_col: 1
    };
    expect(databaseRows[0]).toMatchObject(expectedResult);
    expect(replicatedRows[0]).toMatchObject(expectedResult);
  });

  test('Character types mappings', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);
    await connectionManager.query(`
      INSERT INTO [${connectionManager.schema}].test_data (
        char_col,
        varchar_col,
        varchar_max_col,
        nchar_col,
        nvarchar_col,
        nvarchar_max_col,
        text_col,
        ntext_col
      ) VALUES (
        'CharData', -- CHAR(10) with padding spaces
        'Variable character data',-- VARCHAR(255)
        'Variable character data MAX', -- VARCHAR(MAX)
        N'UnicodeChar', -- NCHAR(15)
        N'Variable Unicode data', -- NVARCHAR(255)
        N'Variable Unicode data MAX', -- NVARCHAR(MAX)
        'TextData', -- TEXT
        N'UnicodeTextData' -- NTEXT
      )
    `);
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');
    const expectedResult = {
      char_col: 'CharData  ', // CHAR pads with spaces up to the defined length (10)
      varchar_col: 'Variable character data',
      varchar_max_col: 'Variable character data MAX',
      nchar_col: 'UnicodeChar    ', // NCHAR pads with spaces up to the defined length (15)
      nvarchar_col: 'Variable Unicode data',
      nvarchar_max_col: 'Variable Unicode data MAX',
      text_col: 'TextData',
      ntext_col: 'UnicodeTextData'
    };

    expect(databaseRows[0]).toMatchObject(expectedResult);
    expect(replicatedRows[0]).toMatchObject(expectedResult);
  });

  test('Binary types mappings', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);
    const binaryData = Buffer.from('BinaryData');
    await connectionManager.query(
      `
      INSERT INTO [${connectionManager.schema}].test_data (
        binary_col,
        varbinary_col,
        varbinary_max_col,
        image_col
      ) VALUES (
        @binary_col,
        @varbinary_col,
        @varbinary_max_col,
        @image_col
      )
      `,
      [
        { name: 'binary_col', type: sql.Binary, value: binaryData },
        { name: 'varbinary_col', type: sql.VarBinary, value: binaryData },
        { name: 'varbinary_max_col', type: sql.VarBinary(sql.MAX), value: binaryData },
        { name: 'image_col', type: sql.Image, value: binaryData }
      ]
    );
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');

    const expectedBinary = new Uint8Array(binaryData);
    // BINARY(16) zero-pads the stored value to its declared length.
    const expectedBinaryPadded = new Uint8Array(16);
    expectedBinaryPadded.set(expectedBinary.slice(0, 16), 0);

    const expectedResult: SqliteInputRow = {
      binary_col: expectedBinaryPadded,
      varbinary_col: expectedBinary,
      varbinary_max_col: expectedBinary,
      image_col: expectedBinary
    };

    expect(databaseRows[0]).toMatchObject(expectedResult);
    expect(replicatedRows[0]).toMatchObject(expectedResult);
  });

  test('Date types mappings', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);
    const testDate = new Date('2023-03-06T15:47:00.000Z');
    await connectionManager.query(
      `
      INSERT INTO [${connectionManager.schema}].test_data(
        date_col,
        datetime_col,
        datetime2_col,
        smalldatetime_col,
        time_col
      )
      VALUES (
        @date_col,
        @datetime_col,
        @datetime2_col,
        @smalldatetime_col,
        @time_col
      )
      `,
      [
        { name: 'date_col', type: sql.Date, value: testDate },
        { name: 'datetime_col', type: sql.DateTime, value: testDate },
        { name: 'datetime2_col', type: sql.DateTime2(6), value: testDate },
        { name: 'smalldatetime_col', type: sql.SmallDateTime, value: testDate },
        { name: 'time_col', type: sql.Time(6), value: testDate }
      ]
    );
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');
    // Date/time values are normalized to ISO-8601 strings.
    const expectedResult = {
      date_col: '2023-03-06',
      datetime_col: '2023-03-06T15:47:00.000Z',
      datetime2_col: '2023-03-06T15:47:00.000Z',
      smalldatetime_col: '2023-03-06T15:47:00.000Z',
      time_col: '15:47:00.000'
    };

    expect(databaseRows[0]).toMatchObject(expectedResult);
    expect(replicatedRows[0]).toMatchObject(expectedResult);
  });

  test('Date types edge cases mappings', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);

    // Minimum and maximum representable values for DATETIME2 and DATETIME.
    await connectionManager.query(`
      INSERT INTO [${connectionManager.schema}].test_data(datetime2_col)
      VALUES ('0001-01-01 00:00:00.000')
    `);
    await connectionManager.query(`
      INSERT INTO [${connectionManager.schema}].test_data(datetime2_col)
      VALUES ('9999-12-31 23:59:59.999')
    `);
    await connectionManager.query(`
      INSERT INTO [${connectionManager.schema}].test_data(datetime_col)
      VALUES ('1753-01-01 00:00:00')
    `);
    await connectionManager.query(`
      INSERT INTO [${connectionManager.schema}].test_data(datetime_col)
      VALUES ('9999-12-31 23:59:59.997')
    `);
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    // Expected results are indexed by insertion order.
    // NOTE(review): assumes SELECT * returns rows in identity order — TODO confirm.
    const expectedResults = [
      { datetime2_col: '0001-01-01T00:00:00.000Z' },
      { datetime2_col: '9999-12-31T23:59:59.999Z' },
      { datetime_col: '1753-01-01T00:00:00.000Z' },
      { datetime_col: '9999-12-31T23:59:59.997Z' }
    ];

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');

    for (let i = 0; i < expectedResults.length; i++) {
      expect(databaseRows[i]).toMatchObject(expectedResults[i]);
      expect(replicatedRows[i]).toMatchObject(expectedResults[i]);
    }
  });

  test('DateTimeOffset type mapping', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);
    // DateTimeOffset preserves timezone information
    await connectionManager.query(`
      INSERT INTO [${connectionManager.schema}].test_data(datetimeoffset_col)
      VALUES ('2023-03-06 15:47:00.000 +05:00')
    `);
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    const expectedResult = {
      datetimeoffset_col: '2023-03-06T10:47:00.000Z' // Converted to UTC
    };

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');

    // Note: The driver converts DateTimeOffset to Date, which incorporates the timezone offset which is then represented in UTC.
    expect(databaseRows[0]).toMatchObject(expectedResult);
    expect(replicatedRows[0]).toMatchObject(expectedResult);
  });

  test('UniqueIdentifier type mapping', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);

    // SQL Server returns GUIDs in upper case, so compare against an
    // upper-cased UUID.
    const testGuid = createUpperCaseUUID();
    await connectionManager.query(
      `
      INSERT INTO [${connectionManager.schema}].test_data(uniqueidentifier_col)
      VALUES (@guid)
      `,
      [{ name: 'guid', type: sql.UniqueIdentifier, value: testGuid }]
    );
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');

    // GUIDs are returned as strings
    expect(databaseRows[0].uniqueidentifier_col).toBe(testGuid);
    expect(replicatedRows[0].uniqueidentifier_col).toBe(testGuid);
  });

  test('JSON type mapping', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);
    const expectedJSON = { name: 'John Doe', age: 30, married: true };
    await connectionManager.query(
      `
      INSERT INTO [${connectionManager.schema}].test_data(json_col)
      VALUES (@json)
      `,
      [{ name: 'json', type: sql.NVarChar(sql.MAX), value: JSON.stringify(expectedJSON) }]
    );
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');

    // JSON is stored as NVARCHAR(MAX); round-trip via JSON.parse to compare
    // structurally rather than by exact string formatting.
    const actualDBJSONValue = JSON.parse(databaseRows[0].json_col as string);
    const actualReplicatedJSONValue = JSON.parse(replicatedRows[0].json_col as string);
    expect(actualDBJSONValue).toEqual(expectedJSON);
    expect(actualReplicatedJSONValue).toEqual(expectedJSON);
  });

  test('XML type mapping', async () => {
    const beforeLSN = await getLatestReplicatedLSN(connectionManager);
    const xmlData = '<root><item>value</item></root>';
    await connectionManager.query(
      `
      INSERT INTO [${connectionManager.schema}].test_data(xml_col)
      VALUES (@xml)
      `,
      [{ name: 'xml', type: sql.Xml, value: xmlData }]
    );
    await waitForPendingCDCChanges(beforeLSN, connectionManager);

    const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
    const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');

    expect(databaseRows[0].xml_col).toBe(xmlData);
    expect(replicatedRows[0].xml_col).toBe(xmlData);
  });

  // TODO: Update test when properly converting spatial types
  // test('Spatial types mappings', async () => {
  //   const beforeLSN = await getLatestReplicatedLSN(connectionManager);
  //   // Geometry and Geography types are stored as binary/WKT strings
  //   await connectionManager.query(`
  //     INSERT INTO [${connectionManager.schema}].test_data(geometry_col, geography_col)
  //     VALUES (
  //       geometry::STGeomFromText('POINT(1 2)', 0),
  //       geography::STGeomFromText('POINT(1 2)', 4326)
  //     )
  //   `);
  //   await waitForPendingCDCChanges(beforeLSN, connectionManager);
  //
  //   const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
  //   const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');
  //
  //   // The driver currently returns spatial types as non standard objects. We just convert them to JSON strings for now
  //   expect(databaseRows[0].geometry_col).toBeDefined();
  //   expect(databaseRows[0].geography_col).toBeDefined();
  //   expect(replicatedRows[0].geometry_col).toBeDefined();
  //   expect(replicatedRows[0].geography_col).toBeDefined();
  // });

  // TODO: Enable when HierarchyID type is properly supported
  // test('HierarchyID type mapping', async () => {
  //   const hierarchyid = '/1/';
  //   const beforeLSN = await getLatestReplicatedLSN(connectionManager);
  //   await connectionManager.query(`
  //     INSERT INTO [${connectionManager.schema}].test_data(hierarchyid_col)
  //     VALUES (@hierarchyid)
  //   `,
  //   [{ name: 'hierarchyid', type: sql.VarChar, value: hierarchyid }]
  //   );
  //   await waitForPendingCDCChanges(beforeLSN, connectionManager);
  //
  //   const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
  //   const replicatedRows = await getReplicatedRows(connectionManager, 'test_data');
  //
  //   const expectedBinary = new Uint8Array(Buffer.from(hierarchyid));
  //
  //   expect(databaseRows[0].hierarchyid_col).toEqual(expectedBinary);
  //   expect(replicatedRows[0].hierarchyid_col).toEqual(expectedBinary);
  // });
});
420
+
421
+ async function getDatabaseRows(
422
+ connectionManager: MSSQLConnectionManager,
423
+ tableName: string
424
+ ): Promise<SqliteInputRow[]> {
425
+ const { recordset: rows } = await connectionManager.query(
426
+ `SELECT * FROM ${toQualifiedTableName(connectionManager.schema, tableName)}`
427
+ );
428
+ return rows.map((row) => {
429
+ const converted = toSqliteInputRow(row, rows.columns);
430
+ // Exclude id column from results
431
+ const { id, ...rest } = converted;
432
+ return rest;
433
+ });
434
+ }
435
+
436
/**
 * Return the table's rows as observed through the CDC change stream.
 *
 * Reads all CDC changes between the capture instance's minimum LSN and the
 * latest replicated LSN, then keeps only INSERT operations and converts
 * them to SQLite input rows (with the identity `id` column stripped).
 *
 * @throws Error when no CDC capture instance exists for the table.
 */
async function getReplicatedRows(
  connectionManager: MSSQLConnectionManager,
  tableName: string
): Promise<SqliteInputRow[]> {
  // Upper bound: everything CDC has processed so far.
  const endLSN = await getLatestReplicatedLSN(connectionManager);

  const captureInstance = await getCaptureInstance({
    connectionManager,
    schema: connectionManager.schema,
    tableName
  });
  if (!captureInstance) {
    throw new Error(`No CDC capture instance found for table ${tableName}`);
  }

  // Lower bound: the earliest LSN available for this capture instance.
  const startLSN = await getMinLSN(connectionManager, captureInstance.name);
  // Query CDC changes, ordered so rows come back in commit order.
  const { recordset: results } = await connectionManager.query(
    `
    SELECT * FROM ${captureInstance.schema}.fn_cdc_get_all_changes_${captureInstance.name}(@from_lsn, @to_lsn, 'all update old') ORDER BY __$start_lsn, __$seqval
    `,
    [
      { name: 'from_lsn', type: sql.VarBinary, value: startLSN.toBinary() },
      { name: 'to_lsn', type: sql.VarBinary, value: endLSN.toBinary() }
    ]
  );

  return results
    .filter((row) => row.__$operation === 2) // Only INSERT operations (CDC __$operation: 2 = insert)
    .map((row) => {
      const converted = CDCToSqliteRow({ row, columns: results.columns });
      // Exclude id column from results
      const { id, ...rest } = converted;
      return rest;
    });
}
@@ -0,0 +1,12 @@
1
+ import { container } from '@powersync/lib-services-framework';
2
+ import { METRICS_HELPER } from '@powersync/service-core-tests';
3
+ import { beforeAll, beforeEach } from 'vitest';
4
+
5
// Executes once per test file: register the framework's default container
// entries before any test runs.
beforeAll(async () => {
  container.registerDefaults();
});

// Reset the replication metrics before each test so per-test metric deltas
// (e.g. ROWS_REPLICATED counters) start from a clean baseline.
beforeEach(async () => {
  METRICS_HELPER.resetMetrics();
});