@powersync/service-module-mongodb 0.0.0-dev-20241001150444
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/LICENSE +67 -0
- package/README.md +3 -0
- package/dist/api/MongoRouteAPIAdapter.d.ts +22 -0
- package/dist/api/MongoRouteAPIAdapter.js +64 -0
- package/dist/api/MongoRouteAPIAdapter.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +4 -0
- package/dist/index.js.map +1 -0
- package/dist/module/MongoModule.d.ts +13 -0
- package/dist/module/MongoModule.js +46 -0
- package/dist/module/MongoModule.js.map +1 -0
- package/dist/replication/ChangeStream.d.ts +53 -0
- package/dist/replication/ChangeStream.js +389 -0
- package/dist/replication/ChangeStream.js.map +1 -0
- package/dist/replication/ChangeStreamReplicationJob.d.ts +16 -0
- package/dist/replication/ChangeStreamReplicationJob.js +90 -0
- package/dist/replication/ChangeStreamReplicationJob.js.map +1 -0
- package/dist/replication/ChangeStreamReplicator.d.ts +13 -0
- package/dist/replication/ChangeStreamReplicator.js +26 -0
- package/dist/replication/ChangeStreamReplicator.js.map +1 -0
- package/dist/replication/ConnectionManagerFactory.d.ts +9 -0
- package/dist/replication/ConnectionManagerFactory.js +21 -0
- package/dist/replication/ConnectionManagerFactory.js.map +1 -0
- package/dist/replication/MongoErrorRateLimiter.d.ts +11 -0
- package/dist/replication/MongoErrorRateLimiter.js +44 -0
- package/dist/replication/MongoErrorRateLimiter.js.map +1 -0
- package/dist/replication/MongoManager.d.ts +14 -0
- package/dist/replication/MongoManager.js +36 -0
- package/dist/replication/MongoManager.js.map +1 -0
- package/dist/replication/MongoRelation.d.ts +9 -0
- package/dist/replication/MongoRelation.js +174 -0
- package/dist/replication/MongoRelation.js.map +1 -0
- package/dist/replication/replication-index.d.ts +4 -0
- package/dist/replication/replication-index.js +5 -0
- package/dist/replication/replication-index.js.map +1 -0
- package/dist/types/types.d.ts +51 -0
- package/dist/types/types.js +37 -0
- package/dist/types/types.js.map +1 -0
- package/package.json +47 -0
- package/src/api/MongoRouteAPIAdapter.ts +86 -0
- package/src/index.ts +5 -0
- package/src/module/MongoModule.ts +52 -0
- package/src/replication/ChangeStream.ts +503 -0
- package/src/replication/ChangeStreamReplicationJob.ts +104 -0
- package/src/replication/ChangeStreamReplicator.ts +36 -0
- package/src/replication/ConnectionManagerFactory.ts +27 -0
- package/src/replication/MongoErrorRateLimiter.ts +45 -0
- package/src/replication/MongoManager.ts +47 -0
- package/src/replication/MongoRelation.ts +156 -0
- package/src/replication/replication-index.ts +4 -0
- package/src/types/types.ts +65 -0
- package/test/src/change_stream.test.ts +306 -0
- package/test/src/change_stream_utils.ts +148 -0
- package/test/src/env.ts +7 -0
- package/test/src/mongo_test.test.ts +219 -0
- package/test/src/setup.ts +7 -0
- package/test/src/util.ts +52 -0
- package/test/tsconfig.json +28 -0
- package/tsconfig.json +28 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/vitest.config.ts +9 -0
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import { ActiveCheckpoint, BucketStorageFactory, OpId, SyncRulesBucketStorage } from '@powersync/service-core';
|
|
2
|
+
|
|
3
|
+
import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js';
|
|
4
|
+
import { fromAsync } from '@core-tests/stream_utils.js';
|
|
5
|
+
import { MongoManager } from '@module/replication/MongoManager.js';
|
|
6
|
+
import { ChangeStream, ChangeStreamOptions } from '@module/replication/ChangeStream.js';
|
|
7
|
+
import * as mongo from 'mongodb';
|
|
8
|
+
import { createCheckpoint } from '@module/replication/MongoRelation.js';
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Tests operating on the wal stream need to configure the stream and manage asynchronous
|
|
12
|
+
* replication, which gets a little tricky.
|
|
13
|
+
*
|
|
14
|
+
* This wraps a test in a function that configures all the context, and tears it down afterwards.
|
|
15
|
+
*/
|
|
16
|
+
export function walStreamTest(
|
|
17
|
+
factory: () => Promise<BucketStorageFactory>,
|
|
18
|
+
test: (context: ChangeStreamTestContext) => Promise<void>
|
|
19
|
+
): () => Promise<void> {
|
|
20
|
+
return async () => {
|
|
21
|
+
const f = await factory();
|
|
22
|
+
const connectionManager = new MongoManager(TEST_CONNECTION_OPTIONS);
|
|
23
|
+
|
|
24
|
+
await clearTestDb(connectionManager.db);
|
|
25
|
+
const context = new ChangeStreamTestContext(f, connectionManager);
|
|
26
|
+
try {
|
|
27
|
+
await test(context);
|
|
28
|
+
} finally {
|
|
29
|
+
await context.dispose();
|
|
30
|
+
}
|
|
31
|
+
};
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export class ChangeStreamTestContext {
|
|
35
|
+
private _walStream?: ChangeStream;
|
|
36
|
+
private abortController = new AbortController();
|
|
37
|
+
private streamPromise?: Promise<void>;
|
|
38
|
+
public storage?: SyncRulesBucketStorage;
|
|
39
|
+
|
|
40
|
+
constructor(public factory: BucketStorageFactory, public connectionManager: MongoManager) {}
|
|
41
|
+
|
|
42
|
+
async dispose() {
|
|
43
|
+
this.abortController.abort();
|
|
44
|
+
await this.streamPromise?.catch((e) => e);
|
|
45
|
+
await this.connectionManager.destroy();
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
get client() {
|
|
49
|
+
return this.connectionManager.client;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
get db() {
|
|
53
|
+
return this.connectionManager.db;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
get connectionTag() {
|
|
57
|
+
return this.connectionManager.connectionTag;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
async updateSyncRules(content: string) {
|
|
61
|
+
const syncRules = await this.factory.updateSyncRules({ content: content });
|
|
62
|
+
this.storage = this.factory.getInstance(syncRules);
|
|
63
|
+
return this.storage!;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
get walStream() {
|
|
67
|
+
if (this.storage == null) {
|
|
68
|
+
throw new Error('updateSyncRules() first');
|
|
69
|
+
}
|
|
70
|
+
if (this._walStream) {
|
|
71
|
+
return this._walStream;
|
|
72
|
+
}
|
|
73
|
+
const options: ChangeStreamOptions = {
|
|
74
|
+
storage: this.storage,
|
|
75
|
+
connections: this.connectionManager,
|
|
76
|
+
abort_signal: this.abortController.signal
|
|
77
|
+
};
|
|
78
|
+
this._walStream = new ChangeStream(options);
|
|
79
|
+
return this._walStream!;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
async replicateSnapshot() {
|
|
83
|
+
await this.walStream.initReplication();
|
|
84
|
+
await this.storage!.autoActivate();
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
startStreaming() {
|
|
88
|
+
this.streamPromise = this.walStream.streamChanges();
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
async getCheckpoint(options?: { timeout?: number }) {
|
|
92
|
+
let checkpoint = await Promise.race([
|
|
93
|
+
getClientCheckpoint(this.client, this.db, this.factory, { timeout: options?.timeout ?? 15_000 }),
|
|
94
|
+
this.streamPromise
|
|
95
|
+
]);
|
|
96
|
+
if (typeof checkpoint == undefined) {
|
|
97
|
+
// This indicates an issue with the test setup - streamingPromise completed instead
|
|
98
|
+
// of getClientCheckpoint()
|
|
99
|
+
throw new Error('Test failure - streamingPromise completed');
|
|
100
|
+
}
|
|
101
|
+
return checkpoint as string;
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
async getBucketsDataBatch(buckets: Record<string, string>, options?: { timeout?: number }) {
|
|
105
|
+
let checkpoint = await this.getCheckpoint(options);
|
|
106
|
+
const map = new Map<string, string>(Object.entries(buckets));
|
|
107
|
+
return fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
async getBucketData(bucket: string, start?: string, options?: { timeout?: number }) {
|
|
111
|
+
start ??= '0';
|
|
112
|
+
let checkpoint = await this.getCheckpoint(options);
|
|
113
|
+
const map = new Map<string, string>([[bucket, start]]);
|
|
114
|
+
const batch = this.storage!.getBucketDataBatch(checkpoint, map);
|
|
115
|
+
const batches = await fromAsync(batch);
|
|
116
|
+
return batches[0]?.batch.data ?? [];
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
export async function getClientCheckpoint(
|
|
121
|
+
client: mongo.MongoClient,
|
|
122
|
+
db: mongo.Db,
|
|
123
|
+
bucketStorage: BucketStorageFactory,
|
|
124
|
+
options?: { timeout?: number }
|
|
125
|
+
): Promise<OpId> {
|
|
126
|
+
const start = Date.now();
|
|
127
|
+
const lsn = await createCheckpoint(client, db);
|
|
128
|
+
// This old API needs a persisted checkpoint id.
|
|
129
|
+
// Since we don't use LSNs anymore, the only way to get that is to wait.
|
|
130
|
+
|
|
131
|
+
const timeout = options?.timeout ?? 50_000;
|
|
132
|
+
let lastCp: ActiveCheckpoint | null = null;
|
|
133
|
+
|
|
134
|
+
while (Date.now() - start < timeout) {
|
|
135
|
+
const cp = await bucketStorage.getActiveCheckpoint();
|
|
136
|
+
lastCp = cp;
|
|
137
|
+
if (!cp.hasSyncRules()) {
|
|
138
|
+
throw new Error('No sync rules available');
|
|
139
|
+
}
|
|
140
|
+
if (cp.lsn && cp.lsn >= lsn) {
|
|
141
|
+
return cp.checkpoint;
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
await new Promise((resolve) => setTimeout(resolve, 30));
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
throw new Error(`Timeout while waiting for checkpoint ${lsn}. Last checkpoint: ${lastCp?.lsn}`);
|
|
148
|
+
}
|
package/test/src/env.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { utils } from '@powersync/lib-services-framework';

// Environment variables for the test suite, with defaults suitable for local
// development (see util.ts for where these are consumed).
export const env = utils.collectEnvironmentVariables({
  // Connection URI for the MongoDB instance holding test source data.
  MONGO_TEST_DATA_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test_data'),
  // Set on CI runners; connectMongoData() uses it to lengthen connection timeouts.
  CI: utils.type.boolean.default('false'),
  // Opt-in flag - presumably gates long-running tests; usage not visible in this file. TODO confirm.
  SLOW_TESTS: utils.type.boolean.default('false')
});
|
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
import { ChangeStream } from '@module/replication/ChangeStream.js';
|
|
2
|
+
import * as mongo from 'mongodb';
|
|
3
|
+
import { describe, expect, test } from 'vitest';
|
|
4
|
+
import { clearTestDb, connectMongoData } from './util.js';
|
|
5
|
+
import { SqliteRow } from '@powersync/service-sync-rules';
|
|
6
|
+
import { constructAfterRecord } from '@module/replication/MongoRelation.js';
|
|
7
|
+
|
|
8
|
+
// Verifies that MongoDB BSON values are converted to the expected SQLite-style
// values, both for direct queries (ChangeStream.getQueryData) and for change
// stream replication (constructAfterRecord). Top-level scalars and
// array-wrapped ("nested") variants are covered separately.
describe('mongo data types', () => {
  // Reset the database; collections are created implicitly on insert.
  async function setupTable(db: mongo.Db) {
    await clearTestDb(db);
  }

  // Insert documents covering top-level scalar BSON types.
  async function insert(collection: mongo.Collection) {
    await collection.insertMany([
      {
        _id: 1 as any,
        null: null,
        text: 'text',
        uuid: new mongo.UUID('baeb2514-4c57-436d-b3cc-c1256211656d'),
        bool: true,
        bytea: Buffer.from('test'),
        int2: 1000,
        int4: 1000000,
        // Above Number.MAX_SAFE_INTEGER - checks BigInt round-tripping.
        int8: 9007199254740993n,
        float: 3.14
      },
      { _id: 2 as any, nested: { test: 'thing' } },
      { _id: 3 as any, date: new Date('2023-03-06 15:47+02') },
      {
        _id: 4 as any,
        timestamp: mongo.Timestamp.fromBits(123, 456),
        objectId: mongo.ObjectId.createFromHexString('66e834cc91d805df11fa0ecb')
      }
    ]);
  }

  // Same values as insert(), but each wrapped in an array.
  async function insertNested(collection: mongo.Collection) {
    await collection.insertMany([
      {
        _id: 1 as any,
        null: [null],
        text: ['text'],
        uuid: [new mongo.UUID('baeb2514-4c57-436d-b3cc-c1256211656d')],
        bool: [true],
        bytea: [Buffer.from('test')],
        int2: [1000],
        int4: [1000000],
        int8: [9007199254740993n],
        float: [3.14]
      },
      { _id: 2 as any, nested: [{ test: 'thing' }] },
      { _id: 3 as any, date: [new Date('2023-03-06 15:47+02')] },
      {
        _id: 10 as any,
        timestamp: [mongo.Timestamp.fromBits(123, 456)],
        objectId: [mongo.ObjectId.createFromHexString('66e834cc91d805df11fa0ecb')]
      }
    ]);
  }

  // Expected conversions for the scalar documents from insert():
  // ints/bools -> bigint, binary -> Uint8Array, sub-documents -> JSON text,
  // dates -> UTC strings.
  function checkResults(transformed: Record<string, any>[]) {
    expect(transformed[0]).toMatchObject({
      _id: 1n,
      text: 'text',
      uuid: 'baeb2514-4c57-436d-b3cc-c1256211656d',
      bool: 1n,
      bytea: new Uint8Array([116, 101, 115, 116]),
      int2: 1000n,
      int4: 1000000n,
      int8: 9007199254740993n,
      float: 3.14,
      null: null
    });
    expect(transformed[1]).toMatchObject({
      _id: 2n,
      nested: '{"test":"thing"}'
    });

    // Input was 15:47+02, so the stored instant is 13:47 UTC.
    expect(transformed[2]).toMatchObject({
      _id: 3n,
      date: '2023-03-06 13:47:00.000Z'
    });

    expect(transformed[3]).toMatchObject({
      _id: 4n,
      objectId: '66e834cc91d805df11fa0ecb',
      timestamp: 1958505087099n
    });
  }

  // Expected conversions for the array documents from insertNested():
  // arrays are serialized to JSON text, with binary values becoming null.
  function checkResultsNested(transformed: Record<string, any>[]) {
    expect(transformed[0]).toMatchObject({
      _id: 1n,
      text: `["text"]`,
      uuid: '["baeb2514-4c57-436d-b3cc-c1256211656d"]',
      bool: '[1]',
      bytea: '[null]',
      int2: '[1000]',
      int4: '[1000000]',
      int8: `[9007199254740993]`,
      float: '[3.14]',
      null: '[null]'
    });

    // Note: Depending on to what extent we use the original postgres value, the whitespace may change, and order may change.
    // We do expect that decimals and big numbers are preserved.
    expect(transformed[1]).toMatchObject({
      _id: 2n,
      nested: '[{"test":"thing"}]'
    });

    expect(transformed[2]).toMatchObject({
      _id: 3n,
      date: '["2023-03-06 13:47:00.000Z"]'
    });

    expect(transformed[3]).toMatchObject({
      _id: 10n,
      objectId: '["66e834cc91d805df11fa0ecb"]',
      timestamp: '[1958505087099]'
    });
  }

  test('test direct queries', async () => {
    const { db, client } = await connectMongoData();
    const collection = db.collection('test_data');
    try {
      await setupTable(db);

      await insert(collection);

      const transformed = [...ChangeStream.getQueryData(await db.collection('test_data').find().toArray())];

      checkResults(transformed);
    } finally {
      await client.close();
    }
  });

  test('test direct queries - arrays', async () => {
    const { db, client } = await connectMongoData();
    const collection = db.collection('test_data_arrays');
    try {
      await setupTable(db);

      await insertNested(collection);

      const transformed = [...ChangeStream.getQueryData(await db.collection('test_data_arrays').find().toArray())];

      checkResultsNested(transformed);
    } finally {
      await client.close();
    }
  });

  test('test replication', async () => {
    // With MongoDB, replication uses the exact same document format
    // as normal queries. We test it anyway.
    const { db, client } = await connectMongoData();
    const collection = db.collection('test_data');
    try {
      await setupTable(db);

      const stream = db.watch([], {
        useBigInt64: true,
        maxAwaitTimeMS: 50,
        fullDocument: 'updateLookup'
      });

      // Prime the stream so it is established before the inserts happen.
      await stream.tryNext();

      await insert(collection);

      const transformed = await getReplicationTx(stream, 4);

      checkResults(transformed);
    } finally {
      await client.close();
    }
  });

  test('test replication - arrays', async () => {
    const { db, client } = await connectMongoData();
    const collection = db.collection('test_data');
    try {
      await setupTable(db);

      const stream = db.watch([], {
        useBigInt64: true,
        maxAwaitTimeMS: 50,
        fullDocument: 'updateLookup'
      });

      // Prime the stream so it is established before the inserts happen.
      await stream.tryNext();

      await insertNested(collection);

      const transformed = await getReplicationTx(stream, 4);

      checkResultsNested(transformed);
    } finally {
      await client.close();
    }
  });
});
|
|
206
|
+
|
|
207
|
+
/**
|
|
208
|
+
* Return all the inserts from the first transaction in the replication stream.
|
|
209
|
+
*/
|
|
210
|
+
async function getReplicationTx(replicationStream: mongo.ChangeStream, count: number) {
|
|
211
|
+
let transformed: SqliteRow[] = [];
|
|
212
|
+
for await (const doc of replicationStream) {
|
|
213
|
+
transformed.push(constructAfterRecord((doc as any).fullDocument));
|
|
214
|
+
if (transformed.length == count) {
|
|
215
|
+
break;
|
|
216
|
+
}
|
|
217
|
+
}
|
|
218
|
+
return transformed;
|
|
219
|
+
}
|
package/test/src/util.ts
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import * as types from '@module/types/types.js';
|
|
2
|
+
import { BucketStorageFactory, Metrics, MongoBucketStorage, OpId } from '@powersync/service-core';
|
|
3
|
+
|
|
4
|
+
import { env } from './env.js';
|
|
5
|
+
import { logger } from '@powersync/lib-services-framework';
|
|
6
|
+
import { connectMongo } from '@core-tests/util.js';
|
|
7
|
+
import * as mongo from 'mongodb';
|
|
8
|
+
|
|
9
|
+
// The metrics need to be initialized before they can be used
// (module-level top-level await: runs once when this test util module loads).
await Metrics.initialise({
  disable_telemetry_sharing: true,
  powersync_instance_id: 'test',
  internal_metrics_endpoint: 'unused.for.tests.com'
});
// Start every test run from zeroed counters.
Metrics.getInstance().resetCounters();

// URI of the MongoDB instance holding test source data (see env.ts defaults).
export const TEST_URI = env.MONGO_TEST_DATA_URL;

// Normalized connection config used by tests that create a MongoManager.
export const TEST_CONNECTION_OPTIONS = types.normalizeConnectionConfig({
  type: 'mongodb',
  uri: TEST_URI
});
|
|
23
|
+
|
|
24
|
+
export type StorageFactory = () => Promise<BucketStorageFactory>;
|
|
25
|
+
|
|
26
|
+
export const INITIALIZED_MONGO_STORAGE_FACTORY: StorageFactory = async () => {
|
|
27
|
+
const db = await connectMongo();
|
|
28
|
+
|
|
29
|
+
// None of the PG tests insert data into this collection, so it was never created
|
|
30
|
+
if (!(await db.db.listCollections({ name: db.bucket_parameters.collectionName }).hasNext())) {
|
|
31
|
+
await db.db.createCollection('bucket_parameters');
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
await db.clear();
|
|
35
|
+
|
|
36
|
+
return new MongoBucketStorage(db, { slot_name_prefix: 'test_' });
|
|
37
|
+
};
|
|
38
|
+
|
|
39
|
+
export async function clearTestDb(db: mongo.Db) {
|
|
40
|
+
await db.dropDatabase();
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
export async function connectMongoData() {
|
|
44
|
+
const client = new mongo.MongoClient(env.MONGO_TEST_DATA_URL, {
|
|
45
|
+
connectTimeoutMS: env.CI ? 15_000 : 5_000,
|
|
46
|
+
socketTimeoutMS: env.CI ? 15_000 : 5_000,
|
|
47
|
+
serverSelectionTimeoutMS: env.CI ? 15_000 : 2_500,
|
|
48
|
+
useBigInt64: true
|
|
49
|
+
});
|
|
50
|
+
const dbname = new URL(env.MONGO_TEST_DATA_URL).pathname.substring(1);
|
|
51
|
+
return { client, db: client.db(dbname) };
|
|
52
|
+
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "../../../tsconfig.base.json",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"rootDir": "src",
|
|
5
|
+
"baseUrl": "./",
|
|
6
|
+
"noEmit": true,
|
|
7
|
+
"esModuleInterop": true,
|
|
8
|
+
"skipLibCheck": true,
|
|
9
|
+
"sourceMap": true,
|
|
10
|
+
"paths": {
|
|
11
|
+
"@/*": ["../../../packages/service-core/src/*"],
|
|
12
|
+
"@module/*": ["../src/*"],
|
|
13
|
+
"@core-tests/*": ["../../../packages/service-core/test/src/*"]
|
|
14
|
+
}
|
|
15
|
+
},
|
|
16
|
+
"include": ["src"],
|
|
17
|
+
"references": [
|
|
18
|
+
{
|
|
19
|
+
"path": "../"
|
|
20
|
+
},
|
|
21
|
+
{
|
|
22
|
+
"path": "../../../packages/service-core/test"
|
|
23
|
+
},
|
|
24
|
+
{
|
|
25
|
+
"path": "../../../packages/service-core/"
|
|
26
|
+
}
|
|
27
|
+
]
|
|
28
|
+
}
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "../../tsconfig.base.json",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"rootDir": "src",
|
|
5
|
+
"outDir": "dist",
|
|
6
|
+
"esModuleInterop": true,
|
|
7
|
+
"skipLibCheck": true,
|
|
8
|
+
"sourceMap": true
|
|
9
|
+
},
|
|
10
|
+
"include": ["src"],
|
|
11
|
+
"references": [
|
|
12
|
+
{
|
|
13
|
+
"path": "../../packages/types"
|
|
14
|
+
},
|
|
15
|
+
{
|
|
16
|
+
"path": "../../packages/jsonbig"
|
|
17
|
+
},
|
|
18
|
+
{
|
|
19
|
+
"path": "../../packages/sync-rules"
|
|
20
|
+
},
|
|
21
|
+
{
|
|
22
|
+
"path": "../../packages/service-core"
|
|
23
|
+
},
|
|
24
|
+
{
|
|
25
|
+
"path": "../../libs/lib-services"
|
|
26
|
+
}
|
|
27
|
+
]
|
|
28
|
+
}
|