@powersync/service-module-mongodb 0.1.8 → 0.2.0

This diff shows the published contents of two package versions from a supported public registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
@@ -1,14 +1,13 @@
- import { putOp, removeOp } from '@core-tests/stream_utils.js';
- import { MONGO_STORAGE_FACTORY } from '@core-tests/util.js';
- import { BucketStorageFactory } from '@powersync/service-core';
+ import { test_utils } from '@powersync/service-core-tests';
+
+ import { PostImagesOption } from '@module/types/types.js';
+ import { storage } from '@powersync/service-core';
  import * as crypto from 'crypto';
  import * as mongo from 'mongodb';
  import { setTimeout } from 'node:timers/promises';
  import { describe, expect, test, vi } from 'vitest';
  import { ChangeStreamTestContext } from './change_stream_utils.js';
- import { PostImagesOption } from '@module/types/types.js';
-
- type StorageFactory = () => Promise<BucketStorageFactory>;
+ import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';

  const BASIC_SYNC_RULES = `
  bucket_definitions:
@@ -18,10 +17,10 @@ bucket_definitions:
  `;

  describe('change stream - mongodb', { timeout: 20_000 }, function () {
-   defineChangeStreamTests(MONGO_STORAGE_FACTORY);
+   defineChangeStreamTests(INITIALIZED_MONGO_STORAGE_FACTORY);
  });

- function defineChangeStreamTests(factory: StorageFactory) {
+ function defineChangeStreamTests(factory: storage.TestStorageFactory) {
    test('replicating basic values', async () => {
      await using context = await ChangeStreamTestContext.open(factory);
      const { db } = context;
@@ -52,10 +51,10 @@ bucket_definitions:
      const data = await context.getBucketData('global[]');

      expect(data).toMatchObject([
-       putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
-       putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n }),
-       putOp('test_data', { id: test_id.toHexString(), description: 'test3' }),
-       removeOp('test_data', test_id.toHexString())
+       test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
+       test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n }),
+       test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test3' }),
+       test_utils.removeOp('test_data', test_id.toHexString())
      ]);
    });

@@ -86,8 +85,8 @@ bucket_definitions:
      const data = await context.getBucketData('global[]');

      expect(data).toMatchObject([
-       putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
-       putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n })
+       test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
+       test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n })
      ]);
    });

@@ -125,11 +124,11 @@ bucket_definitions:
      const data = await context.getBucketData('global[]');

      expect(data).toMatchObject([
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
        // fullDocument is not available at the point this is replicated, resulting in it treated as a remove
-       removeOp('test_data', test_id!.toHexString()),
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
-       removeOp('test_data', test_id!.toHexString())
+       test_utils.removeOp('test_data', test_id!.toHexString()),
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
+       test_utils.removeOp('test_data', test_id!.toHexString())
      ]);
    });

@@ -171,11 +170,11 @@ bucket_definitions:
      const data = await context.getBucketData('global[]');

      expect(data).toMatchObject([
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
        // The postImage helps us get this data
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
-       removeOp('test_data', test_id!.toHexString())
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
+       test_utils.removeOp('test_data', test_id!.toHexString())
      ]);
    });

@@ -216,11 +215,11 @@ bucket_definitions:
      const data = await context.getBucketData('global[]');

      expect(data).toMatchObject([
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
        // The postImage helps us get this data
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
-       removeOp('test_data', test_id!.toHexString())
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
+       test_utils.removeOp('test_data', test_id!.toHexString())
      ]);
    });

@@ -244,7 +243,7 @@ bucket_definitions:

      const data = await context.getBucketData('global[]');

-     expect(data).toMatchObject([putOp('test_DATA', { id: test_id, description: 'test1' })]);
+     expect(data).toMatchObject([test_utils.putOp('test_DATA', { id: test_id, description: 'test1' })]);
    });

    test('replicating large values', async () => {
@@ -270,10 +269,10 @@ bucket_definitions:

      const data = await context.getBucketData('global[]');
      expect(data.slice(0, 1)).toMatchObject([
-       putOp('test_data', { id: test_id.toHexString(), name: 'test1', description: largeDescription })
+       test_utils.putOp('test_data', { id: test_id.toHexString(), name: 'test1', description: largeDescription })
      ]);
      expect(data.slice(1)).toMatchObject([
-       putOp('test_data', { id: test_id.toHexString(), name: 'test2', description: largeDescription })
+       test_utils.putOp('test_data', { id: test_id.toHexString(), name: 'test2', description: largeDescription })
      ]);
    });

@@ -302,8 +301,8 @@ bucket_definitions:
      const data = await context.getBucketData('global[]');

      expect(data).toMatchObject([
-       putOp('test_data', { id: test_id, description: 'test1' }),
-       removeOp('test_data', test_id)
+       test_utils.putOp('test_data', { id: test_id, description: 'test1' }),
+       test_utils.removeOp('test_data', test_id)
      ]);
    });

@@ -330,9 +329,9 @@ bucket_definitions:
      const data = await context.getBucketData('global[]');

      expect(data).toMatchObject([
-       putOp('test_data1', { id: test_id, description: 'test1' }),
-       removeOp('test_data1', test_id),
-       putOp('test_data2', { id: test_id, description: 'test1' })
+       test_utils.putOp('test_data1', { id: test_id, description: 'test1' }),
+       test_utils.removeOp('test_data1', test_id),
+       test_utils.putOp('test_data2', { id: test_id, description: 'test1' })
      ]);
    });

@@ -349,7 +348,7 @@ bucket_definitions:
      context.startStreaming();

      const data = await context.getBucketData('global[]');
-     expect(data).toMatchObject([putOp('test_data', { id: test_id, description: 'test1' })]);
+     expect(data).toMatchObject([test_utils.putOp('test_data', { id: test_id, description: 'test1' })]);
    });

    test('large record', async () => {
@@ -446,8 +445,8 @@ bucket_definitions:

      const data = await context.getBucketData('global[]');
      expect(data).toMatchObject([
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test1' }),
-       putOp('test_data', { id: test_id!.toHexString(), description: 'test2' })
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test1' }),
+       test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test2' })
      ]);
    });

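The `test_utils.putOp` / `test_utils.removeOp` calls above build partial oplog entries for the `toMatchObject` assertions. A minimal sketch of helpers with that behaviour, where the entry shape (`op` / `object_type` / `object_id` / `data`) and the BigInt-safe serializer are assumptions; the published `@powersync/service-core-tests` implementation may differ:

// Hypothetical matcher helpers, for illustration only.
type PartialOplogEntry = { op: 'PUT' | 'REMOVE'; object_type: string; object_id: string; data?: string };

// BigInt values such as 1152921504606846976n are not valid JSON, so a replacer is assumed here.
const stringify = (value: Record<string, any>) =>
  JSON.stringify(value, (_key, v) => (typeof v === 'bigint' ? v.toString() : v));

function putOp(table: string, data: Record<string, any> & { id: string }): PartialOplogEntry {
  return { op: 'PUT', object_type: table, object_id: data.id, data: stringify(data) };
}

function removeOp(table: string, id: string): PartialOplogEntry {
  return { op: 'REMOVE', object_type: table, object_id: id };
}
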
@@ -1,12 +1,12 @@
  import { ActiveCheckpoint, BucketStorageFactory, OpId, SyncRulesBucketStorage } from '@powersync/service-core';

- import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js';
- import { fromAsync } from '@core-tests/stream_utils.js';
- import { MongoManager } from '@module/replication/MongoManager.js';
  import { ChangeStream, ChangeStreamOptions } from '@module/replication/ChangeStream.js';
- import * as mongo from 'mongodb';
+ import { MongoManager } from '@module/replication/MongoManager.js';
  import { createCheckpoint } from '@module/replication/MongoRelation.js';
  import { NormalizedMongoConnectionConfig } from '@module/types/types.js';
+ import { test_utils } from '@powersync/service-core-tests';
+ import * as mongo from 'mongodb';
+ import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js';

  export class ChangeStreamTestContext {
    private _walStream?: ChangeStream;
@@ -102,7 +102,7 @@ export class ChangeStreamTestContext {
    async getBucketsDataBatch(buckets: Record<string, string>, options?: { timeout?: number }) {
      let checkpoint = await this.getCheckpoint(options);
      const map = new Map<string, string>(Object.entries(buckets));
-     return fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
+     return test_utils.fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
    }

    async getBucketData(
@@ -117,7 +117,7 @@ export class ChangeStreamTestContext {
        limit: options?.limit,
        chunkLimitBytes: options?.chunkLimitBytes
      });
-     const batches = await fromAsync(batch);
+     const batches = await test_utils.fromAsync(batch);
      return batches[0]?.batch.data ?? [];
    }

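`test_utils.fromAsync` is used here to drain an async iterable into an array. A minimal sketch with the same (assumed) semantics:

// Collects every item yielded by an async iterable into an array.
async function fromAsync<T>(iterable: AsyncIterable<T>): Promise<T[]> {
  const items: T[] = [];
  for await (const item of iterable) {
    items.push(item);
  }
  return items;
}

This is what lets the tests make plain array assertions over `getBucketDataBatch`, which otherwise yields batches lazily.
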
package/test/src/env.ts CHANGED
@@ -1,6 +1,7 @@
  import { utils } from '@powersync/lib-services-framework';

  export const env = utils.collectEnvironmentVariables({
+   MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'),
    MONGO_TEST_DATA_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test_data'),
    CI: utils.type.boolean.default('false'),
    SLOW_TESTS: utils.type.boolean.default('false')
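Each collected variable resolves to the process environment value when set, falling back to the declared default. Assuming the returned `env` object exposes plain typed values, usage looks like:

import { env } from './env.js';

// 'mongodb://localhost:27017/powersync_test' unless MONGO_TEST_URL is set in the environment.
console.log(env.MONGO_TEST_URL);
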
@@ -45,12 +45,52 @@ describe('mongo data types', () => {
          mongo.ObjectId.createFromHexString('66e834cc91d805df11fa0ecb'),
          'mydb',
          { foo: 'bar' }
-       ),
-       undefined: undefined
+       )
      }
    ]);
  }

+ async function insertUndefined(db: mongo.Db, collection: string, array?: boolean) {
+   // MongoDB has deprecated the `undefined` value, making it really
+   // difficult to insert one into the database.
+   // mapReduce is also deprecated, but it's one way to still generate
+   // the value.
+   const mapInput = db.collection('map_input');
+   await mapInput.insertOne({ test: 'test' });
+   const fin = array ? `return { result: [undefined] }` : `return { result: undefined }`;
+   await db.command({
+     mapReduce: 'map_input',
+     map: new mongo.Code(`function () {
+       // We only need to emit once for a single result:
+       emit(5, {});
+     }`),
+     reduce: new mongo.Code(`function (key, values) {
+       // Return an object whose property is explicitly set to undefined
+       return undefined;
+     }`),
+     finalize: new mongo.Code(`function (key, reducedVal) {
+       ${fin};
+     }`),
+     out: { merge: 'map_output' }
+   });
+
+   await db
+     .collection('map_output')
+     .aggregate([
+       { $set: { undefined: '$value.result' } },
+       { $project: { undefined: 1 } },
+       {
+         $merge: {
+           into: collection
+         }
+       }
+     ])
+     .toArray();
+
+   await mapInput.drop();
+   await db.collection('map_output').drop();
+ }
+
  async function insertNested(collection: mongo.Collection) {
    await collection.insertMany([
      {
@@ -118,9 +158,11 @@ describe('mongo data types', () => {
      js: '{"code":"testcode","scope":null}',
      js2: '{"code":"testcode","scope":{"foo":"bar"}}',
      pointer: '{"collection":"mycollection","oid":"66e834cc91d805df11fa0ecb","fields":{}}',
-     pointer2: '{"collection":"mycollection","oid":"66e834cc91d805df11fa0ecb","db":"mydb","fields":{"foo":"bar"}}',
-     undefined: null
+     pointer2: '{"collection":"mycollection","oid":"66e834cc91d805df11fa0ecb","db":"mydb","fields":{"foo":"bar"}}'
    });
+
+   // This must specifically be null, and not undefined.
+   expect(transformed[4].undefined).toBeNull();
  }

  function checkResultsNested(transformed: Record<string, any>[]) {
@@ -158,20 +200,27 @@ describe('mongo data types', () => {
      js: '[{"code":"testcode","scope":null}]',
      pointer: '[{"collection":"mycollection","oid":"66e834cc91d805df11fa0ecb","fields":{}}]',
      minKey: '[null]',
-     maxKey: '[null]',
+     maxKey: '[null]'
+   });
+
+   expect(transformed[4]).toMatchObject({
      undefined: '[null]'
    });
  }

  test('test direct queries', async () => {
    const { db, client } = await connectMongoData();
+
    const collection = db.collection('test_data');
    try {
      await setupTable(db);
-
      await insert(collection);
+     await insertUndefined(db, 'test_data');

-     const transformed = [...ChangeStream.getQueryData(await db.collection('test_data').find().toArray())];
+     const rawResults = await db.collection('test_data').find().toArray();
+     // It is tricky to save "undefined" with mongo, so we check that it succeeded.
+     expect(rawResults[4].undefined).toBeUndefined();
+     const transformed = [...ChangeStream.getQueryData(rawResults)];

      checkResults(transformed);
    } finally {
@@ -186,8 +235,11 @@ describe('mongo data types', () => {
      await setupTable(db);

      await insertNested(collection);
+     await insertUndefined(db, 'test_data_arrays', true);

-     const transformed = [...ChangeStream.getQueryData(await db.collection('test_data_arrays').find().toArray())];
+     const rawResults = await db.collection('test_data_arrays').find().toArray();
+     expect(rawResults[4].undefined).toEqual([undefined]);
+     const transformed = [...ChangeStream.getQueryData(rawResults)];

      checkResultsNested(transformed);
    } finally {
@@ -212,8 +264,9 @@ describe('mongo data types', () => {
      await stream.tryNext();

      await insert(collection);
+     await insertUndefined(db, 'test_data');

-     const transformed = await getReplicationTx(stream, 4);
+     const transformed = await getReplicationTx(stream, 5);

      checkResults(transformed);
    } finally {
@@ -236,8 +289,9 @@ describe('mongo data types', () => {
      await stream.tryNext();

      await insertNested(collection);
+     await insertUndefined(db, 'test_data_arrays', true);

-     const transformed = await getReplicationTx(stream, 4);
+     const transformed = await getReplicationTx(stream, 5);

      checkResultsNested(transformed);
    } finally {
@@ -256,6 +310,7 @@ describe('mongo data types', () => {
    const collection = db.collection('test_data');
    await setupTable(db);
    await insert(collection);
+   await insertUndefined(db, 'test_data');

    const schema = await adapter.getConnectionSchema();
    const dbSchema = schema.filter((s) => s.name == TEST_CONNECTION_OPTIONS.database)[0];
@@ -440,6 +495,10 @@ bucket_definitions:
  async function getReplicationTx(replicationStream: mongo.ChangeStream, count: number) {
    let transformed: SqliteRow[] = [];
    for await (const doc of replicationStream) {
+     // Specifically filter out map_input / map_output collections
+     if (!(doc as any)?.ns?.coll?.startsWith('test_data')) {
+       continue;
+     }
      transformed.push(constructAfterRecord((doc as any).fullDocument));
      if (transformed.length == count) {
        break;
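The `insertUndefined` workaround added above exists because the BSON `undefined` type (type 6) is deprecated and modern drivers make it hard to write directly; routing the value through the (equally deprecated) mapReduce command is one way to still produce it. To confirm the workaround stored the deprecated type, a `$type` query can be used; a sketch with a hypothetical `hasBsonUndefined` helper, with the collection name taken from the tests above:

import * as mongo from 'mongodb';

// Returns true if some document stores the deprecated BSON undefined (type 6)
// in its `undefined` field, mirroring the rawResults[4].undefined assertions above.
async function hasBsonUndefined(db: mongo.Db): Promise<boolean> {
  const doc = await db.collection('test_data').findOne({ undefined: { $type: 'undefined' } });
  return doc != null;
}
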
package/test/src/setup.ts CHANGED
@@ -1,7 +1,10 @@
  import { container } from '@powersync/lib-services-framework';
+ import { test_utils } from '@powersync/service-core-tests';
  import { beforeAll } from 'vitest';

- beforeAll(() => {
+ beforeAll(async () => {
    // Executes for every test file
    container.registerDefaults();
+
+   await test_utils.initMetrics();
  });
@@ -1,30 +1,21 @@
- import { MONGO_STORAGE_FACTORY } from '@core-tests/util.js';
- import { BucketStorageFactory } from '@powersync/service-core';
+ import { storage } from '@powersync/service-core';
  import * as mongo from 'mongodb';
  import { setTimeout } from 'node:timers/promises';
  import { describe, expect, test } from 'vitest';
  import { ChangeStreamTestContext, setSnapshotHistorySeconds } from './change_stream_utils.js';
  import { env } from './env.js';
-
- type StorageFactory = () => Promise<BucketStorageFactory>;
-
- const BASIC_SYNC_RULES = `
- bucket_definitions:
-   global:
-     data:
-       - SELECT _id as id, description FROM "test_data"
- `;
+ import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';

  describe('change stream slow tests - mongodb', { timeout: 60_000 }, function () {
    if (env.CI || env.SLOW_TESTS) {
-     defineSlowTests(MONGO_STORAGE_FACTORY);
+     defineSlowTests(INITIALIZED_MONGO_STORAGE_FACTORY);
    } else {
      // Need something in this file.
      test('no-op', () => {});
    }
  });

- function defineSlowTests(factory: StorageFactory) {
+ function defineSlowTests(factory: storage.TestStorageFactory) {
    test('replicating snapshot with lots of data', async () => {
      await using context = await ChangeStreamTestContext.open(factory);
      // Test with low minSnapshotHistoryWindowInSeconds, to trigger:
@@ -96,8 +87,10 @@ bucket_definitions:

      const data = await context.getBucketData('global[]', undefined, { limit: 50_000, chunkLimitBytes: 60_000_000 });

-     const preDocuments = data.filter((d) => JSON.parse(d.data! as string).description.startsWith('pre')).length;
-     const updatedDocuments = data.filter((d) => JSON.parse(d.data! as string).description.startsWith('updated')).length;
+     const preDocuments = data.filter((d: any) => JSON.parse(d.data! as string).description.startsWith('pre')).length;
+     const updatedDocuments = data.filter((d: any) =>
+       JSON.parse(d.data! as string).description.startsWith('updated')
+     ).length;

      // If the test works properly, preDocuments should be around 2000-3000.
      // The total should be around 9000-9900.
package/test/src/util.ts CHANGED
@@ -1,18 +1,8 @@
  import * as types from '@module/types/types.js';
- import { BucketStorageFactory, Metrics, MongoBucketStorage, OpId } from '@powersync/service-core';

- import { env } from './env.js';
- import { logger } from '@powersync/lib-services-framework';
- import { connectMongo } from '@core-tests/util.js';
+ import * as mongo_storage from '@powersync/service-module-mongodb-storage';
  import * as mongo from 'mongodb';
-
- // The metrics need to be initialized before they can be used
- await Metrics.initialise({
-   disable_telemetry_sharing: true,
-   powersync_instance_id: 'test',
-   internal_metrics_endpoint: 'unused.for.tests.com'
- });
- Metrics.getInstance().resetCounters();
+ import { env } from './env.js';

  export const TEST_URI = env.MONGO_TEST_DATA_URL;

@@ -21,20 +11,10 @@ export const TEST_CONNECTION_OPTIONS = types.normalizeConnectionConfig({
    uri: TEST_URI
  });

- export type StorageFactory = () => Promise<BucketStorageFactory>;
-
- export const INITIALIZED_MONGO_STORAGE_FACTORY: StorageFactory = async () => {
-   const db = await connectMongo();
-
-   // None of the PG tests insert data into this collection, so it was never created
-   if (!(await db.db.listCollections({ name: db.bucket_parameters.collectionName }).hasNext())) {
-     await db.db.createCollection('bucket_parameters');
-   }
-
-   await db.clear();
-
-   return new MongoBucketStorage(db, { slot_name_prefix: 'test_' });
- };
+ export const INITIALIZED_MONGO_STORAGE_FACTORY = mongo_storage.MongoTestStorageFactoryGenerator({
+   url: env.MONGO_TEST_URL,
+   isCI: env.CI
+ });

  export async function clearTestDb(db: mongo.Db) {
    await db.dropDatabase();
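The generated factory is consumed exactly like the hand-rolled one it replaces, as in the change stream tests earlier in this diff (the test name here is illustrative):

import { test } from 'vitest';
import { ChangeStreamTestContext } from './change_stream_utils.js';
import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';

test('example', async () => {
  // The context provisions storage through the factory and disposes it afterwards.
  await using context = await ChangeStreamTestContext.open(INITIALIZED_MONGO_STORAGE_FACTORY);
  // ... write to context.db, then assert on context.getBucketData('global[]') ...
});
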
@@ -23,6 +23,9 @@
    },
    {
      "path": "../../../packages/service-core/"
+   },
+   {
+     "path": "../../../packages/service-core-tests/"
    }
  ]
}
package/tsconfig.json CHANGED
@@ -23,6 +23,12 @@
    },
    {
      "path": "../../libs/lib-services"
+   },
+   {
+     "path": "../../libs/lib-mongodb"
+   },
+   {
+     "path": "../module-mongodb-storage"
    }
  ]
}