@friggframework/core 2.0.0--canary.490.856adab.0 → 2.0.0--canary.490.d8be1b9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/database/repositories/health-check-repository-documentdb.js +73 -0
- package/database/repositories/health-check-repository-factory.js +7 -0
- package/integrations/repositories/integration-mapping-repository-documentdb.js +60 -0
- package/integrations/repositories/integration-mapping-repository-factory.js +9 -7
- package/integrations/repositories/integration-repository-documentdb.js +103 -0
- package/integrations/repositories/integration-repository-factory.js +7 -1
- package/integrations/repositories/process-repository-documentdb.js +61 -0
- package/integrations/repositories/process-repository-factory.js +7 -1
- package/modules/repositories/module-repository-documentdb.js +82 -0
- package/modules/repositories/module-repository-factory.js +7 -7
- package/package.json +5 -5
- package/syncs/repositories/sync-repository-documentdb.js +62 -0
- package/syncs/repositories/sync-repository-factory.js +7 -12
- package/token/repositories/token-repository-documentdb.js +91 -0
- package/token/repositories/token-repository-factory.js +7 -7
- package/user/repositories/user-repository-documentdb.js +148 -0
- package/user/repositories/user-repository-factory.js +7 -1
- package/websocket/repositories/websocket-connection-repository-documentdb.js +64 -0
- package/websocket/repositories/websocket-connection-repository-factory.js +9 -13
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const { getNativeMongoClient } = require('../mongodb-native-client');
|
|
3
|
+
const { EncryptedCollection } = require('../encrypted-collection-wrapper');
|
|
4
|
+
const { FieldEncryptionService } = require('../encryption/field-encryption-service');
|
|
5
|
+
const { getEncryptedFields } = require('../encryption/encryption-schema-registry');
|
|
6
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
7
|
+
const { HealthCheckRepositoryInterface } = require('./health-check-repository-interface');
|
|
8
|
+
|
|
9
|
+
class HealthCheckRepositoryDocumentDB extends HealthCheckRepositoryInterface {
|
|
10
|
+
constructor() {
|
|
11
|
+
super();
|
|
12
|
+
|
|
13
|
+
const nativeClient = getNativeMongoClient();
|
|
14
|
+
this.nativeClient = nativeClient;
|
|
15
|
+
|
|
16
|
+
const collection = nativeClient.collection('Credential');
|
|
17
|
+
|
|
18
|
+
const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
|
|
19
|
+
const encryptionService = new FieldEncryptionService({
|
|
20
|
+
cryptor,
|
|
21
|
+
schema: { getEncryptedFields },
|
|
22
|
+
});
|
|
23
|
+
|
|
24
|
+
this.collection = new EncryptedCollection(collection, encryptionService, 'Credential');
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
async getDatabaseConnectionState() {
|
|
28
|
+
let isConnected = false;
|
|
29
|
+
let stateName = 'unknown';
|
|
30
|
+
|
|
31
|
+
try {
|
|
32
|
+
await this.nativeClient.runCommand({ ping: 1 });
|
|
33
|
+
isConnected = true;
|
|
34
|
+
stateName = 'connected';
|
|
35
|
+
} catch (error) {
|
|
36
|
+
stateName = 'disconnected';
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
return {
|
|
40
|
+
readyState: isConnected ? 1 : 0,
|
|
41
|
+
stateName,
|
|
42
|
+
isConnected,
|
|
43
|
+
};
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
async pingDatabase(maxTimeMS = 2000) {
|
|
47
|
+
const pingStart = Date.now();
|
|
48
|
+
await this.nativeClient.runCommand({ ping: 1 });
|
|
49
|
+
return Date.now() - pingStart;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
async createCredential(credentialData) {
|
|
53
|
+
const result = await this.collection.insertOne(credentialData);
|
|
54
|
+
return await this.collection.findOne({ _id: result.insertedId });
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
async findCredentialById(id) {
|
|
58
|
+
return await this.collection.findOne({ _id: new ObjectId(id) });
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
async getRawCredentialById(id) {
|
|
62
|
+
const rawCollection = this.nativeClient.collection('Credential');
|
|
63
|
+
return await rawCollection.findOne({ _id: new ObjectId(id) });
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
async deleteCredentialById(id) {
|
|
67
|
+
const result = await this.collection.deleteOne({ _id: new ObjectId(id) });
|
|
68
|
+
return result.deletedCount > 0;
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
module.exports = { HealthCheckRepositoryDocumentDB };
|
|
73
|
+
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
const { HealthCheckRepositoryMongoDB } = require('./health-check-repository-mongodb');
|
|
2
2
|
const { HealthCheckRepositoryPostgreSQL } = require('./health-check-repository-postgres');
|
|
3
|
+
const { HealthCheckRepositoryDocumentDB } = require('./health-check-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../utils/documentdb-compatibility');
|
|
3
5
|
const config = require('../config');
|
|
4
6
|
|
|
5
7
|
/**
|
|
@@ -16,6 +18,10 @@ const config = require('../config');
|
|
|
16
18
|
* const repository = createHealthCheckRepository({ prismaClient: prisma });
|
|
17
19
|
*/
|
|
18
20
|
function createHealthCheckRepository({ prismaClient } = {}) {
|
|
21
|
+
if (isDocumentDB()) {
|
|
22
|
+
return new HealthCheckRepositoryDocumentDB();
|
|
23
|
+
}
|
|
24
|
+
|
|
19
25
|
if (!prismaClient) {
|
|
20
26
|
throw new Error('prismaClient is required');
|
|
21
27
|
}
|
|
@@ -40,4 +46,5 @@ module.exports = {
|
|
|
40
46
|
createHealthCheckRepository,
|
|
41
47
|
HealthCheckRepositoryMongoDB,
|
|
42
48
|
HealthCheckRepositoryPostgreSQL,
|
|
49
|
+
HealthCheckRepositoryDocumentDB,
|
|
43
50
|
};
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const { getNativeMongoClient } = require('../../database/mongodb-native-client');
|
|
3
|
+
const { EncryptedCollection } = require('../../database/encrypted-collection-wrapper');
|
|
4
|
+
const { FieldEncryptionService } = require('../../database/encryption/field-encryption-service');
|
|
5
|
+
const { getEncryptedFields } = require('../../database/encryption/encryption-schema-registry');
|
|
6
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
7
|
+
const { IntegrationMappingRepositoryInterface } = require('./integration-mapping-repository-interface');
|
|
8
|
+
|
|
9
|
+
class IntegrationMappingRepositoryDocumentDB extends IntegrationMappingRepositoryInterface {
|
|
10
|
+
constructor() {
|
|
11
|
+
super();
|
|
12
|
+
|
|
13
|
+
const nativeClient = getNativeMongoClient();
|
|
14
|
+
const collection = nativeClient.collection('IntegrationMapping');
|
|
15
|
+
|
|
16
|
+
const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
|
|
17
|
+
const encryptionService = new FieldEncryptionService({
|
|
18
|
+
cryptor,
|
|
19
|
+
schema: { getEncryptedFields },
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
this.collection = new EncryptedCollection(collection, encryptionService, 'IntegrationMapping');
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
async findMappingById(mappingId) {
|
|
26
|
+
return await this.collection.findOne({ _id: new ObjectId(mappingId) });
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
async findMappingsByIntegrationId(integrationId) {
|
|
30
|
+
return await this.collection.find({ integrationId });
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
async createMapping(mappingData) {
|
|
34
|
+
const doc = {
|
|
35
|
+
...mappingData,
|
|
36
|
+
createdAt: new Date(),
|
|
37
|
+
updatedAt: new Date(),
|
|
38
|
+
};
|
|
39
|
+
|
|
40
|
+
const result = await this.collection.insertOne(doc);
|
|
41
|
+
return await this.collection.findOne({ _id: result.insertedId });
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
async updateMapping(mappingId, updates) {
|
|
45
|
+
await this.collection.updateOne(
|
|
46
|
+
{ _id: new ObjectId(mappingId) },
|
|
47
|
+
{ $set: { ...updates, updatedAt: new Date() } }
|
|
48
|
+
);
|
|
49
|
+
|
|
50
|
+
return await this.findMappingById(mappingId);
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
async deleteMapping(mappingId) {
|
|
54
|
+
const result = await this.collection.deleteOne({ _id: new ObjectId(mappingId) });
|
|
55
|
+
return result.deletedCount > 0;
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
module.exports = { IntegrationMappingRepositoryDocumentDB };
|
|
60
|
+
|
|
@@ -1,9 +1,7 @@
|
|
|
1
|
-
const {
|
|
2
|
-
|
|
3
|
-
} = require('./integration-mapping-repository-
|
|
4
|
-
const {
|
|
5
|
-
IntegrationMappingRepositoryPostgres,
|
|
6
|
-
} = require('./integration-mapping-repository-postgres');
|
|
1
|
+
const { IntegrationMappingRepositoryMongo } = require('./integration-mapping-repository-mongo');
|
|
2
|
+
const { IntegrationMappingRepositoryPostgres } = require('./integration-mapping-repository-postgres');
|
|
3
|
+
const { IntegrationMappingRepositoryDocumentDB } = require('./integration-mapping-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../../database/utils/documentdb-compatibility');
|
|
7
5
|
const config = require('../../database/config');
|
|
8
6
|
|
|
9
7
|
/**
|
|
@@ -26,6 +24,10 @@ const config = require('../../database/config');
|
|
|
26
24
|
* @returns {IntegrationMappingRepositoryInterface} Configured repository adapter
|
|
27
25
|
*/
|
|
28
26
|
function createIntegrationMappingRepository() {
|
|
27
|
+
if (isDocumentDB()) {
|
|
28
|
+
return new IntegrationMappingRepositoryDocumentDB();
|
|
29
|
+
}
|
|
30
|
+
|
|
29
31
|
const dbType = config.DB_TYPE;
|
|
30
32
|
|
|
31
33
|
switch (dbType) {
|
|
@@ -44,7 +46,7 @@ function createIntegrationMappingRepository() {
|
|
|
44
46
|
|
|
45
47
|
module.exports = {
|
|
46
48
|
createIntegrationMappingRepository,
|
|
47
|
-
// Export adapters for direct testing
|
|
48
49
|
IntegrationMappingRepositoryMongo,
|
|
49
50
|
IntegrationMappingRepositoryPostgres,
|
|
51
|
+
IntegrationMappingRepositoryDocumentDB,
|
|
50
52
|
};
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const { getNativeMongoClient } = require('../../database/mongodb-native-client');
|
|
3
|
+
const { EncryptedCollection } = require('../../database/encrypted-collection-wrapper');
|
|
4
|
+
const { FieldEncryptionService } = require('../../database/encryption/field-encryption-service');
|
|
5
|
+
const { getEncryptedFields } = require('../../database/encryption/encryption-schema-registry');
|
|
6
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
7
|
+
const { IntegrationRepositoryInterface } = require('./integration-repository-interface');
|
|
8
|
+
|
|
9
|
+
class IntegrationRepositoryDocumentDB extends IntegrationRepositoryInterface {
|
|
10
|
+
constructor() {
|
|
11
|
+
super();
|
|
12
|
+
|
|
13
|
+
const nativeClient = getNativeMongoClient();
|
|
14
|
+
const collection = nativeClient.collection('Integration');
|
|
15
|
+
|
|
16
|
+
const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
|
|
17
|
+
const encryptionService = new FieldEncryptionService({
|
|
18
|
+
cryptor,
|
|
19
|
+
schema: { getEncryptedFields },
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
this.collection = new EncryptedCollection(collection, encryptionService, 'Integration');
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
async findIntegrationById(id) {
|
|
26
|
+
return await this.collection.findOne({ _id: new ObjectId(id) });
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
async findIntegrationByUserId(userId) {
|
|
30
|
+
const integrations = await this.collection.find({ userId });
|
|
31
|
+
return integrations[0] || null;
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
async findIntegrationsByUserId(userId) {
|
|
35
|
+
return await this.collection.find({ userId });
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
async findIntegrationByName(name) {
|
|
39
|
+
return await this.collection.findOne({ name });
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
async createIntegration(entities, userId, config) {
|
|
43
|
+
const entityIds = Array.isArray(entities)
|
|
44
|
+
? entities.map(e => e.id || e._id?.toString() || e)
|
|
45
|
+
: [entities.id || entities._id?.toString() || entities];
|
|
46
|
+
|
|
47
|
+
const doc = {
|
|
48
|
+
userId,
|
|
49
|
+
entityIds,
|
|
50
|
+
config: config || {},
|
|
51
|
+
status: 'active',
|
|
52
|
+
createdAt: new Date(),
|
|
53
|
+
updatedAt: new Date(),
|
|
54
|
+
};
|
|
55
|
+
|
|
56
|
+
const result = await this.collection.insertOne(doc);
|
|
57
|
+
return await this.collection.findOne({ _id: result.insertedId });
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
async updateIntegrationStatus(integrationId, status) {
|
|
61
|
+
await this.collection.updateOne(
|
|
62
|
+
{ _id: new ObjectId(integrationId) },
|
|
63
|
+
{ $set: { status, updatedAt: new Date() } }
|
|
64
|
+
);
|
|
65
|
+
|
|
66
|
+
return await this.findIntegrationById(integrationId);
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
async updateIntegrationMessages(integrationId, incomingMessages, outgoingMessages) {
|
|
70
|
+
const update = { updatedAt: new Date() };
|
|
71
|
+
|
|
72
|
+
if (incomingMessages !== undefined) {
|
|
73
|
+
update.incomingMessages = incomingMessages;
|
|
74
|
+
}
|
|
75
|
+
if (outgoingMessages !== undefined) {
|
|
76
|
+
update.outgoingMessages = outgoingMessages;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
await this.collection.updateOne(
|
|
80
|
+
{ _id: new ObjectId(integrationId) },
|
|
81
|
+
{ $set: update }
|
|
82
|
+
);
|
|
83
|
+
|
|
84
|
+
return await this.findIntegrationById(integrationId);
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
async updateIntegrationConfig(integrationId, config) {
|
|
88
|
+
await this.collection.updateOne(
|
|
89
|
+
{ _id: new ObjectId(integrationId) },
|
|
90
|
+
{ $set: { config, updatedAt: new Date() } }
|
|
91
|
+
);
|
|
92
|
+
|
|
93
|
+
return await this.findIntegrationById(integrationId);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
async deleteIntegrationById(integrationId) {
|
|
97
|
+
const result = await this.collection.deleteOne({ _id: new ObjectId(integrationId) });
|
|
98
|
+
return result.deletedCount > 0;
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
module.exports = { IntegrationRepositoryDocumentDB };
|
|
103
|
+
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
const { IntegrationRepositoryMongo } = require('./integration-repository-mongo');
|
|
2
2
|
const { IntegrationRepositoryPostgres } = require('./integration-repository-postgres');
|
|
3
|
+
const { IntegrationRepositoryDocumentDB } = require('./integration-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../../database/utils/documentdb-compatibility');
|
|
3
5
|
const config = require('../../database/config');
|
|
4
6
|
|
|
5
7
|
/**
|
|
@@ -20,6 +22,10 @@ const config = require('../../database/config');
|
|
|
20
22
|
* @throws {Error} If database type is not supported
|
|
21
23
|
*/
|
|
22
24
|
function createIntegrationRepository() {
|
|
25
|
+
if (isDocumentDB()) {
|
|
26
|
+
return new IntegrationRepositoryDocumentDB();
|
|
27
|
+
}
|
|
28
|
+
|
|
23
29
|
const dbType = config.DB_TYPE;
|
|
24
30
|
|
|
25
31
|
switch (dbType) {
|
|
@@ -38,7 +44,7 @@ function createIntegrationRepository() {
|
|
|
38
44
|
|
|
39
45
|
module.exports = {
|
|
40
46
|
createIntegrationRepository,
|
|
41
|
-
// Export adapters for direct testing
|
|
42
47
|
IntegrationRepositoryMongo,
|
|
43
48
|
IntegrationRepositoryPostgres,
|
|
49
|
+
IntegrationRepositoryDocumentDB,
|
|
44
50
|
};
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const { getNativeMongoClient } = require('../../database/mongodb-native-client');
|
|
3
|
+
const { EncryptedCollection } = require('../../database/encrypted-collection-wrapper');
|
|
4
|
+
const { FieldEncryptionService } = require('../../database/encryption/field-encryption-service');
|
|
5
|
+
const { getEncryptedFields } = require('../../database/encryption/encryption-schema-registry');
|
|
6
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
7
|
+
const { ProcessRepositoryInterface } = require('./process-repository-interface');
|
|
8
|
+
|
|
9
|
+
class ProcessRepositoryDocumentDB extends ProcessRepositoryInterface {
|
|
10
|
+
constructor() {
|
|
11
|
+
super();
|
|
12
|
+
|
|
13
|
+
const nativeClient = getNativeMongoClient();
|
|
14
|
+
const collection = nativeClient.collection('Process');
|
|
15
|
+
|
|
16
|
+
const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
|
|
17
|
+
const encryptionService = new FieldEncryptionService({
|
|
18
|
+
cryptor,
|
|
19
|
+
schema: { getEncryptedFields },
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
this.collection = new EncryptedCollection(collection, encryptionService, 'Process');
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
async createProcess(processData) {
|
|
26
|
+
const doc = {
|
|
27
|
+
...processData,
|
|
28
|
+
status: processData.status || 'pending',
|
|
29
|
+
createdAt: new Date(),
|
|
30
|
+
updatedAt: new Date(),
|
|
31
|
+
};
|
|
32
|
+
|
|
33
|
+
const result = await this.collection.insertOne(doc);
|
|
34
|
+
return await this.collection.findOne({ _id: result.insertedId });
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
async findProcessById(processId) {
|
|
38
|
+
return await this.collection.findOne({ _id: new ObjectId(processId) });
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
async findProcessesByUserId(userId) {
|
|
42
|
+
return await this.collection.find({ userId });
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
async updateProcess(processId, updates) {
|
|
46
|
+
await this.collection.updateOne(
|
|
47
|
+
{ _id: new ObjectId(processId) },
|
|
48
|
+
{ $set: { ...updates, updatedAt: new Date() } }
|
|
49
|
+
);
|
|
50
|
+
|
|
51
|
+
return await this.findProcessById(processId);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
async deleteProcess(processId) {
|
|
55
|
+
const result = await this.collection.deleteOne({ _id: new ObjectId(processId) });
|
|
56
|
+
return result.deletedCount > 0;
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
module.exports = { ProcessRepositoryDocumentDB };
|
|
61
|
+
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
const { ProcessRepositoryMongo } = require('./process-repository-mongo');
|
|
2
2
|
const { ProcessRepositoryPostgres } = require('./process-repository-postgres');
|
|
3
|
+
const { ProcessRepositoryDocumentDB } = require('./process-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../../database/utils/documentdb-compatibility');
|
|
3
5
|
const config = require('../../database/config');
|
|
4
6
|
|
|
5
7
|
/**
|
|
@@ -21,6 +23,10 @@ const config = require('../../database/config');
|
|
|
21
23
|
* @throws {Error} If database type is not supported
|
|
22
24
|
*/
|
|
23
25
|
function createProcessRepository() {
|
|
26
|
+
if (isDocumentDB()) {
|
|
27
|
+
return new ProcessRepositoryDocumentDB();
|
|
28
|
+
}
|
|
29
|
+
|
|
24
30
|
const dbType = config.DB_TYPE;
|
|
25
31
|
|
|
26
32
|
switch (dbType) {
|
|
@@ -39,8 +45,8 @@ function createProcessRepository() {
|
|
|
39
45
|
|
|
40
46
|
module.exports = {
|
|
41
47
|
createProcessRepository,
|
|
42
|
-
// Export adapters for direct testing
|
|
43
48
|
ProcessRepositoryMongo,
|
|
44
49
|
ProcessRepositoryPostgres,
|
|
50
|
+
ProcessRepositoryDocumentDB,
|
|
45
51
|
};
|
|
46
52
|
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const { getNativeMongoClient } = require('../../database/mongodb-native-client');
|
|
3
|
+
const { EncryptedCollection } = require('../../database/encrypted-collection-wrapper');
|
|
4
|
+
const { FieldEncryptionService } = require('../../database/encryption/field-encryption-service');
|
|
5
|
+
const { getEncryptedFields } = require('../../database/encryption/encryption-schema-registry');
|
|
6
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
7
|
+
const { ModuleRepositoryInterface } = require('./module-repository-interface');
|
|
8
|
+
|
|
9
|
+
class ModuleRepositoryDocumentDB extends ModuleRepositoryInterface {
|
|
10
|
+
constructor() {
|
|
11
|
+
super();
|
|
12
|
+
|
|
13
|
+
const nativeClient = getNativeMongoClient();
|
|
14
|
+
const collection = nativeClient.collection('Entity');
|
|
15
|
+
|
|
16
|
+
const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
|
|
17
|
+
const encryptionService = new FieldEncryptionService({
|
|
18
|
+
cryptor,
|
|
19
|
+
schema: { getEncryptedFields },
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
this.collection = new EncryptedCollection(collection, encryptionService, 'Entity');
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
async findEntityById(entityId) {
|
|
26
|
+
return await this.collection.findOne({ _id: new ObjectId(entityId) });
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
async findEntitiesByUserId(userId) {
|
|
30
|
+
return await this.collection.find({ userId });
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
async findEntitiesByIds(entitiesIds) {
|
|
34
|
+
const objectIds = entitiesIds.map(id => new ObjectId(id));
|
|
35
|
+
return await this.collection.find({ _id: { $in: objectIds } });
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
async findEntitiesByUserIdAndModuleName(userId, moduleName) {
|
|
39
|
+
return await this.collection.find({ userId, moduleName });
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
async findEntity(filter) {
|
|
43
|
+
return await this.collection.findOne(filter);
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
async createEntity(entityData) {
|
|
47
|
+
const doc = {
|
|
48
|
+
...entityData,
|
|
49
|
+
createdAt: new Date(),
|
|
50
|
+
updatedAt: new Date(),
|
|
51
|
+
};
|
|
52
|
+
|
|
53
|
+
const result = await this.collection.insertOne(doc);
|
|
54
|
+
return await this.collection.findOne({ _id: result.insertedId });
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
async updateEntity(entityId, updates) {
|
|
58
|
+
await this.collection.updateOne(
|
|
59
|
+
{ _id: new ObjectId(entityId) },
|
|
60
|
+
{ $set: { ...updates, updatedAt: new Date() } }
|
|
61
|
+
);
|
|
62
|
+
|
|
63
|
+
return await this.findEntityById(entityId);
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
async unsetCredential(entityId) {
|
|
67
|
+
await this.collection.updateOne(
|
|
68
|
+
{ _id: new ObjectId(entityId) },
|
|
69
|
+
{ $unset: { credential: '' }, $set: { updatedAt: new Date() } }
|
|
70
|
+
);
|
|
71
|
+
|
|
72
|
+
return await this.findEntityById(entityId);
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
async deleteEntity(entityId) {
|
|
76
|
+
const result = await this.collection.deleteOne({ _id: new ObjectId(entityId) });
|
|
77
|
+
return result.deletedCount > 0;
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
module.exports = { ModuleRepositoryDocumentDB };
|
|
82
|
+
|
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
const { ModuleRepositoryMongo } = require('./module-repository-mongo');
|
|
2
2
|
const { ModuleRepositoryPostgres } = require('./module-repository-postgres');
|
|
3
|
+
const { ModuleRepositoryDocumentDB } = require('./module-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../../database/utils/documentdb-compatibility');
|
|
3
5
|
const config = require('../../database/config');
|
|
4
6
|
|
|
5
|
-
/**
|
|
6
|
-
* Module Repository Factory
|
|
7
|
-
* Creates the appropriate repository adapter based on database type
|
|
8
|
-
*
|
|
9
|
-
* @returns {ModuleRepositoryInterface} Configured repository adapter
|
|
10
|
-
*/
|
|
11
7
|
function createModuleRepository() {
|
|
8
|
+
if (isDocumentDB()) {
|
|
9
|
+
return new ModuleRepositoryDocumentDB();
|
|
10
|
+
}
|
|
11
|
+
|
|
12
12
|
const dbType = config.DB_TYPE;
|
|
13
13
|
|
|
14
14
|
switch (dbType) {
|
|
@@ -27,7 +27,7 @@ function createModuleRepository() {
|
|
|
27
27
|
|
|
28
28
|
module.exports = {
|
|
29
29
|
createModuleRepository,
|
|
30
|
-
// Export adapters for direct testing
|
|
31
30
|
ModuleRepositoryMongo,
|
|
32
31
|
ModuleRepositoryPostgres,
|
|
32
|
+
ModuleRepositoryDocumentDB,
|
|
33
33
|
};
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@friggframework/core",
|
|
3
3
|
"prettier": "@friggframework/prettier-config",
|
|
4
|
-
"version": "2.0.0--canary.490.
|
|
4
|
+
"version": "2.0.0--canary.490.d8be1b9.0",
|
|
5
5
|
"dependencies": {
|
|
6
6
|
"@aws-sdk/client-apigatewaymanagementapi": "^3.588.0",
|
|
7
7
|
"@aws-sdk/client-kms": "^3.588.0",
|
|
@@ -38,9 +38,9 @@
|
|
|
38
38
|
}
|
|
39
39
|
},
|
|
40
40
|
"devDependencies": {
|
|
41
|
-
"@friggframework/eslint-config": "2.0.0--canary.490.
|
|
42
|
-
"@friggframework/prettier-config": "2.0.0--canary.490.
|
|
43
|
-
"@friggframework/test": "2.0.0--canary.490.
|
|
41
|
+
"@friggframework/eslint-config": "2.0.0--canary.490.d8be1b9.0",
|
|
42
|
+
"@friggframework/prettier-config": "2.0.0--canary.490.d8be1b9.0",
|
|
43
|
+
"@friggframework/test": "2.0.0--canary.490.d8be1b9.0",
|
|
44
44
|
"@prisma/client": "^6.17.0",
|
|
45
45
|
"@types/lodash": "4.17.15",
|
|
46
46
|
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
|
@@ -80,5 +80,5 @@
|
|
|
80
80
|
"publishConfig": {
|
|
81
81
|
"access": "public"
|
|
82
82
|
},
|
|
83
|
-
"gitHead": "
|
|
83
|
+
"gitHead": "d8be1b9d24b459e074d7f39344386e843987625e"
|
|
84
84
|
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const { getNativeMongoClient } = require('../../database/mongodb-native-client');
|
|
3
|
+
const { EncryptedCollection } = require('../../database/encrypted-collection-wrapper');
|
|
4
|
+
const { FieldEncryptionService } = require('../../database/encryption/field-encryption-service');
|
|
5
|
+
const { getEncryptedFields } = require('../../database/encryption/encryption-schema-registry');
|
|
6
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
7
|
+
const { SyncRepositoryInterface } = require('./sync-repository-interface');
|
|
8
|
+
|
|
9
|
+
class SyncRepositoryDocumentDB extends SyncRepositoryInterface {
|
|
10
|
+
constructor() {
|
|
11
|
+
super();
|
|
12
|
+
|
|
13
|
+
const nativeClient = getNativeMongoClient();
|
|
14
|
+
const collection = nativeClient.collection('Sync');
|
|
15
|
+
|
|
16
|
+
const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
|
|
17
|
+
const encryptionService = new FieldEncryptionService({
|
|
18
|
+
cryptor,
|
|
19
|
+
schema: { getEncryptedFields },
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
this.collection = new EncryptedCollection(collection, encryptionService, 'Sync');
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
async getSyncObject(name, dataIdentifier, entity) {
|
|
26
|
+
const entityId = entity?.id || entity?._id?.toString() || entity;
|
|
27
|
+
|
|
28
|
+
return await this.collection.findOne({
|
|
29
|
+
name,
|
|
30
|
+
dataIdentifier,
|
|
31
|
+
entityId,
|
|
32
|
+
});
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
async createSync(syncData) {
|
|
36
|
+
const doc = {
|
|
37
|
+
...syncData,
|
|
38
|
+
createdAt: new Date(),
|
|
39
|
+
updatedAt: new Date(),
|
|
40
|
+
};
|
|
41
|
+
|
|
42
|
+
const result = await this.collection.insertOne(doc);
|
|
43
|
+
return await this.collection.findOne({ _id: result.insertedId });
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
async updateSync(syncId, updates) {
|
|
47
|
+
await this.collection.updateOne(
|
|
48
|
+
{ _id: new ObjectId(syncId) },
|
|
49
|
+
{ $set: { ...updates, updatedAt: new Date() } }
|
|
50
|
+
);
|
|
51
|
+
|
|
52
|
+
return await this.collection.findOne({ _id: new ObjectId(syncId) });
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
async deleteSync(syncId) {
|
|
56
|
+
const result = await this.collection.deleteOne({ _id: new ObjectId(syncId) });
|
|
57
|
+
return result.deletedCount > 0;
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
module.exports = { SyncRepositoryDocumentDB };
|
|
62
|
+
|
|
@@ -1,19 +1,14 @@
|
|
|
1
1
|
const { SyncRepositoryMongo } = require('./sync-repository-mongo');
|
|
2
2
|
const { SyncRepositoryPostgres } = require('./sync-repository-postgres');
|
|
3
|
+
const { SyncRepositoryDocumentDB } = require('./sync-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../../database/utils/documentdb-compatibility');
|
|
3
5
|
const config = require('../../database/config');
|
|
4
6
|
|
|
5
|
-
/**
|
|
6
|
-
* Sync Repository Factory
|
|
7
|
-
* Creates the appropriate repository adapter based on database type
|
|
8
|
-
*
|
|
9
|
-
* Usage:
|
|
10
|
-
* ```javascript
|
|
11
|
-
* const repository = createSyncRepository();
|
|
12
|
-
* ```
|
|
13
|
-
*
|
|
14
|
-
* @returns {SyncRepositoryInterface} Configured repository adapter
|
|
15
|
-
*/
|
|
16
7
|
function createSyncRepository() {
|
|
8
|
+
if (isDocumentDB()) {
|
|
9
|
+
return new SyncRepositoryDocumentDB();
|
|
10
|
+
}
|
|
11
|
+
|
|
17
12
|
const dbType = config.DB_TYPE;
|
|
18
13
|
|
|
19
14
|
switch (dbType) {
|
|
@@ -32,7 +27,7 @@ function createSyncRepository() {
|
|
|
32
27
|
|
|
33
28
|
module.exports = {
|
|
34
29
|
createSyncRepository,
|
|
35
|
-
// Export adapters for direct testing
|
|
36
30
|
SyncRepositoryMongo,
|
|
37
31
|
SyncRepositoryPostgres,
|
|
32
|
+
SyncRepositoryDocumentDB,
|
|
38
33
|
};
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const bcrypt = require('bcryptjs');
|
|
3
|
+
const { getNativeMongoClient } = require('../../database/mongodb-native-client');
|
|
4
|
+
const { EncryptedCollection } = require('../../database/encrypted-collection-wrapper');
|
|
5
|
+
const { FieldEncryptionService } = require('../../database/encryption/field-encryption-service');
|
|
6
|
+
const { getEncryptedFields } = require('../../database/encryption/encryption-schema-registry');
|
|
7
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
8
|
+
const { TokenRepositoryInterface } = require('./token-repository-interface');
|
|
9
|
+
|
|
10
|
+
const BCRYPT_ROUNDS = 10;
|
|
11
|
+
|
|
12
|
+
class TokenRepositoryDocumentDB extends TokenRepositoryInterface {
|
|
13
|
+
constructor() {
|
|
14
|
+
super();
|
|
15
|
+
|
|
16
|
+
const nativeClient = getNativeMongoClient();
|
|
17
|
+
const collection = nativeClient.collection('Token');
|
|
18
|
+
|
|
19
|
+
const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
|
|
20
|
+
const encryptionService = new FieldEncryptionService({
|
|
21
|
+
cryptor,
|
|
22
|
+
schema: { getEncryptedFields },
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
this.collection = new EncryptedCollection(collection, encryptionService, 'Token');
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
async createTokenWithExpire(userId, rawToken, minutes) {
|
|
29
|
+
const hashedToken = await bcrypt.hash(rawToken, BCRYPT_ROUNDS);
|
|
30
|
+
const expiresAt = new Date(Date.now() + minutes * 60 * 1000);
|
|
31
|
+
|
|
32
|
+
const doc = {
|
|
33
|
+
userId,
|
|
34
|
+
token: hashedToken,
|
|
35
|
+
expiresAt,
|
|
36
|
+
createdAt: new Date(),
|
|
37
|
+
};
|
|
38
|
+
|
|
39
|
+
const result = await this.collection.insertOne(doc);
|
|
40
|
+
const token = await this.collection.findOne({ _id: result.insertedId });
|
|
41
|
+
|
|
42
|
+
return Buffer.from(JSON.stringify({
|
|
43
|
+
tokenId: token._id.toString(),
|
|
44
|
+
rawToken
|
|
45
|
+
})).toString('base64');
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
async validateAndGetToken(tokenObj) {
|
|
49
|
+
const token = await this.collection.findOne({ _id: new ObjectId(tokenObj.tokenId) });
|
|
50
|
+
|
|
51
|
+
if (!token) {
|
|
52
|
+
return null;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
if (new Date() > new Date(token.expiresAt)) {
|
|
56
|
+
await this.deleteToken(tokenObj.tokenId);
|
|
57
|
+
return null;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
const isValid = await bcrypt.compare(tokenObj.rawToken, token.token);
|
|
61
|
+
return isValid ? token : null;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
async findTokenById(tokenId) {
|
|
65
|
+
return await this.collection.findOne({ _id: new ObjectId(tokenId) });
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
async findTokensByUserId(userId) {
|
|
69
|
+
return await this.collection.find({ userId });
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
async deleteToken(tokenId) {
|
|
73
|
+
const result = await this.collection.deleteOne({ _id: new ObjectId(tokenId) });
|
|
74
|
+
return result.deletedCount > 0;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
async deleteExpiredTokens() {
|
|
78
|
+
const result = await this.collection.deleteMany({
|
|
79
|
+
expiresAt: { $lt: new Date() }
|
|
80
|
+
});
|
|
81
|
+
return result.deletedCount;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
async deleteTokensByUserId(userId) {
|
|
85
|
+
const result = await this.collection.deleteMany({ userId });
|
|
86
|
+
return result.deletedCount;
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
// DocumentDB-backed token repository (field-encrypted via EncryptedCollection).
module.exports = { TokenRepositoryDocumentDB };
|
|
91
|
+
|
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
const { TokenRepositoryMongo } = require('./token-repository-mongo');
|
|
2
2
|
const { TokenRepositoryPostgres } = require('./token-repository-postgres');
|
|
3
|
+
const { TokenRepositoryDocumentDB } = require('./token-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../../database/utils/documentdb-compatibility');
|
|
3
5
|
const config = require('../../database/config');
|
|
4
6
|
|
|
5
|
-
/**
|
|
6
|
-
* Token Repository Factory
|
|
7
|
-
* Creates the appropriate repository adapter based on database type
|
|
8
|
-
*
|
|
9
|
-
* @returns {TokenRepositoryInterface} Configured repository adapter
|
|
10
|
-
*/
|
|
11
7
|
function createTokenRepository() {
|
|
8
|
+
if (isDocumentDB()) {
|
|
9
|
+
return new TokenRepositoryDocumentDB();
|
|
10
|
+
}
|
|
11
|
+
|
|
12
12
|
const dbType = config.DB_TYPE;
|
|
13
13
|
|
|
14
14
|
switch (dbType) {
|
|
@@ -27,7 +27,7 @@ function createTokenRepository() {
|
|
|
27
27
|
|
|
28
28
|
module.exports = {
|
|
29
29
|
createTokenRepository,
|
|
30
|
-
// Export adapters for direct testing
|
|
31
30
|
TokenRepositoryMongo,
|
|
32
31
|
TokenRepositoryPostgres,
|
|
32
|
+
TokenRepositoryDocumentDB,
|
|
33
33
|
};
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const bcrypt = require('bcryptjs');
|
|
3
|
+
const { getNativeMongoClient } = require('../../database/mongodb-native-client');
|
|
4
|
+
const { EncryptedCollection } = require('../../database/encrypted-collection-wrapper');
|
|
5
|
+
const { FieldEncryptionService } = require('../../database/encryption/field-encryption-service');
|
|
6
|
+
const { getEncryptedFields } = require('../../database/encryption/encryption-schema-registry');
|
|
7
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
8
|
+
const { UserRepositoryInterface } = require('./user-repository-interface');
|
|
9
|
+
const { createTokenRepository } = require('../../token/repositories/token-repository-factory');
|
|
10
|
+
|
|
11
|
+
const BCRYPT_ROUNDS = 10;
|
|
12
|
+
|
|
13
|
+
/**
 * DocumentDB adapter for user persistence.
 *
 * Uses the shared native Mongo client with field-level encryption applied
 * through EncryptedCollection, and delegates token operations to the
 * repository produced by createTokenRepository().
 */
class UserRepositoryDocumentDB extends UserRepositoryInterface {
    constructor() {
        super();

        const nativeClient = getNativeMongoClient();
        const collection = nativeClient.collection('User');

        // KMS-backed crypto when a key ARN is configured, local otherwise.
        const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
        const encryptionService = new FieldEncryptionService({
            cryptor,
            schema: { getEncryptedFields },
        });

        this.collection = new EncryptedCollection(collection, encryptionService, 'User');
        this.tokenRepository = createTokenRepository();
    }

    /** Delegates session-token lookup to the token repository. */
    async getSessionToken(token) {
        return await this.tokenRepository.getSessionToken(token);
    }

    /** Finds any user (individual or organization) by document id. */
    async findUserById(userId) {
        return await this.collection.findOne({ _id: new ObjectId(userId) });
    }

    /** Finds an individual user by document id, or null. */
    async findIndividualUserById(userId) {
        return await this.collection.findOne({
            _id: new ObjectId(userId),
            type: 'individual',
        });
    }

    /** Finds an organization user by document id, or null. */
    async findOrganizationUserById(userId) {
        return await this.collection.findOne({
            _id: new ObjectId(userId),
            type: 'organization',
        });
    }

    /** Finds an individual user by username, or null. */
    async findIndividualUserByUsername(username) {
        return await this.collection.findOne({
            username,
            type: 'individual',
        });
    }

    /** Finds an individual user by email, or null. */
    async findIndividualUserByEmail(email) {
        return await this.collection.findOne({
            email,
            type: 'individual',
        });
    }

    /** Finds an individual user by external application user id, or null. */
    async findIndividualUserByAppUserId(appUserId) {
        return await this.collection.findOne({
            appUserId,
            type: 'individual',
        });
    }

    /** Finds an organization user by external application org id, or null. */
    async findOrganizationUserByAppOrgId(appOrgId) {
        return await this.collection.findOne({
            appOrgId,
            type: 'organization',
        });
    }

    /**
     * Creates an individual user. The password is bcrypt-hashed into the
     * `hashword` field; the plaintext is never persisted.
     *
     * @param {{username: string, email: string, password: string, appUserId?: string}} params
     * @returns {Promise<object>} the stored user document.
     */
    async createIndividualUser(params) {
        const { username, email, password, appUserId } = params;

        const hashedPassword = await bcrypt.hash(password, BCRYPT_ROUNDS);

        const doc = {
            type: 'individual',
            username,
            email,
            hashword: hashedPassword,
            appUserId: appUserId || null,
            createdAt: new Date(),
            updatedAt: new Date(),
        };

        const result = await this.collection.insertOne(doc);
        return await this.collection.findOne({ _id: result.insertedId });
    }

    /**
     * Creates an organization user.
     *
     * @param {{name: string, appOrgId?: string}} params
     * @returns {Promise<object>} the stored user document.
     */
    async createOrganizationUser(params) {
        const { name, appOrgId } = params;

        const doc = {
            type: 'organization',
            name,
            appOrgId: appOrgId || null,
            createdAt: new Date(),
            updatedAt: new Date(),
        };

        const result = await this.collection.insertOne(doc);
        return await this.collection.findOne({ _id: result.insertedId });
    }

    /**
     * Updates an individual user. A truthy `password` in `updates` is hashed
     * into `hashword` before persisting; the plaintext `password` key itself
     * is never written to the database, and the caller's `updates` object is
     * never mutated (the previous implementation assigned/deleted keys on it).
     *
     * @param {string} userId - Document id of the user to update.
     * @param {object} updates - Fields to $set (plus optional `password`).
     * @returns {Promise<object|null>} the updated user document.
     */
    async updateIndividualUser(userId, updates) {
        // Work on a copy so the caller's object is left untouched.
        const { password, ...fields } = updates;
        if (password) {
            fields.hashword = await bcrypt.hash(password, BCRYPT_ROUNDS);
        }

        await this.collection.updateOne(
            { _id: new ObjectId(userId), type: 'individual' },
            { $set: { ...fields, updatedAt: new Date() } }
        );

        return await this.findIndividualUserById(userId);
    }

    /**
     * Updates an organization user with the given fields.
     *
     * @param {string} userId - Document id of the organization user.
     * @param {object} updates - Fields to $set.
     * @returns {Promise<object|null>} the updated user document.
     */
    async updateOrganizationUser(userId, updates) {
        await this.collection.updateOne(
            { _id: new ObjectId(userId), type: 'organization' },
            { $set: { ...updates, updatedAt: new Date() } }
        );

        return await this.findOrganizationUserById(userId);
    }

    /**
     * Deletes a user by document id.
     *
     * @returns {Promise<boolean>} true when a document was actually removed.
     */
    async deleteUser(userId) {
        const result = await this.collection.deleteOne({ _id: new ObjectId(userId) });
        return result.deletedCount > 0;
    }

    /**
     * Creates an expiring token for a user via the token repository.
     *
     * @param {string} userId - Owner of the token.
     * @param {string} rawToken - Plaintext token material.
     * @param {number} [minutes=120] - Lifetime in minutes.
     * @returns {Promise<string>} the opaque base64 credential.
     */
    async createToken(userId, rawToken, minutes = 120) {
        return await this.tokenRepository.createTokenWithExpire(userId, rawToken, minutes);
    }
}

module.exports = { UserRepositoryDocumentDB };
|
|
148
|
+
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
const { UserRepositoryMongo } = require('./user-repository-mongo');
|
|
2
2
|
const { UserRepositoryPostgres } = require('./user-repository-postgres');
|
|
3
|
+
const { UserRepositoryDocumentDB } = require('./user-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../../database/utils/documentdb-compatibility');
|
|
3
5
|
const databaseConfig = require('../../database/config');
|
|
4
6
|
|
|
5
7
|
/**
|
|
@@ -22,6 +24,10 @@ const databaseConfig = require('../../database/config');
|
|
|
22
24
|
* @returns {UserRepositoryInterface} Configured repository adapter
|
|
23
25
|
*/
|
|
24
26
|
function createUserRepository() {
|
|
27
|
+
if (isDocumentDB()) {
|
|
28
|
+
return new UserRepositoryDocumentDB();
|
|
29
|
+
}
|
|
30
|
+
|
|
25
31
|
const dbType = databaseConfig.DB_TYPE;
|
|
26
32
|
|
|
27
33
|
switch (dbType) {
|
|
@@ -40,7 +46,7 @@ function createUserRepository() {
|
|
|
40
46
|
|
|
41
47
|
module.exports = {
|
|
42
48
|
createUserRepository,
|
|
43
|
-
// Export adapters for direct testing
|
|
44
49
|
UserRepositoryMongo,
|
|
45
50
|
UserRepositoryPostgres,
|
|
51
|
+
UserRepositoryDocumentDB,
|
|
46
52
|
};
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
const { ObjectId } = require('mongodb');
|
|
2
|
+
const { getNativeMongoClient } = require('../../database/mongodb-native-client');
|
|
3
|
+
const { EncryptedCollection } = require('../../database/encrypted-collection-wrapper');
|
|
4
|
+
const { FieldEncryptionService } = require('../../database/encryption/field-encryption-service');
|
|
5
|
+
const { getEncryptedFields } = require('../../database/encryption/encryption-schema-registry');
|
|
6
|
+
const { Cryptor } = require('../../encrypt/Cryptor');
|
|
7
|
+
const { WebsocketConnectionRepositoryInterface } = require('./websocket-connection-repository-interface');
|
|
8
|
+
|
|
9
|
+
/**
 * DocumentDB adapter for websocket connection records, backed by the shared
 * native Mongo client with field-level encryption applied through
 * EncryptedCollection.
 */
class WebsocketConnectionRepositoryDocumentDB extends WebsocketConnectionRepositoryInterface {
    constructor() {
        super();

        const nativeClient = getNativeMongoClient();
        const rawCollection = nativeClient.collection('WebsocketConnection');

        // KMS-backed crypto when a key ARN is configured, local otherwise.
        const cryptor = new Cryptor({ shouldUseAws: !!process.env.KMS_KEY_ARN });
        const fieldEncryption = new FieldEncryptionService({
            cryptor,
            schema: { getEncryptedFields },
        });

        this.collection = new EncryptedCollection(rawCollection, fieldEncryption, 'WebsocketConnection');
    }

    /**
     * Persists a new connection record (forced active, timestamped) and
     * returns the stored document.
     */
    async createConnection(connectionData) {
        const insertResult = await this.collection.insertOne({
            ...connectionData,
            active: true,
            createdAt: new Date(),
        });
        return await this.collection.findOne({ _id: insertResult.insertedId });
    }

    /** Lists all currently-active connections for a user. */
    async findActiveConnectionsByUserId(userId) {
        const query = { userId, active: true };
        return await this.collection.find(query);
    }

    /** Finds a connection record by its Mongo document id, or null. */
    async findConnectionById(connectionId) {
        const _id = new ObjectId(connectionId);
        return await this.collection.findOne({ _id });
    }

    /** Finds a connection record by its websocket connectionId field. */
    async findConnectionByConnectionId(connectionId) {
        return await this.collection.findOne({ connectionId });
    }

    /**
     * Marks a connection inactive (matched by websocket connectionId) and
     * returns the freshly re-read record.
     */
    async deactivateConnection(connectionId) {
        const patch = { $set: { active: false } };
        await this.collection.updateOne({ connectionId }, patch);

        return await this.findConnectionByConnectionId(connectionId);
    }

    /**
     * Deletes a connection by websocket connectionId.
     * @returns {Promise<boolean>} true when a document was actually removed.
     */
    async deleteConnection(connectionId) {
        const { deletedCount } = await this.collection.deleteOne({ connectionId });
        return deletedCount > 0;
    }
}

module.exports = { WebsocketConnectionRepositoryDocumentDB };
|
|
64
|
+
|
|
@@ -1,18 +1,14 @@
|
|
|
1
|
-
const {
|
|
2
|
-
|
|
3
|
-
} = require('./websocket-connection-repository-
|
|
4
|
-
const {
|
|
5
|
-
WebsocketConnectionRepositoryPostgres,
|
|
6
|
-
} = require('./websocket-connection-repository-postgres');
|
|
1
|
+
const { WebsocketConnectionRepositoryMongo } = require('./websocket-connection-repository-mongo');
|
|
2
|
+
const { WebsocketConnectionRepositoryPostgres } = require('./websocket-connection-repository-postgres');
|
|
3
|
+
const { WebsocketConnectionRepositoryDocumentDB } = require('./websocket-connection-repository-documentdb');
|
|
4
|
+
const { isDocumentDB } = require('../../database/utils/documentdb-compatibility');
|
|
7
5
|
const config = require('../../database/config');
|
|
8
6
|
|
|
9
|
-
/**
|
|
10
|
-
* Websocket Connection Repository Factory
|
|
11
|
-
* Creates the appropriate repository adapter based on database type
|
|
12
|
-
*
|
|
13
|
-
* @returns {WebsocketConnectionRepositoryInterface} Configured repository adapter
|
|
14
|
-
*/
|
|
15
7
|
function createWebsocketConnectionRepository() {
|
|
8
|
+
if (isDocumentDB()) {
|
|
9
|
+
return new WebsocketConnectionRepositoryDocumentDB();
|
|
10
|
+
}
|
|
11
|
+
|
|
16
12
|
const dbType = config.DB_TYPE;
|
|
17
13
|
|
|
18
14
|
switch (dbType) {
|
|
@@ -31,7 +27,7 @@ function createWebsocketConnectionRepository() {
|
|
|
31
27
|
|
|
32
28
|
module.exports = {
|
|
33
29
|
createWebsocketConnectionRepository,
|
|
34
|
-
// Export adapters for direct testing
|
|
35
30
|
WebsocketConnectionRepositoryMongo,
|
|
36
31
|
WebsocketConnectionRepositoryPostgres,
|
|
32
|
+
WebsocketConnectionRepositoryDocumentDB,
|
|
37
33
|
};
|