@ductape/sdk 0.0.4-v42 → 0.0.4-v43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/apps/services/app.service.d.ts +10 -0
- package/dist/apps/services/app.service.js +22 -0
- package/dist/apps/services/app.service.js.map +1 -1
- package/dist/database/adapters/base.adapter.d.ts +176 -0
- package/dist/database/adapters/base.adapter.js +31 -0
- package/dist/database/adapters/base.adapter.js.map +1 -0
- package/dist/database/adapters/dynamodb.adapter.d.ts +83 -0
- package/dist/database/adapters/dynamodb.adapter.js +1237 -0
- package/dist/database/adapters/dynamodb.adapter.js.map +1 -0
- package/dist/database/adapters/mongodb.adapter.d.ts +70 -0
- package/dist/database/adapters/mongodb.adapter.js +717 -0
- package/dist/database/adapters/mongodb.adapter.js.map +1 -0
- package/dist/database/adapters/mysql.adapter.d.ts +141 -0
- package/dist/database/adapters/mysql.adapter.js +1221 -0
- package/dist/database/adapters/mysql.adapter.js.map +1 -0
- package/dist/database/adapters/postgresql.adapter.d.ts +142 -0
- package/dist/database/adapters/postgresql.adapter.js +1288 -0
- package/dist/database/adapters/postgresql.adapter.js.map +1 -0
- package/dist/database/database.service.d.ts +190 -0
- package/dist/database/database.service.js +552 -0
- package/dist/database/database.service.js.map +1 -0
- package/dist/database/index.d.ts +18 -0
- package/dist/database/index.js +98 -0
- package/dist/database/index.js.map +1 -0
- package/dist/database/types/aggregation.types.d.ts +202 -0
- package/dist/database/types/aggregation.types.js +21 -0
- package/dist/database/types/aggregation.types.js.map +1 -0
- package/dist/database/types/connection.types.d.ts +132 -0
- package/dist/database/types/connection.types.js +6 -0
- package/dist/database/types/connection.types.js.map +1 -0
- package/dist/database/types/database.types.d.ts +173 -0
- package/dist/database/types/database.types.js +73 -0
- package/dist/database/types/database.types.js.map +1 -0
- package/dist/database/types/index.d.ts +12 -0
- package/dist/database/types/index.js +37 -0
- package/dist/database/types/index.js.map +1 -0
- package/dist/database/types/index.types.d.ts +220 -0
- package/dist/database/types/index.types.js +27 -0
- package/dist/database/types/index.types.js.map +1 -0
- package/dist/database/types/migration.types.d.ts +205 -0
- package/dist/database/types/migration.types.js +44 -0
- package/dist/database/types/migration.types.js.map +1 -0
- package/dist/database/types/query.types.d.ts +274 -0
- package/dist/database/types/query.types.js +57 -0
- package/dist/database/types/query.types.js.map +1 -0
- package/dist/database/types/result.types.d.ts +218 -0
- package/dist/database/types/result.types.js +6 -0
- package/dist/database/types/result.types.js.map +1 -0
- package/dist/database/types/schema.types.d.ts +190 -0
- package/dist/database/types/schema.types.js +69 -0
- package/dist/database/types/schema.types.js.map +1 -0
- package/dist/database/utils/helpers.d.ts +66 -0
- package/dist/database/utils/helpers.js +501 -0
- package/dist/database/utils/helpers.js.map +1 -0
- package/dist/database/utils/migration.utils.d.ts +151 -0
- package/dist/database/utils/migration.utils.js +476 -0
- package/dist/database/utils/migration.utils.js.map +1 -0
- package/dist/database/utils/transaction.d.ts +64 -0
- package/dist/database/utils/transaction.js +130 -0
- package/dist/database/utils/transaction.js.map +1 -0
- package/dist/database/validators/connection.validator.d.ts +20 -0
- package/dist/database/validators/connection.validator.js +267 -0
- package/dist/database/validators/connection.validator.js.map +1 -0
- package/dist/database/validators/query.validator.d.ts +31 -0
- package/dist/database/validators/query.validator.js +305 -0
- package/dist/database/validators/query.validator.js.map +1 -0
- package/dist/database/validators/schema.validator.d.ts +31 -0
- package/dist/database/validators/schema.validator.js +334 -0
- package/dist/database/validators/schema.validator.js.map +1 -0
- package/dist/index.d.ts +25 -4
- package/dist/index.js +36 -4
- package/dist/index.js.map +1 -1
- package/dist/processor/services/processor.service.js +10 -8
- package/dist/processor/services/processor.service.js.map +1 -1
- package/dist/types/processor.types.d.ts +2 -2
- package/package.json +3 -1
|
@@ -0,0 +1,717 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* MongoDB Database Adapter
|
|
4
|
+
* Implements database operations for MongoDB
|
|
5
|
+
*/
|
|
6
|
+
// TypeScript "__rest" emit helper: shallow-copies every own enumerable
// property of `source` that is NOT listed in `excluded` (string keys first,
// then symbol keys). Reuses a host-provided implementation when present.
var __rest = (this && this.__rest) || function (source, excluded) {
    var result = {};
    for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key) && excluded.indexOf(key) < 0) {
            result[key] = source[key];
        }
    }
    // Symbol-keyed properties are not visited by for..in, so collect them
    // separately (only enumerable ones, mirroring object-spread semantics).
    if (source != null && typeof Object.getOwnPropertySymbols === "function") {
        var symbols = Object.getOwnPropertySymbols(source);
        for (var i = 0; i < symbols.length; i++) {
            var sym = symbols[i];
            if (excluded.indexOf(sym) < 0 && Object.prototype.propertyIsEnumerable.call(source, sym)) {
                result[sym] = source[sym];
            }
        }
    }
    return result;
};
|
|
17
|
+
// CommonJS module bootstrapping (compiled TypeScript output).
// Mark the module as an ES-module interop target and pre-declare the export
// so circular requires observe the binding before the class is defined.
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoDBAdapter = void 0;
// Sibling-module dependencies: the abstract adapter base class and the
// shared type/enum definitions used throughout this file.
const base_adapter_1 = require("./base.adapter");
const database_types_1 = require("../types/database.types");
const query_types_1 = require("../types/query.types");
|
|
22
|
+
/**
 * MongoDB Connection wrapper
 *
 * Pairs a MongoClient with a selected Db handle and tracks a coarse
 * connection status via the shared ConnectionStatus enum.
 */
class MongoDBConnection {
    /**
     * @param id     - unique identifier for this connection (pool key)
     * @param client - the underlying MongoClient instance
     * @param db     - the Db handle obtained from the client
     */
    constructor(id, client, db) {
        this.id = id;
        this.type = database_types_1.DatabaseType.MONGODB;
        // Constructed from an already-connected client, so start as CONNECTED.
        this.status = database_types_1.ConnectionStatus.CONNECTED;
        this.client = client;
        this.db = db;
    }
    /**
     * (Re)connect the underlying client if it is not currently connected.
     * NOTE(review): `client.topology` is a driver-internal property and its
     * `isConnected()` method was removed in mongodb driver v4+ — confirm the
     * driver version this ships against.
     */
    async connect() {
        var _a;
        if (this.client && !((_a = this.client.topology) === null || _a === void 0 ? void 0 : _a.isConnected())) {
            await this.client.connect();
            this.status = database_types_1.ConnectionStatus.CONNECTED;
        }
    }
    /** Close the client (if any) and mark the wrapper as disconnected. */
    async disconnect() {
        if (this.client) {
            await this.client.close();
            this.status = database_types_1.ConnectionStatus.DISCONNECTED;
        }
    }
    /**
     * True when both our status flag and the driver report a live connection.
     * NOTE(review): same `topology.isConnected()` caveat as connect() above.
     */
    isConnected() {
        var _a, _b;
        return this.status === database_types_1.ConnectionStatus.CONNECTED && ((_b = (_a = this.client) === null || _a === void 0 ? void 0 : _a.topology) === null || _b === void 0 ? void 0 : _b.isConnected());
    }
    /** Returns the Db handle (not the MongoClient) for running operations. */
    getClient() {
        return this.db;
    }
}
|
|
54
|
+
/**
 * MongoDB Adapter
 *
 * Implements the BaseDatabaseAdapter contract on top of the official
 * `mongodb` driver. SQL-style concepts are mapped onto MongoDB ones:
 * table -> collection, row -> document, where clause -> filter document.
 * All failures are normalized into DatabaseError with a typed error code.
 */
class MongoDBAdapter extends base_adapter_1.BaseDatabaseAdapter {
    constructor() {
        super(...arguments);
        this.type = database_types_1.DatabaseType.MONGODB;
        // Connections created by this adapter, keyed by connection id.
        this.connectionPool = new Map();
    }
    // ==================== Connection Methods ====================
    /**
     * Establish a new MongoDB connection from the given config.
     * @throws DatabaseError(CONNECTION_ERROR) — currently always, because the
     *         dynamic-import implementation below is not yet wired up.
     */
    async connect(config) {
        try {
            // TODO: Import mongodb library dynamically
            // const { MongoClient } = await import('mongodb');
            // TODO: Create connection URI
            // const uri = config.connectionString ||
            //   `mongodb://${config.user}:${config.password}@${config.host}:${config.port || 27017}`;
            // TODO: Create MongoDB client
            // const client = new MongoClient(uri, {
            //   ...config.options,
            // });
            // TODO: Connect to MongoDB
            // await client.connect();
            // TODO: Get database
            // const db = client.db(config.database);
            // TODO: Create connection wrapper
            // const connectionId = `mongodb-${config.database}-${Date.now()}`;
            // const connection = new MongoDBConnection(connectionId, client, db);
            // return connection;
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.CONNECTION_ERROR, 'MongoDB adapter not fully implemented yet');
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.CONNECTION_ERROR, `Failed to connect to MongoDB: ${error.message}`, error);
        }
    }
    /** Close the connection and drop it from the pool. */
    async disconnect(connection) {
        await connection.disconnect();
        this.connectionPool.delete(connection.id);
    }
    /**
     * Probe the connection with an admin `ping` and report the result.
     * Never throws — failures are returned as `{ connected: false, ... }`.
     */
    async testConnection(connection) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            // Test with a simple ping command
            await db.admin().ping();
            const responseTime = Date.now() - startTime;
            return {
                connected: true,
                message: 'Successfully connected to MongoDB',
                databaseType: 'mongodb',
                responseTime,
            };
        }
        catch (error) {
            return {
                connected: false,
                message: 'Failed to connect to MongoDB',
                databaseType: 'mongodb',
                responseTime: Date.now() - startTime,
                error: error.message,
            };
        }
    }
    // ==================== Transaction Methods ====================
    /**
     * Start a client session and open a transaction on it.
     *
     * Sessions are created from the MongoClient, not from the Db handle that
     * `getClient()` returns (`Db` has no `startSession()` in the official
     * driver), so we reach for the connection's underlying client here.
     * @returns the ClientSession with an active transaction
     */
    async beginTransaction(connection) {
        const session = connection.client.startSession();
        session.startTransaction();
        return session;
    }
    /** Commit the session's transaction and release the session. */
    async commitTransaction(connection, transaction) {
        await transaction.commitTransaction();
        await transaction.endSession();
    }
    /** Abort the session's transaction and release the session. */
    async rollbackTransaction(connection, transaction) {
        await transaction.abortTransaction();
        await transaction.endSession();
    }
    /** @throws DatabaseError(QUERY_ERROR) — MongoDB has no savepoint concept. */
    async createSavepoint(connection, transaction, savepointName) {
        // MongoDB doesn't support savepoints in the same way as SQL databases
        // We can simulate this by tracking state, but for now, this is a placeholder
        throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, 'MongoDB does not support savepoints');
    }
    /** @throws DatabaseError(QUERY_ERROR) — MongoDB has no savepoint concept. */
    async rollbackToSavepoint(connection, transaction, savepointName) {
        throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, 'MongoDB does not support savepoints');
    }
    /** @throws DatabaseError(QUERY_ERROR) — MongoDB has no savepoint concept. */
    async releaseSavepoint(connection, transaction, savepointName) {
        throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, 'MongoDB does not support savepoints');
    }
    // ==================== Query Methods ====================
    /**
     * Run a find() against `options.table`, honoring select (projection),
     * orderBy (sort), limit and offset (skip).
     * @returns `{ data, count, executionTime, databaseType }`
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async query(connection, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            // Build MongoDB filter from where clause
            const filter = this.buildMongoFilter(options.where);
            // Build MongoDB query
            let query = collection.find(filter);
            // Apply projection (select)
            if (options.select && options.select.length > 0) {
                const projection = {};
                options.select.forEach((field) => {
                    projection[field] = 1;
                });
                query = query.project(projection);
            }
            // Apply sorting (orderBy may be a single spec or an array of specs)
            if (options.orderBy) {
                const sort = {};
                const orderByArray = Array.isArray(options.orderBy) ? options.orderBy : [options.orderBy];
                orderByArray.forEach((order) => {
                    sort[order.column] = order.order === query_types_1.SortOrder.DESC ? -1 : 1;
                });
                query = query.sort(sort);
            }
            // Apply limit and offset
            if (options.limit) {
                query = query.limit(options.limit);
            }
            if (options.offset) {
                query = query.skip(options.offset);
            }
            const data = await query.toArray();
            const executionTime = Date.now() - startTime;
            return {
                data,
                count: data.length,
                executionTime,
                databaseType: 'mongodb',
            };
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB query failed: ${error.message}`, error);
        }
    }
    /**
     * Insert one or more documents (single objects are wrapped in an array
     * and sent through insertMany).
     * @returns insert counts, stringified inserted ids, and optionally the
     *          input data when `options.returning` is set
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async insert(connection, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const dataArray = Array.isArray(options.data) ? options.data : [options.data];
            const result = await collection.insertMany(dataArray);
            const executionTime = Date.now() - startTime;
            return {
                insertedCount: result.insertedCount,
                insertedIds: Object.values(result.insertedIds).map((id) => id.toString()),
                data: options.returning ? dataArray : undefined,
                executionTime,
                success: true,
            };
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB insert failed: ${error.message}`, error);
        }
    }
    /**
     * Apply `$set: options.data` to every document matching the where clause.
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async update(connection, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const filter = this.buildMongoFilter(options.where);
            const update = { $set: options.data };
            const result = await collection.updateMany(filter, update);
            const executionTime = Date.now() - startTime;
            return {
                updatedCount: result.modifiedCount,
                matchedCount: result.matchedCount,
                executionTime,
                success: true,
            };
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB update failed: ${error.message}`, error);
        }
    }
    /**
     * Delete every document matching the where clause.
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async delete(connection, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const filter = this.buildMongoFilter(options.where);
            const result = await collection.deleteMany(filter);
            const executionTime = Date.now() - startTime;
            return {
                deletedCount: result.deletedCount,
                executionTime,
                success: true,
            };
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB delete failed: ${error.message}`, error);
        }
    }
    /**
     * Update-or-insert a single record, matching on `options.uniqueColumns`.
     * Only the first record is used when `options.data` is an array.
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async upsert(connection, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            // Upsert only works with single records
            const data = Array.isArray(options.data) ? options.data[0] : options.data;
            // Build conflict filter from the unique columns present in the data
            const filter = {};
            options.uniqueColumns.forEach((col) => {
                if (data[col] !== undefined) {
                    filter[col] = data[col];
                }
            });
            const update = { $set: data };
            const result = await collection.updateOne(filter, update, { upsert: true });
            const executionTime = Date.now() - startTime;
            // Guard upsertedCount in both places so affectedCount can never be NaN.
            return {
                insertedCount: result.upsertedCount || 0,
                updatedCount: result.modifiedCount,
                affectedCount: (result.upsertedCount || 0) + result.modifiedCount,
                executionTime,
                success: true,
            };
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB upsert failed: ${error.message}`, error);
        }
    }
    /**
     * Execute a raw database command (`options.query` is passed straight to
     * Db.command). Results are normalized into a rows array.
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async executeRaw(connection, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            // For MongoDB, raw queries might be aggregation pipelines or commands
            // This is a simplified implementation
            const result = await db.command(options.query);
            const executionTime = Date.now() - startTime;
            return {
                rows: Array.isArray(result) ? result : [result],
                rowCount: Array.isArray(result) ? result.length : 1,
                executionTime,
            };
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB raw query failed: ${error.message}`, error);
        }
    }
    // ==================== Aggregation Methods ====================
    /**
     * Count documents matching the where clause.
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async count(connection, options) {
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const filter = this.buildMongoFilter(options.where);
            return await collection.countDocuments(filter);
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB count failed: ${error.message}`, error);
        }
    }
    /**
     * Sum `options.column` over matching documents via a $group pipeline.
     * @returns the sum, or 0 when no documents match
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async sum(connection, options) {
        var _a;
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const matchStage = this.buildMongoFilter(options.where);
            const pipeline = [
                { $match: matchStage },
                {
                    $group: {
                        _id: null,
                        total: { $sum: `$${options.column}` },
                    },
                },
            ];
            const result = await collection.aggregate(pipeline).toArray();
            return ((_a = result[0]) === null || _a === void 0 ? void 0 : _a.total) || 0;
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB sum failed: ${error.message}`, error);
        }
    }
    /**
     * Average `options.column` over matching documents.
     * @returns the average, or 0 when no documents match
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async avg(connection, options) {
        var _a;
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const matchStage = this.buildMongoFilter(options.where);
            const pipeline = [
                { $match: matchStage },
                {
                    $group: {
                        _id: null,
                        average: { $avg: `$${options.column}` },
                    },
                },
            ];
            const result = await collection.aggregate(pipeline).toArray();
            return ((_a = result[0]) === null || _a === void 0 ? void 0 : _a.average) || 0;
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB avg failed: ${error.message}`, error);
        }
    }
    /**
     * Minimum of `options.column` over matching documents.
     * @returns the minimum, or 0 when no documents match
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async min(connection, options) {
        var _a;
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const matchStage = this.buildMongoFilter(options.where);
            const pipeline = [
                { $match: matchStage },
                {
                    $group: {
                        _id: null,
                        minimum: { $min: `$${options.column}` },
                    },
                },
            ];
            const result = await collection.aggregate(pipeline).toArray();
            return ((_a = result[0]) === null || _a === void 0 ? void 0 : _a.minimum) || 0;
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB min failed: ${error.message}`, error);
        }
    }
    /**
     * Maximum of `options.column` over matching documents.
     * @returns the maximum, or 0 when no documents match
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async max(connection, options) {
        var _a;
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const matchStage = this.buildMongoFilter(options.where);
            const pipeline = [
                { $match: matchStage },
                {
                    $group: {
                        _id: null,
                        maximum: { $max: `$${options.column}` },
                    },
                },
            ];
            const result = await collection.aggregate(pipeline).toArray();
            return ((_a = result[0]) === null || _a === void 0 ? void 0 : _a.maximum) || 0;
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB max failed: ${error.message}`, error);
        }
    }
    /**
     * GROUP BY equivalent: groups matching documents by `options.groupBy`
     * columns and applies the optional `options.aggregate` operations
     * (each entry: { function, column } mapped to a $<function> accumulator).
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async groupBy(connection, options) {
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const matchStage = this.buildMongoFilter(options.where);
            // Build group ID from columns
            const groupId = {};
            options.groupBy.forEach((col) => {
                groupId[col] = `$${col}`;
            });
            // Build aggregations
            const aggregations = {};
            if (options.aggregate) {
                Object.entries(options.aggregate).forEach(([key, agg]) => {
                    aggregations[key] = { [`$${agg.function}`]: `$${agg.column}` };
                });
            }
            const pipeline = [
                { $match: matchStage },
                {
                    $group: Object.assign({ _id: groupId }, aggregations),
                },
            ];
            const result = await collection.aggregate(pipeline).toArray();
            // Flatten _id keys into the row; _id itself is set to undefined
            // (not deleted), so it still appears as an own key on the result.
            return result.map((doc) => (Object.assign(Object.assign(Object.assign({}, doc._id), doc), { _id: undefined })));
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB groupBy failed: ${error.message}`, error);
        }
    }
    /**
     * Run several aggregate operations (from `options.operations`) in one
     * whole-collection $group and return the keyed results without _id.
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async aggregate(connection, options) {
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const matchStage = this.buildMongoFilter(options.where);
            // Build aggregations
            const aggregations = {};
            Object.entries(options.operations).forEach(([key, agg]) => {
                aggregations[key] = { [`$${agg.function}`]: `$${agg.column}` };
            });
            const pipeline = [
                { $match: matchStage },
                {
                    $group: Object.assign({ _id: null }, aggregations),
                },
            ];
            const result = await collection.aggregate(pipeline).toArray();
            const _a = result[0] || {}, { _id } = _a, aggregateResult = __rest(_a, ["_id"]);
            return aggregateResult;
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `MongoDB aggregate failed: ${error.message}`, error);
        }
    }
    // ==================== Schema Methods ====================
    /**
     * Create a collection (explicitly, although MongoDB would create it
     * lazily) and any indexes declared in `schema.indexes`.
     * Never throws — failures are reported via `{ success: false, error }`.
     */
    async createTable(connection, schema, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            // MongoDB creates collections automatically, but we can create it explicitly
            await db.createCollection(schema.name);
            // Create indexes if specified
            if (schema.indexes && schema.indexes.length > 0) {
                const collection = db.collection(schema.name);
                for (const index of schema.indexes) {
                    const indexSpec = {};
                    index.columns.forEach((col) => {
                        indexSpec[col.name] = col.order === 'DESC' ? -1 : 1;
                    });
                    await collection.createIndex(indexSpec, { unique: index.unique });
                }
            }
            const executionTime = Date.now() - startTime;
            return {
                success: true,
                operation: 'create',
                table: schema.name,
                executionTime,
            };
        }
        catch (error) {
            return {
                success: false,
                operation: 'create',
                table: schema.name,
                error: error.message,
                executionTime: Date.now() - startTime,
            };
        }
    }
    /**
     * Drop a collection. Never throws — failures are reported in the result.
     */
    async dropTable(connection, tableName) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            await db.dropCollection(tableName);
            return {
                success: true,
                operation: 'drop',
                table: tableName,
                executionTime: Date.now() - startTime,
            };
        }
        catch (error) {
            return {
                success: false,
                operation: 'drop',
                table: tableName,
                error: error.message,
                executionTime: Date.now() - startTime,
            };
        }
    }
    /**
     * No-op: MongoDB is schemaless, so ALTER TABLE has no direct equivalent.
     * Always reports success without touching the database.
     */
    async alterTable(connection, tableName, alterations, options) {
        // MongoDB doesn't have a schema alteration concept like SQL databases
        // Schema changes happen implicitly when documents are inserted
        return {
            success: true,
            operation: 'alter',
            table: tableName,
            executionTime: 0,
        };
    }
    /**
     * Infer a rough schema for a collection from ONE sample document
     * (column types are JS `typeof` results; empty collections yield no
     * columns).
     * @throws DatabaseError(SCHEMA_ERROR) on any driver failure
     */
    async getTableSchema(connection, tableName) {
        try {
            const db = connection.getClient();
            const collection = db.collection(tableName);
            // Get a sample document to infer schema
            const sampleDoc = await collection.findOne({});
            // TODO: Infer column definitions from sample document
            // This is a simplified implementation
            const columns = sampleDoc
                ? Object.keys(sampleDoc).map((key) => ({
                    name: key,
                    type: typeof sampleDoc[key],
                }))
                : [];
            return {
                name: tableName,
                columns,
            };
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.SCHEMA_ERROR, `Failed to get MongoDB collection schema: ${error.message}`, error);
        }
    }
    /**
     * List collection names in the connected database.
     * @throws DatabaseError(QUERY_ERROR) on any driver failure
     */
    async listTables(connection) {
        try {
            const db = connection.getClient();
            const collections = await db.listCollections().toArray();
            return collections.map((col) => col.name);
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.QUERY_ERROR, `Failed to list MongoDB collections: ${error.message}`, error);
        }
    }
    /**
     * Check whether a collection exists. Swallows listTables failures and
     * returns false in that case (best-effort semantics).
     */
    async tableExists(connection, tableName) {
        try {
            const tables = await this.listTables(connection);
            return tables.includes(tableName);
        }
        catch (error) {
            return false;
        }
    }
    // ==================== Index Methods ====================
    /**
     * Create an index described by `options.index` on `options.table`.
     * Never throws — failures are reported via `{ success: false, error }`.
     */
    async createIndex(connection, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const indexSpec = {};
            options.index.columns.forEach((col) => {
                indexSpec[col.name] = col.order === 'DESC' ? -1 : 1;
            });
            await collection.createIndex(indexSpec, {
                name: options.index.name,
                unique: options.index.unique,
            });
            return {
                success: true,
                operation: 'create',
                indexName: options.index.name,
                table: options.table,
                executionTime: Date.now() - startTime,
            };
        }
        catch (error) {
            return {
                success: false,
                operation: 'create',
                indexName: options.index.name,
                table: options.table,
                error: error.message,
                executionTime: Date.now() - startTime,
            };
        }
    }
    /**
     * Drop an index by name. Never throws — failures go in the result.
     */
    async dropIndex(connection, options) {
        const startTime = Date.now();
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            await collection.dropIndex(options.indexName);
            return {
                success: true,
                operation: 'drop',
                indexName: options.indexName,
                table: options.table,
                executionTime: Date.now() - startTime,
            };
        }
        catch (error) {
            return {
                success: false,
                operation: 'drop',
                indexName: options.indexName,
                table: options.table,
                error: error.message,
                executionTime: Date.now() - startTime,
            };
        }
    }
    /**
     * List indexes on a collection, normalized to the adapter's index shape.
     * @throws DatabaseError(INDEX_ERROR) on any driver failure
     */
    async listIndexes(connection, options) {
        try {
            const db = connection.getClient();
            const collection = db.collection(options.table);
            const indexes = await collection.listIndexes().toArray();
            return indexes.map((index) => ({
                name: index.name,
                table: options.table,
                columns: Object.keys(index.key),
                unique: index.unique || false,
                type: index.type || 'BTREE',
            }));
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.INDEX_ERROR, `Failed to list MongoDB indexes: ${error.message}`, error);
        }
    }
    /**
     * Placeholder: fetches collection stats but does not yet parse them.
     * NOTE(review): `collection.stats()` was removed in newer driver
     * versions — confirm the shipped driver before relying on this.
     * @returns [] always (until implemented)
     * @throws DatabaseError(INDEX_ERROR) if the stats call itself fails
     */
    async getIndexStatistics(connection, tableName, indexName) {
        try {
            const db = connection.getClient();
            const collection = db.collection(tableName);
            const stats = await collection.stats();
            // TODO: Parse and return index statistics
            // This is a simplified implementation
            return [];
        }
        catch (error) {
            throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.INDEX_ERROR, `Failed to get MongoDB index statistics: ${error.message}`, error);
        }
    }
    // ==================== Migration Methods ====================
    /** @throws DatabaseError(MIGRATION_ERROR) — not implemented yet. */
    async runMigration(connection, migration, options) {
        // TODO: Implement migration logic
        throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.MIGRATION_ERROR, 'MongoDB migration not implemented yet');
    }
    /** @throws DatabaseError(MIGRATION_ERROR) — not implemented yet. */
    async rollbackMigration(connection, migration, options) {
        // TODO: Implement rollback logic
        throw new database_types_1.DatabaseError(database_types_1.DatabaseErrorType.MIGRATION_ERROR, 'MongoDB migration rollback not implemented yet');
    }
    /** @returns [] — migration history tracking is not implemented yet. */
    async getMigrationHistory(connection, options) {
        // TODO: Implement migration history
        return [];
    }
    // ==================== Helper Methods ====================
    /**
     * Build MongoDB filter from where clause.
     * Currently a pass-through: a missing clause becomes the match-all {},
     * anything else is handed to the driver as-is.
     */
    buildMongoFilter(where) {
        if (!where) {
            return {};
        }
        // Simple implementation - can be extended for complex queries
        // TODO: Handle operators like $and, $or, $gt, $lt, etc.
        return where;
    }
    // ==================== Utility Methods ====================
    /**
     * Escape identifier (collection/field name).
     * MongoDB has no SQL-style quoting; '.' and '$' are replaced because
     * they carry special meaning in field paths and operators.
     */
    escapeIdentifier(identifier) {
        // MongoDB doesn't require escaping in the same way as SQL
        // But we can sanitize the identifier
        return identifier.replace(/[.$]/g, '_');
    }
    /**
     * Escape value for display/logging purposes only — MongoDB uses BSON,
     * so no SQL-style escaping is needed for actual queries.
     */
    escapeValue(value) {
        // MongoDB uses BSON, not SQL, so escaping is different
        // This is mainly for display/logging purposes
        if (value === null || value === undefined) {
            return 'null';
        }
        if (typeof value === 'string') {
            return `"${value.replace(/"/g, '\\"')}"`;
        }
        if (value instanceof Date) {
            return value.toISOString();
        }
        if (typeof value === 'object') {
            return JSON.stringify(value);
        }
        return String(value);
    }
    /**
     * Get database version via the admin serverInfo command.
     * @returns the server version string, or 'Unknown' on any failure
     */
    async getDatabaseVersion(connection) {
        try {
            const db = connection.getClient();
            const info = await db.admin().serverInfo();
            return info.version || 'Unknown';
        }
        catch (error) {
            return 'Unknown';
        }
    }
}
|
|
716
|
+
// Public export: only the adapter class; MongoDBConnection stays internal.
exports.MongoDBAdapter = MongoDBAdapter;
//# sourceMappingURL=mongodb.adapter.js.map
|