mongoplusplus 1.0.4 → 1.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/mongoplus.js CHANGED
@@ -1,320 +1,486 @@
1
- const mongoose = require('mongoose');
2
- class Mongoplus {
3
- constructor(mongoURI) {
4
-
5
- this.mongoURI = mongoURI;
6
- this.allConnections = [];
7
- this.currentIndex = 0;
8
- if (this.mongoURI.filter((uri) => uri.startsWith("readonly")).length == this.mongoURI.length) {
9
- throw new Error('Some of your URIs must be writable. If it is a mistake remove the `readonly:` flag from your urls')
10
- }
11
-
12
- }
13
- static readonlydbs = []
14
- static readonlymodels = [] // Define currentIndex to keep track of the current URI
15
- Schema(schema) {
16
- return mongoose.Schema(schema)
17
- }
18
- addIndex(schema, indextype) {
19
- return schema.index(indextype)
20
- }
21
- getNextMongoURI() {
22
- const uri = this.mongoURI[this.currentIndex];
23
- this.currentIndex = (this.currentIndex + 1) % this.mongoURI.length;
24
- return uri;
25
- }
26
-
27
- connectToAll() {
28
- for (let i = 0; i < this.mongoURI.length; i++) {
29
-
30
- const uri = this.mongoURI[i].replaceAll("readonly:", '');
31
- const con = mongoose.createConnection(uri, {
32
- useNewUrlParser: true,
33
- useUnifiedTopology: true,
34
- });
35
-
36
- this.allConnections.push(con);
37
- if (this.mongoURI[i].startsWith('readonly:')) {
38
-
39
- Mongoplus.readonlydbs.push(con)
40
- }
41
- }
42
-
43
- return this.allConnections;
44
- }
45
-
46
- buildModel(name, schema) {
47
- if (!Object.keys(schema.obj).includes("dbIndex")) {
48
- throw new Error(`[!]Error : < dbIndex > must be present in your schema like dbIndex:{
49
- type: Number,
50
- required: true
51
- } `)
52
- }
53
- if (this.allConnections.length <= 0) {
54
- throw new Error(`[!]Error : All connections should be made first use the code
55
- (async () => {await mongodb.connectToAll();})(); to init connections here mongodb is the class init variable`)
56
- }
57
- const allConnections = this.allConnections;
58
- const model = [];
59
- //console.groupCollapsed("====>",Mongoplus.readonlydbs);
60
- for (let i = 0; i < allConnections.length; i++) {
61
- const mongooseConnection = allConnections[i];
62
- var currentm = mongooseConnection.model(name, schema)
63
- model.push(currentm);
64
- //console.count(Mongoplus.readonlydbs[i]);
65
- if (Mongoplus.readonlydbs.includes(allConnections[i])) {
66
-
67
- Mongoplus.readonlymodels.push(currentm)
68
- }
69
- }
70
- console.log("REadonly ", Mongoplus.readonlymodels)
71
- return new MongoModel(model, schema, Mongoplus.readonlymodels);
72
- }
73
- }
74
-
75
- class MongoModel {
76
- constructor(model, s, readonlydbs) {
77
- if (!Array.isArray(model)) {
78
- throw new Error('Model should be an array');
79
- }
80
- this.model = model;
81
- this.readonlydbs = readonlydbs
82
- this.s = s
83
-
84
-
85
- }
86
- static currentIndex = 0
87
- //===================
88
-
89
- async findInAllDatabase(filter, chain = {}) {
90
- const dynamicComputationPromises = [];
91
- this.model.forEach((modelRef) => {
92
- dynamicComputationPromises.push({ fn: modelRef.find.bind(modelRef), params: [filter], chain: chain });
93
- });
94
- return await this.runLargeComputations(dynamicComputationPromises);
95
- }
96
- async aggregateInAllDatabase(filter, chain = {}) {
97
- const dynamicComputationPromises = [];
98
- this.model.forEach((modelRef) => {
99
- dynamicComputationPromises.push({ fn: modelRef.aggregate.bind(modelRef), params: [filter], chain: chain });
100
- });
101
- return await this.runLargeComputations(dynamicComputationPromises);
102
- }
103
- //==================
104
- async writeInAllDatabase(data) {
105
- data["dbIndex"] = -1
106
- const dynamicComputationPromises = [];
107
- modellist = this.model
108
- //this.readonlydbs.forEach((i)=>{modellist.splice(i,1,null)})
109
-
110
- for (let i = 0; i < this.model.length; i++) {
111
- if (Mongoplus.readonlymodels.includes(this.model[i])) continue;
112
- var x = new this.model[i](data)
113
-
114
- dynamicComputationPromises.push(await x.save());
115
-
116
-
117
- }
118
-
119
- return [].concat(dynamicComputationPromises);
120
-
121
- }
122
- //==================
123
- async UpdateOneInAllDatabase(filter, update) {
124
-
125
- const dynamicComputationPromises = [];
126
- this.model.forEach((modelRef) => {
127
-
128
- dynamicComputationPromises.push({ fn: modelRef.findOneAndUpdate.bind(modelRef), params: [filter, update, { new: true }], chain: {} });
129
- });
130
- return await this.runLargeComputations(dynamicComputationPromises);
131
-
132
- }
133
- //==================
134
- async UpdateByIdInAllDatabase(id, update) {
135
-
136
- const dynamicComputationPromises = [];
137
- this.model.forEach((modelRef) => {
138
-
139
- dynamicComputationPromises.push({ fn: modelRef.findByIdAndUpdate.bind(modelRef), params: [id, update, { new: true }], chain: {} });
140
- });
141
- return await this.runLargeComputations(dynamicComputationPromises);
142
-
143
- }
144
- async findByIdInAllDatabaseAndDelete(id) {
145
-
146
- const dynamicComputationPromises = [];
147
- this.model.forEach((modelRef) => {
148
-
149
- dynamicComputationPromises.push({ fn: modelRef.findByIdAndDelete.bind(modelRef), params: [id], chain: {} });
150
- });
151
- return await this.runLargeComputations(dynamicComputationPromises);
152
-
153
- }
154
- async findOneInAllDatabaseAndDelete(filter) {
155
-
156
- const dynamicComputationPromises = [];
157
- this.model.forEach((modelRef) => {
158
-
159
- dynamicComputationPromises.push({ fn: modelRef.findOneAndDelete.bind(modelRef), params: [filter], chain: {} });
160
- });
161
- return await this.runLargeComputations(dynamicComputationPromises);
162
-
163
- }
164
- //=======================
165
- async write(data) {
166
-
167
-
168
- const currentModel = this.model[MongoModel.currentIndex];
169
- data["dbIndex"] = MongoModel.currentIndex;
170
- MongoModel.currentIndex = (MongoModel.currentIndex + 1) % this.model.length;
171
- if (Mongoplus.readonlymodels.includes(currentModel)) {
172
- this.write(data)
173
- //("This model is readonly");
174
-
175
- }
176
-
177
-
178
- try {
179
-
180
- let dataToWrite = new currentModel(data)
181
- return await dataToWrite.save()
182
- } catch (error) {
183
- throw error
184
- }
185
-
186
- }
187
- //==================
188
-
189
- async findOne(dbIndex, filter, chain = {}) {
190
- var currentModel = this.model[dbIndex]
191
-
192
- if (chain.skip && chain.limit && chain.sort) {
193
- currentModel.findOne(filter).skip(chain.skip).limit(chain.limit).sort(chain.sort)
194
- } else if (chain.skip && chain.limit) {
195
- return currentModel.findOne(filter).skip(chain.skip).limit(chain.limit)
196
- }
197
- else if (chain.skip) {
198
- return currentModel.findOne(filter).skip(chain.skip)
199
- }
200
-
201
- else if (chain.limit) {
202
- return currentModel.findOne(filter).limit(chain.limit)
203
- } else {
204
- return currentModel.findOne(filter);
205
- }
206
-
207
-
208
- }
209
-
210
- //===============
211
-
212
- async find(dbIndex, filter, chain = {}) {
213
- var currentModel = this.model[dbIndex]
214
- // Start with the base query
215
- let query = currentModel.find(filter);
216
-
217
- // Dynamically apply chain options if they exist
218
- for (const [key, value] of Object.entries(chain)) {
219
- if (query[key]) {
220
- query = query[key](value);
221
- }
222
- }
223
-
224
- return query;
225
-
226
-
227
-
228
- }
229
- //=======================
230
- async findById(dbIndex, filter, chain = {}) {
231
- const currentModel = this.model[dbIndex];
232
-
233
- // Start with the base query
234
- let query = currentModel.findById(filter);
235
-
236
- // Dynamically apply chain options if they exist
237
- for (const [key, value] of Object.entries(chain)) {
238
- if (query[key]) {
239
- query = query[key](value);
240
- }
241
- }
242
-
243
- return query;
244
- }
245
-
246
-
247
- //====================
248
- async findByIdAndUpdate(dbIndex, id, update) {
249
- var currentModel = this.model[dbIndex]
250
- return currentModel.findByIdAndUpdate(id, update, { new: true });
251
- }
252
- //===============
253
- async findByIdAndDelete(dbIndex, id, update) {
254
- var currentModel = this.model[dbIndex]
255
- return currentModel.findByIdAndRemove(id, update, { new: true });
256
- }
257
- //===========
258
- async findOneAndUpdate(dbIndex, filter, update) {
259
- var currentModel = this.model[dbIndex]
260
- return currentModel.findOneAndUpdate(filter, update, { new: true });
261
- }
262
- //=============
263
- async aggregate(dbIndex, filter, update) {
264
- var currentModel = this.model[dbIndex]
265
- return currentModel.aggregate(filter);
266
- }
267
- //===========
268
- async watch(dbIndex) {
269
- return this.model[dbIndex].watch()
270
- }
271
- //================
272
-
273
-
274
-
275
-
276
-
277
- getNextModel() {
278
- const currentModel = this.model[this.currentIndex];
279
- var writen = this.currentIndex
280
- this.currentIndex = (this.currentIndex + 1) % this.model.length;
281
- return [currentModel, writen];
282
- }
283
- async runLargeComputations(computationPairs) {
284
- try {
285
- const startTime = performance.now();
286
-
287
- // Execute all computation functions concurrently using Promise.all
288
- const results = await Promise.all(
289
- computationPairs.map(async pair => {
290
- var chain = pair.chain;
291
- var query = pair.fn(...pair.params);
292
- // Start with the base query
293
-
294
- // Dynamically apply chain options if they exist
295
- for (const [key, value] of Object.entries(chain)) {
296
- if (query[key]) {
297
- query = query[key](value);
298
- }
299
- }
300
-
301
- return query;
302
-
303
- })
304
- );
305
-
306
- const endTime = performance.now();
307
- const totalTime = endTime - startTime;
308
-
309
- // Process the results as needed
310
- // const sum = results.reduce((acc, result) => acc + result, 0);
311
-
312
- return { results: [].concat(...results), totalTime };
313
- } catch (error) {
314
- console.error('Error:', error);
315
- throw error; // Rethrow the error if needed
316
- }
317
- }
318
- }
319
-
320
- module.exports = Mongoplus;
1
+ const mongoose = require('mongoose');
2
+ const { performance } = require('perf_hooks');
3
class Mongoplus {
    /**
     * Round-robin manager for a pool of MongoDB connections.
     * @param {string[]} mongoURI - connection strings; prefix a URI with
     *   "readonly:" to mark that database as read-only.
     * @throws {Error} when every URI carries the readonly flag — at least
     *   one database must accept writes.
     */
    constructor(mongoURI) {
        this.mongoURI = mongoURI;
        this.allConnections = [];
        this.currentIndex = 0; // round-robin cursor over mongoURI
        // Match the exact "readonly:" flag so this check agrees with the
        // prefix stripped in connectToAll().
        const readonlyCount = this.mongoURI.filter((uri) => uri.startsWith("readonly:")).length;
        if (readonlyCount === this.mongoURI.length) {
            throw new Error('Some of your URIs must be writable. If it is a mistake remove the `readonly:` flag from your urls')
        }
    }
    // Connections flagged read-only, shared across instances.
    static readonlydbs = []
    // Models compiled from read-only connections (filled by buildModel).
    static readonlymodels = []

    /** Build a mongoose schema from a plain definition object. */
    Schema(schema) {
        return mongoose.Schema(schema)
    }

    /** Register an index definition (e.g. { field: 1 }) on `schema`. */
    addIndex(schema, indextype) {
        return schema.index(indextype)
    }

    /** Return the next URI in round-robin order, advancing the cursor. */
    getNextMongoURI() {
        const uri = this.mongoURI[this.currentIndex];
        this.currentIndex = (this.currentIndex + 1) % this.mongoURI.length;
        return uri;
    }

    /**
     * Open a connection to every configured URI.
     * Only the leading "readonly:" flag is stripped (stripping every
     * occurrence could corrupt a URI that contains the substring);
     * flagged connections are also tracked in Mongoplus.readonlydbs.
     * @returns {Array} all created connections.
     */
    connectToAll() {
        for (let i = 0; i < this.mongoURI.length; i++) {
            const isReadonly = this.mongoURI[i].startsWith('readonly:');
            const uri = isReadonly
                ? this.mongoURI[i].slice('readonly:'.length)
                : this.mongoURI[i];
            const con = mongoose.createConnection(uri, {
                useNewUrlParser: true,
                useUnifiedTopology: true,
            });
            this.allConnections.push(con);
            if (isReadonly) {
                Mongoplus.readonlydbs.push(con)
            }
        }
        return this.allConnections;
    }

    /**
     * Compile `schema` into a model on every connection and wrap the set in
     * a MongoModel. The schema must declare a `dbIndex` field, used to
     * record which database a document was written to.
     * @throws {Error} when dbIndex is missing or connectToAll() has not run.
     */
    buildModel(name, schema) {
        if (!Object.keys(schema.obj).includes("dbIndex")) {
            throw new Error(`[!]Error : < dbIndex > must be present in your schema like dbIndex:{
        type: Number,
        required: true
    } `)
        }
        if (this.allConnections.length <= 0) {
            throw new Error(`[!]Error : All connections should be made first use the code
    (async () => {await mongodb.connectToAll();})(); to init connections here mongodb is the class init variable`)
        }
        const model = [];
        for (let i = 0; i < this.allConnections.length; i++) {
            const compiled = this.allConnections[i].model(name, schema);
            model.push(compiled);
            // Models built on read-only connections must never be written to.
            if (Mongoplus.readonlydbs.includes(this.allConnections[i])) {
                Mongoplus.readonlymodels.push(compiled)
            }
        }
        return new MongoModel(model, schema, Mongoplus.readonlymodels);
    }
}
75
+
76
class MongoModel {
    /**
     * Wraps one compiled mongoose model per connection and fans operations
     * out across them.
     * @param {Array} model - one compiled model per connection.
     * @param {Object} s - the shared mongoose schema.
     * @param {Array} readonlydbs - subset of `model` that must not be written to.
     */
    constructor(model, s, readonlydbs) {
        if (!Array.isArray(model)) {
            throw new Error('Model should be an array');
        }
        this.model = model;
        this.readonlydbs = readonlydbs
        this.s = s
    }
    // Shared round-robin cursor used by write()/getNextModel()/bulkWrite()
    // so successive writes rotate over the databases.
    static currentIndex = 0

    //===================

    /**
     * Run find(filter) on every database.
     * @param {Object} chain - query modifiers ({ skip, limit, sort, ... })
     *   applied to each per-database query.
     * @returns {Promise<{results: Array, totalTime: number}>}
     */
    async findInAllDatabase(filter, chain = {}) {
        const jobs = this.model.map((modelRef) => ({
            fn: modelRef.find.bind(modelRef),
            params: [filter],
            chain,
        }));
        return await this.runLargeComputations(jobs);
    }

    /** Run aggregate(pipeline) on every database and merge the results. */
    async aggregateInAllDatabase(filter, chain = {}) {
        const jobs = this.model.map((modelRef) => ({
            fn: modelRef.aggregate.bind(modelRef),
            params: [filter],
            chain,
        }));
        return await this.runLargeComputations(jobs);
    }

    //==================

    /**
     * Insert a copy of `data` into every writable database. Each copy gets
     * its own dbIndex so the stored document records where it lives.
     * The caller's `data` object is not mutated.
     * @returns {Promise<Array>} the saved documents.
     */
    async writeInAllDatabase(data) {
        const saves = [];
        for (let i = 0; i < this.model.length; i++) {
            // Never write through a read-only model.
            if (Mongoplus.readonlymodels.includes(this.model[i])) continue;
            const doc = new this.model[i]({ ...data, dbIndex: i });
            saves.push(doc.save());
        }
        return Promise.all(saves);
    }

    //==================

    /** findOneAndUpdate(filter, update) on every database (returns the updated docs). */
    async UpdateOneInAllDatabase(filter, update) {
        const jobs = this.model.map((modelRef) => ({
            fn: modelRef.findOneAndUpdate.bind(modelRef),
            params: [filter, update, { new: true }],
            chain: {},
        }));
        return await this.runLargeComputations(jobs);
    }

    //==================

    /** findByIdAndUpdate(id, update) on every database (returns the updated docs). */
    async UpdateByIdInAllDatabase(id, update) {
        const jobs = this.model.map((modelRef) => ({
            fn: modelRef.findByIdAndUpdate.bind(modelRef),
            params: [id, update, { new: true }],
            chain: {},
        }));
        return await this.runLargeComputations(jobs);
    }

    /** Delete the document with `id` from every database. */
    async findByIdInAllDatabaseAndDelete(id) {
        const jobs = this.model.map((modelRef) => ({
            fn: modelRef.findByIdAndDelete.bind(modelRef),
            params: [id],
            chain: {},
        }));
        return await this.runLargeComputations(jobs);
    }

    /** Delete the first document matching `filter` from every database. */
    async findOneInAllDatabaseAndDelete(filter) {
        const jobs = this.model.map((modelRef) => ({
            fn: modelRef.findOneAndDelete.bind(modelRef),
            params: [filter],
            chain: {},
        }));
        return await this.runLargeComputations(jobs);
    }

    /**
     * Delete many documents matching `filter` in all databases and return
     * aggregated results.
     */
    async findManyInAllDatabaseAndDelete(filter) {
        const jobs = this.model.map((modelRef) => ({
            fn: modelRef.deleteMany.bind(modelRef),
            params: [filter],
            chain: {},
        }));
        return await this.runLargeComputations(jobs);
    }

    //=======================

    /**
     * Write `data` to the next writable database in round-robin order,
     * stamping it with the database's index. Read-only models are skipped.
     * @throws {Error} when no writable model exists (instead of recursing
     *   forever through an all-readonly pool).
     */
    async write(data) {
        if (this.model.every((m) => Mongoplus.readonlymodels.includes(m))) {
            throw new Error("No writable databases available.");
        }
        const currentModel = this.model[MongoModel.currentIndex];
        data["dbIndex"] = MongoModel.currentIndex;
        MongoModel.currentIndex = (MongoModel.currentIndex + 1) % this.model.length;
        if (Mongoplus.readonlymodels.includes(currentModel)) {
            // Current slot is read-only: advance to the next database.
            return await this.write(data);
        }
        const dataToWrite = new currentModel(data);
        return await dataToWrite.save();
    }

    //==================

    /**
     * Distribute `data` (an array of plain documents) across all writable
     * databases using transactional bulkWrite batches ("zipper" distribution).
     * @param {Object[]} data - documents to upsert.
     * @param {Object} [options]
     * @param {number} [options.batchSize=1000] - items per batch.
     * @param {boolean} [options.concurrentBatches=true] - run batches in
     *   parallel (faster) or sequentially (gentler on resources).
     * @returns {Promise<Array>} bulkWrite results, one per touched database.
     * @throws {Error} with `.failedBatches` and `.originalError` attached
     *   when a batch still fails after one retry. Transactions require a
     *   replica set; standalone mongod instances are rejected.
     */
    async bulkWrite(data, options = {}) {
        const {
            batchSize = 1000,
            concurrentBatches = true
        } = options;

        if (!data || data.length === 0) return [];

        // Only writable models take part in the distribution.
        const writableModels = this.model.filter(m => !Mongoplus.readonlymodels.includes(m));
        const numDBs = writableModels.length;
        if (numDBs === 0) {
            throw new Error("No writable databases available.");
        }

        // Split the payload into batches of at most batchSize items.
        const batches = [];
        for (let i = 0; i < data.length; i += batchSize) {
            batches.push(data.slice(i, i + batchSize));
        }

        console.log(`[Mongoplus] Processing ${data.length} items in ${batches.length} batch(es) of max ${batchSize} items`);

        const finalRetryArray = [];
        const allResults = [];

        // Process one batch: bucket items round-robin over writable DBs,
        // then run one transactional bulkWrite per bucket.
        const processBatch = async (batchData, batchNumber) => {
            const buckets = Array.from({ length: numDBs }, () => []);

            batchData.forEach((item, index) => {
                const bucketIndex = index % numDBs;
                const writableModel = writableModels[bucketIndex];
                // Record the model's position within the full model array.
                const dbIdx = this.model.indexOf(writableModel);
                // Clone so the caller's objects are never mutated.
                const itemCopy = { ...item, dbIndex: dbIdx };

                // Upsert filter: prefer an explicit _id, fall back to `id`,
                // otherwise mint a fresh ObjectId for a brand-new document.
                let filter;
                if (itemCopy._id) {
                    filter = { _id: itemCopy._id };
                } else if (itemCopy.id) {
                    filter = { _id: itemCopy.id };
                    itemCopy._id = itemCopy.id; // Normalize to _id
                } else {
                    itemCopy._id = new mongoose.Types.ObjectId();
                    filter = { _id: itemCopy._id };
                }

                buckets[bucketIndex].push({
                    updateOne: {
                        filter: filter,
                        update: { $set: itemCopy },
                        upsert: true
                    }
                });
            });

            // Run the ops inside a transaction; retry once before giving up.
            const runTransactionWithRetry = async (model, ops, modelIndex) => {
                let session;
                try {
                    session = await model.db.startSession();
                } catch (sessionError) {
                    throw new Error(`[Mongoplus] Database ${modelIndex} is a standalone instance. Transactions (required for bulkWriteZipper) only work on Replica Sets. \nError: ${JSON.stringify(sessionError)}`);
                }
                const attemptWrite = async () => {
                    let result;
                    await session.withTransaction(async () => {
                        result = await model.bulkWrite(ops, { session, ordered: false });
                    });
                    return result;
                };

                try {
                    // First attempt.
                    return await attemptWrite();
                } catch (firstError) {
                    console.warn(`[Mongoplus] Batch ${batchNumber} failed for ${model.modelName} (DB ${modelIndex}). Retrying once...`);
                    try {
                        // Second attempt (retry).
                        return await attemptWrite();
                    } catch (retryError) {
                        // Record the failure for the aggregated error report.
                        finalRetryArray.push({
                            batch: batchNumber,
                            model: model.modelName,
                            dbIndex: modelIndex,
                            opsCount: ops.length,
                            data: ops.map(o => o.updateOne.update.$set),
                            error: retryError.message
                        });
                        throw retryError;
                    }
                } finally {
                    await session.endSession();
                }
            };

            // Fire one transactional write per non-empty bucket.
            const results = await Promise.all(
                buckets.map((ops, i) => {
                    if (ops.length === 0) return Promise.resolve(null);
                    const dbIdx = this.model.indexOf(writableModels[i]);
                    return runTransactionWithRetry(writableModels[i], ops, dbIdx);
                })
            );

            return results.filter(r => r !== null);
        };

        try {
            if (concurrentBatches && batches.length > 1) {
                // Run all batches concurrently (faster, more resource intensive).
                console.log(`[Mongoplus] Running ${batches.length} batches concurrently`);
                const batchResults = await Promise.all(
                    batches.map((batch, idx) => processBatch(batch, idx + 1))
                );
                allResults.push(...batchResults.flat());
            } else {
                // Run batches sequentially (slower, safer for large datasets).
                console.log(`[Mongoplus] Running ${batches.length} batches sequentially`);
                for (let i = 0; i < batches.length; i++) {
                    const result = await processBatch(batches[i], i + 1);
                    allResults.push(...result);
                    console.log(`[Mongoplus] Completed batch ${i + 1}/${batches.length}`);
                }
            }

            // Keep write()'s round-robin cursor consistent with bulk writes.
            MongoModel.currentIndex = (MongoModel.currentIndex + data.length) % this.model.length;

            console.log(`[Mongoplus] Successfully processed ${data.length} items across ${numDBs} databases`);
            return allResults;

        } catch (error) {
            // Surface a comprehensive error carrying the retry report.
            const exception = new Error(`Zipper Bulk Write failed after retries: ${error.message}`);
            exception.failedBatches = finalRetryArray;
            exception.originalError = error;
            throw exception;
        }
    }

    //===================

    /**
     * findOne on database `dbIndex`. Chain options (skip/limit/sort/...)
     * are applied dynamically — matching find()/findById() — so a chain
     * with only `sort` is no longer silently ignored.
     */
    async findOne(dbIndex, filter, chain = {}) {
        let query = this.model[dbIndex].findOne(filter);
        for (const [key, value] of Object.entries(chain)) {
            if (query[key]) {
                query = query[key](value);
            }
        }
        return query;
    }

    //===============

    /** find on database `dbIndex`, with chain options applied dynamically. */
    async find(dbIndex, filter, chain = {}) {
        let query = this.model[dbIndex].find(filter);
        // Dynamically apply chain options if they exist.
        for (const [key, value] of Object.entries(chain)) {
            if (query[key]) {
                query = query[key](value);
            }
        }
        return query;
    }

    //=======================

    /** findById on database `dbIndex`, with chain options applied dynamically. */
    async findById(dbIndex, filter, chain = {}) {
        let query = this.model[dbIndex].findById(filter);
        // Dynamically apply chain options if they exist.
        for (const [key, value] of Object.entries(chain)) {
            if (query[key]) {
                query = query[key](value);
            }
        }
        return query;
    }

    //====================

    /** findByIdAndUpdate on database `dbIndex`; returns the updated document. */
    async findByIdAndUpdate(dbIndex, id, update) {
        return this.model[dbIndex].findByIdAndUpdate(id, update, { new: true });
    }

    //===============

    /**
     * Delete the document with `id` on database `dbIndex`.
     * Uses findByIdAndDelete: findByIdAndRemove was removed in Mongoose 7.
     * (`update` is forwarded as the options object, as before.)
     */
    async findByIdAndDelete(dbIndex, id, update) {
        return this.model[dbIndex].findByIdAndDelete(id, update);
    }

    //===========

    /** findOneAndUpdate on database `dbIndex`; returns the updated document. */
    async findOneAndUpdate(dbIndex, filter, update) {
        return this.model[dbIndex].findOneAndUpdate(filter, update, { new: true });
    }

    //=============

    /** Run an aggregation pipeline on database `dbIndex`. */
    async aggregate(dbIndex, filter, update) {
        return this.model[dbIndex].aggregate(filter);
    }

    //===========

    /** Open a change stream on database `dbIndex`. */
    async watch(dbIndex) {
        return this.model[dbIndex].watch()
    }

    //================

    /**
     * Return [model, index] for the next database in rotation.
     * Uses the shared static cursor — the instance never defines
     * `currentIndex`, so reading `this.currentIndex` produced NaN indices.
     */
    getNextModel() {
        const index = MongoModel.currentIndex;
        const currentModel = this.model[index];
        MongoModel.currentIndex = (index + 1) % this.model.length;
        return [currentModel, index];
    }

    /**
     * Execute `{ fn, params, chain }` jobs concurrently, applying each
     * job's chain options to its query, and merge all results.
     * @returns {Promise<{results: Array, totalTime: number}>} merged
     *   results plus wall-clock duration in milliseconds.
     */
    async runLargeComputations(computationPairs) {
        try {
            const startTime = performance.now();

            // Execute all computation functions concurrently.
            const results = await Promise.all(
                computationPairs.map(async pair => {
                    let query = pair.fn(...pair.params);
                    // Dynamically apply chain options if they exist.
                    for (const [key, value] of Object.entries(pair.chain)) {
                        if (query[key]) {
                            query = query[key](value);
                        }
                    }
                    return query;
                })
            );

            const endTime = performance.now();
            const totalTime = endTime - startTime;

            // Flatten the per-database result arrays into one list.
            return { results: [].concat(...results), totalTime };
        } catch (error) {
            console.error('Error:', error);
            throw error; // Rethrow so callers can handle it.
        }
    }
}
480
+
481
// Attach named export and default for interoperability.
// NOTE: once `module.exports` is reassigned, the local `exports` alias no
// longer backs it, so all attachments go through module.exports / the class.
Mongoplus.MongoModel = MongoModel;
module.exports = Mongoplus;
module.exports.default = Mongoplus;
module.exports.MongoModel = MongoModel;