mongoplusplus 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +170 -0
- package/mongoplus.js +354 -0
- package/package.json +15 -0
package/README.md
ADDED
@@ -0,0 +1,170 @@

# Mongoplus

## Overview

`mongoplus` is a Node.js package designed to facilitate load balancing of read and write operations across multiple MongoDB databases. It simplifies database connection management, schema definition, model building, and CRUD operation execution.

## Installation

Install Mongoplus via npm:

```bash
npm install mongoplus
```

## Importing

```node
const mongoplus = require('mongoplus');
```

## Initializing

Pass an array of connection URIs to the constructor. Prefix a URI with `readonly:` to mark that database as read-only; at least one URI must be writable, otherwise the constructor throws.

```node
const dbname = 'testforUP';

const mongoURI1 = `mongodb+srv://xxxxx:xxxxxx@cluster0.xxxxx.mongodb.net/${dbname}?retryWrites=true&w=majority`;
const mongoURI2 = `readonly:mongodb+srv://xxxxxxx:xxxxxx@cluster0.xxxxxx.mongodb.net/${dbname}?retryWrites=true&w=majority`;
const mongodb = new mongoplus([mongoURI1, mongoURI2]);
```

## Connecting to the databases

Call this at the top level of your main file (in this test code, right after the `mongodb` variable declaration):

```node
(async () => {
  await mongodb.connectToAll();
})();
```

## Defining a schema

Schema definition is very similar to Mongoose, but one mandatory field, `dbIndex`, must be present in the schema; it acts as a database identifier for each document.

```node
// note: mongoose itself is still required for Schema.Types
const likeSH = mongodb.Schema({
  user_id: { type: mongoose.Schema.Types.ObjectId, required: true, ref: "users" },
  postId: { type: mongoose.Schema.Types.ObjectId, required: true },
  dbIndex: { type: Number, required: true },
  like_time: { type: Date, default: Date.now }
})
```

### Model building

```node
const likes = mongodb.buildModel("likes", likeSH)
```

##### Note that `dbIndex` is used to indicate which MongoDB server a document belongs to.
###### It is mandatory; otherwise `buildModel` will throw an error:

```node
throw new Error(`[!]Error : < dbIndex > must be present in your schema like dbIndex:{
  type: Number,
  required: true
} `)
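
Because every stored document carries its `dbIndex`, you can route later single-database reads to the database that owns it. A minimal sketch, assuming the `likes` model from above and hypothetical `someUserId` / `somePostId` values (`write` and `findOne` are described under the usage methods below):

```node
// hypothetical example: write a like, then read it back from the database that stored it
const written = await likes.write({ user_id: someUserId, postId: somePostId });
const sameLike = await likes.findOne(written.dbIndex, { _id: written._id });
```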

### Usage methods

```node
findInAllDatabase(filter, chain = {})
```
Usage: Finds documents matching the filter in all MongoDB databases.
```node
const result = await likes.findInAllDatabase({ name: 'John' }, { limit: 10, skip: 0 });
```
Returns: An array of documents matching the filter from all databases.

```node
writeInAllDatabase(data)
```
Usage: Writes data to all MongoDB databases.
```node
const result = await likes.writeInAllDatabase({ name: 'John', age: 30 });
```
Returns: An array of written documents from all databases.

```node
UpdateOneInAllDatabase(filter, update)
```
Usage: Updates a single document matching the filter in all MongoDB databases.
```node
const updatedDocument = await likes.UpdateOneInAllDatabase({ name: 'John' }, { age: 35 });
```
Returns: The updated document.

```node
UpdateByIdInAllDatabase(id, update)
```
Usage: Updates a document by ID in all MongoDB databases.
```node
const updatedDocument = await likes.UpdateByIdInAllDatabase('123456', { age: 35 });
```
Returns: The updated document.

```node
findByIdInAllDatabaseAndDelete(id)
```
Usage: Finds a document by ID in all MongoDB databases and deletes it.
```node
const deletedDocument = await likes.findByIdInAllDatabaseAndDelete('123456');
```
Returns: The deleted document.

```node
findOneInAllDatabaseAndDelete(filter)
```
Usage: Finds a single document matching the filter in all MongoDB databases and deletes it.
```node
const deletedDocument = await likes.findOneInAllDatabaseAndDelete({ name: 'John' });
```
Returns: The deleted document.

```node
write(data)
```
Usage: Writes data to the next MongoDB database in round-robin order and stamps the document with that database's `dbIndex`.
```node
const result = await likes.write({ name: 'John', age: 30 });
```
Returns: The written document.

```node
findOne(dbIndex, filter, chain = {})
```
Usage: Finds a single document matching the filter in a specific MongoDB database.
```node
const document = await likes.findOne(0, { name: 'John' }, { limit: 1 });
```
Returns: The found document.

```node
find(dbIndex, filter, chain = {})
```
Usage: Finds documents matching the filter in a specific MongoDB database.
```node
const documents = await likes.find(0, { age: { $gt: 18 } }, { limit: 10 });
```
Returns: An array of found documents.

```node
findById(dbIndex, id, chain = {})
```
Usage: Finds a document by ID in a specific MongoDB database.
```node
const document = await likes.findById(0, '123456');
```
Returns: The found document.

```node
findByIdAndUpdate(dbIndex, id, update)
```
Usage: Finds a document by ID in a specific MongoDB database and updates it.
```node
const updatedDocument = await likes.findByIdAndUpdate(0, '123456', { age: 35 });
```
Returns: The updated document.

```node
findByIdAndDelete(dbIndex, id)
```
Usage: Finds a document by ID in a specific MongoDB database and deletes it.
```node
const deletedDocument = await likes.findByIdAndDelete(0, '123456');
```
Returns: The deleted document.

```node
findOneAndUpdate(dbIndex, filter, update)
```
Usage: Finds a single document matching the filter in a specific MongoDB database and updates it.
```node
const updatedDocument = await likes.findOneAndUpdate(0, { name: 'John' }, { age: 35 });
```
Returns: The updated document.

```node
aggregate(dbIndex, filter)
```
Usage: Runs an aggregation pipeline against a specific MongoDB database.
```node
const aggregationResult = await likes.aggregate(0, [{ $group: { _id: '$name', total: { $sum: '$age' } } }]);
```
Returns: The aggregation result.

```node
watch(dbIndex)
```
Usage: Starts watching for changes in a specific MongoDB database.
```node
const watcher = await likes.watch(0);
```
Returns: A watcher object for the database.
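
The object returned by `watch` is a standard MongoDB/Mongoose change stream, so the usual event API should apply; this is a sketch of plain change-stream usage rather than a mongoplus-specific feature:

```node
const watcher = await likes.watch(0);
watcher.on('change', (change) => {
  // fires for inserts, updates and deletes on that database
  console.log('change detected:', change.operationType);
});
```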

> Most of the functionality is the same as in Mongoose, so you can use it directly, for example indexing a schema:
```node
likeSH.index({ user_id: 1, postId: 1 }, { unique: true });
```
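
Other schema-level Mongoose features should carry over in the same way, since `mongodb.Schema` returns a regular Mongoose schema. For example, a standard `pre('save')` hook (plain Mongoose API, not something mongoplus adds):

```node
likeSH.pre('save', function (next) {
  // runs before every save on models built from this schema
  this.like_time = this.like_time || new Date();
  next();
});
```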

# Contributing

There are many things that are not yet ported; feel free to contribute!
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
package/mongoplus.js
ADDED
@@ -0,0 +1,354 @@

const mongoose = require('mongoose');
const { performance } = require('perf_hooks'); // used to time multi-database operations

class Mongoplus {
  constructor(mongoURI) {
    this.mongoURI = mongoURI;
    this.allConnections = [];
    this.currentIndex = 0; // keeps track of the current URI
    if (this.mongoURI.filter((uri) => uri.startsWith("readonly")).length == this.mongoURI.length) {
      throw new Error('Some of your URIs must be writable. If this is a mistake, remove the `readonly:` prefix from your URLs.');
    }
  }

  static readonlydbs = [];
  static readonlymodels = [];

  Schema(schema) {
    return mongoose.Schema(schema);
  }

  addIndex(schema, indextype) {
    return schema.index(indextype);
  }

  getNextMongoURI() {
    const uri = this.mongoURI[this.currentIndex];
    this.currentIndex = (this.currentIndex + 1) % this.mongoURI.length;
    return uri;
  }

  connectToAll() {
    for (let i = 0; i < this.mongoURI.length; i++) {
      const uri = this.mongoURI[i].replaceAll("readonly:", '');
      const con = mongoose.createConnection(uri, {
        useNewUrlParser: true,
        useUnifiedTopology: true,
      });

      this.allConnections.push(con);
      if (this.mongoURI[i].startsWith('readonly:')) {
        Mongoplus.readonlydbs.push(con);
      }
    }

    return this.allConnections;
  }

  buildModel(name, schema) {
    if (!Object.keys(schema.obj).includes("dbIndex")) {
      throw new Error(`[!]Error : < dbIndex > must be present in your schema like dbIndex:{
        type: Number,
        required: true
      } `);
    }
    if (this.allConnections.length <= 0) {
      throw new Error(`[!]Error : All connections should be made first. Use the code
      (async () => { await mongodb.connectToAll(); })(); to init connections. Here mongodb is the class init variable`);
    }
    const allConnections = this.allConnections;
    const model = [];
    for (let i = 0; i < allConnections.length; i++) {
      const mongooseConnection = allConnections[i];
      const currentm = mongooseConnection.model(name, schema);
      model.push(currentm);
      if (Mongoplus.readonlydbs.includes(allConnections[i])) {
        Mongoplus.readonlymodels.push(currentm);
      }
    }
    console.log("Readonly models:", Mongoplus.readonlymodels);
    return new MongoModel(model, schema, Mongoplus.readonlymodels);
  }
}

class MongoModel {
  constructor(model, s, readonlydbs) {
    if (!Array.isArray(model)) {
      throw new Error('Model should be an array');
    }
    this.model = model;
    this.readonlydbs = readonlydbs;
    this.s = s;
  }

  static currentIndex = 0;
  //===================

  async findInAllDatabase(filter, chain = {}) {
    const dynamicComputationPromises = [];
    this.model.forEach((modelRef) => {
      dynamicComputationPromises.push({ fn: modelRef.find.bind(modelRef), params: [filter], chain: chain });
    });
    return await this.runLargeComputations(dynamicComputationPromises);
  }

  async aggregateInAllDatabase(filter, chain = {}) {
    const dynamicComputationPromises = [];
    this.model.forEach((modelRef) => {
      dynamicComputationPromises.push({ fn: modelRef.aggregate.bind(modelRef), params: [filter], chain: chain });
    });
    return await this.runLargeComputations(dynamicComputationPromises);
  }

  //==================
  async writeInAllDatabase(data) {
    // documents replicated to every database are stamped with dbIndex -1
    data["dbIndex"] = -1;
    const dynamicComputationPromises = [];

    for (let i = 0; i < this.model.length; i++) {
      // skip models that belong to read-only connections
      if (Mongoplus.readonlymodels.includes(this.model[i])) continue;
      const x = new this.model[i](data);
      dynamicComputationPromises.push(await x.save());
    }

    return [].concat(dynamicComputationPromises);
  }

  //==================
  async UpdateOneInAllDatabase(filter, update) {
    const dynamicComputationPromises = [];
    this.model.forEach((modelRef) => {
      dynamicComputationPromises.push({ fn: modelRef.findOneAndUpdate.bind(modelRef), params: [filter, update, { new: true }], chain: {} });
    });
    return await this.runLargeComputations(dynamicComputationPromises);
  }

  //==================
  async UpdateByIdInAllDatabase(id, update) {
    const dynamicComputationPromises = [];
    this.model.forEach((modelRef) => {
      dynamicComputationPromises.push({ fn: modelRef.findByIdAndUpdate.bind(modelRef), params: [id, update, { new: true }], chain: {} });
    });
    return await this.runLargeComputations(dynamicComputationPromises);
  }

  async findByIdInAllDatabaseAndDelete(id) {
    const dynamicComputationPromises = [];
    this.model.forEach((modelRef) => {
      dynamicComputationPromises.push({ fn: modelRef.findByIdAndDelete.bind(modelRef), params: [id], chain: {} });
    });
    return await this.runLargeComputations(dynamicComputationPromises);
  }

  async findOneInAllDatabaseAndDelete(filter) {
    const dynamicComputationPromises = [];
    this.model.forEach((modelRef) => {
      dynamicComputationPromises.push({ fn: modelRef.findOneAndDelete.bind(modelRef), params: [filter], chain: {} });
    });
    return await this.runLargeComputations(dynamicComputationPromises);
  }

  //=======================
  async write(data) {
    // round-robin: pick the next model and stamp the document with its db index
    const currentModel = this.model[MongoModel.currentIndex];
    data["dbIndex"] = MongoModel.currentIndex;
    MongoModel.currentIndex = (MongoModel.currentIndex + 1) % this.model.length;
    if (Mongoplus.readonlymodels.includes(currentModel)) {
      // this model is readonly, retry with the next one
      return this.write(data);
    }

    try {
      let dataToWrite = new currentModel(data);
      return await dataToWrite.save();
    } catch (error) {
      throw error;
    }
  }

  //==================
  async findOne(dbIndex, filter, chain = {}) {
    const currentModel = this.model[dbIndex];
    if (chain.skip && chain.limit && chain.sort) {
      return currentModel.findOne(filter).skip(chain.skip).limit(chain.limit).sort(chain.sort);
    } else if (chain.skip && chain.limit) {
      return currentModel.findOne(filter).skip(chain.skip).limit(chain.limit);
    } else if (chain.skip) {
      return currentModel.findOne(filter).skip(chain.skip);
    } else if (chain.limit) {
      return currentModel.findOne(filter).limit(chain.limit);
    } else {
      return currentModel.findOne(filter);
    }
  }

  //===============
  async find(dbIndex, filter, chain = {}) {
    const currentModel = this.model[dbIndex];
    if (chain.skip && chain.limit && chain.sort) {
      return currentModel.find(filter).skip(chain.skip).limit(chain.limit).sort(chain.sort);
    } else if (chain.skip && chain.limit) {
      return currentModel.find(filter).skip(chain.skip).limit(chain.limit);
    } else if (chain.skip) {
      return currentModel.find(filter).skip(chain.skip);
    } else if (chain.limit) {
      return currentModel.find(filter).limit(chain.limit);
    } else {
      return currentModel.find(filter);
    }
  }

  //=======================
  async findById(dbIndex, id, chain = {}) {
    const currentModel = this.model[dbIndex];
    if (chain.skip && chain.limit && chain.sort) {
      return currentModel.findById(id).skip(chain.skip).limit(chain.limit).sort(chain.sort);
    } else if (chain.skip && chain.limit) {
      return currentModel.findById(id).skip(chain.skip).limit(chain.limit);
    } else if (chain.skip) {
      return currentModel.findById(id).skip(chain.skip);
    } else if (chain.limit) {
      return currentModel.findById(id).limit(chain.limit);
    } else {
      return currentModel.findById(id);
    }
  }

  //====================
  async findByIdAndUpdate(dbIndex, id, update) {
    const currentModel = this.model[dbIndex];
    return currentModel.findByIdAndUpdate(id, update, { new: true });
  }

  //===============
  async findByIdAndDelete(dbIndex, id) {
    const currentModel = this.model[dbIndex];
    return currentModel.findByIdAndDelete(id);
  }

  //===========
  async findOneAndUpdate(dbIndex, filter, update) {
    const currentModel = this.model[dbIndex];
    return currentModel.findOneAndUpdate(filter, update, { new: true });
  }

  //=============
  async aggregate(dbIndex, filter) {
    const currentModel = this.model[dbIndex];
    return currentModel.aggregate(filter);
  }

  //===========
  async watch(dbIndex) {
    return this.model[dbIndex].watch();
  }

  //================

  getNextModel() {
    const currentModel = this.model[MongoModel.currentIndex];
    const written = MongoModel.currentIndex;
    MongoModel.currentIndex = (MongoModel.currentIndex + 1) % this.model.length;
    return [currentModel, written];
  }

  // Runs the queued { fn, params, chain } computations against every database
  // concurrently and merges the results.
  async runLargeComputations(computationPairs) {
    try {
      const startTime = performance.now();

      // Execute all computation functions concurrently using Promise.all
      const results = await Promise.all(
        computationPairs.map(async pair => {
          const chain = pair.chain;
          if (chain.skip && chain.limit && chain.sort) {
            return pair.fn(...pair.params).skip(chain.skip).limit(chain.limit).sort(chain.sort);
          } else if (chain.skip && chain.limit) {
            return pair.fn(...pair.params).skip(chain.skip).limit(chain.limit);
          } else if (chain.skip) {
            return pair.fn(...pair.params).skip(chain.skip);
          } else if (chain.limit) {
            return pair.fn(...pair.params).limit(chain.limit);
          } else {
            return pair.fn(...pair.params);
          }
        })
      );

      const endTime = performance.now();
      const totalTime = endTime - startTime;

      return { results: [].concat(...results), totalTime };
    } catch (error) {
      console.error('Error:', error);
      throw error; // Rethrow the error if needed
    }
  }
}

module.exports = Mongoplus;
package/package.json
ADDED
@@ -0,0 +1,15 @@

{
  "name": "mongoplusplus",
  "version": "1.0.0",
  "description": "A mongoose extended library for building complex data models in a multi-tenant architecture",
  "main": "mongoplus.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "somen das(somen6562@gmail.com)",
  "license": "ISC",
  "dependencies": {
    "mongoose": "^6.3.5",
    "mongoose-sequence": "^5.3.1"
  }
}