mongoplusplus 1.0.4 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +187 -169
- package/index.d.ts +144 -0
- package/mongoplus.js +476 -320
- package/package.json +54 -41
- package/test.js +57 -0
- package/tsconfig.json +19 -0
package/mongoplus.js
CHANGED
@@ -1,320 +1,476 @@
 const mongoose = require('mongoose');
 class Mongoplus {
     constructor(mongoURI) {

         this.mongoURI = mongoURI;
         this.allConnections = [];
         this.currentIndex = 0;
         if (this.mongoURI.filter((uri) => uri.startsWith("readonly")).length == this.mongoURI.length) {
             throw new Error('Some of your URIs must be writable. If it is a mistake remove the `readonly:` flag from your urls')
         }

     }
     static readonlydbs = []
     static readonlymodels = [] // Define currentIndex to keep track of the current URI
     Schema(schema) {
         return mongoose.Schema(schema)
     }
     addIndex(schema, indextype) {
         return schema.index(indextype)
     }
     getNextMongoURI() {
         const uri = this.mongoURI[this.currentIndex];
         this.currentIndex = (this.currentIndex + 1) % this.mongoURI.length;
         return uri;
     }

     connectToAll() {
         for (let i = 0; i < this.mongoURI.length; i++) {

             const uri = this.mongoURI[i].replaceAll("readonly:", '');
             const con = mongoose.createConnection(uri, {
                 useNewUrlParser: true,
                 useUnifiedTopology: true,
             });

             this.allConnections.push(con);
             if (this.mongoURI[i].startsWith('readonly:')) {

                 Mongoplus.readonlydbs.push(con)
             }
         }

         return this.allConnections;
     }

     buildModel(name, schema) {
         if (!Object.keys(schema.obj).includes("dbIndex")) {
             throw new Error(`[!]Error : < dbIndex > must be present in your schema like dbIndex:{
                 type: Number,
                 required: true
             } `)
         }
         if (this.allConnections.length <= 0) {
             throw new Error(`[!]Error : All connections should be made first use the code
             (async () => {await mongodb.connectToAll();})(); to init connections here mongodb is the class init variable`)
         }
         const allConnections = this.allConnections;
         const model = [];
         //console.groupCollapsed("====>",Mongoplus.readonlydbs);
         for (let i = 0; i < allConnections.length; i++) {
             const mongooseConnection = allConnections[i];
             var currentm = mongooseConnection.model(name, schema)
             model.push(currentm);
             //console.count(Mongoplus.readonlydbs[i]);
             if (Mongoplus.readonlydbs.includes(allConnections[i])) {

                 Mongoplus.readonlymodels.push(currentm)
             }
         }
         console.log("REadonly ", Mongoplus.readonlymodels)
         return new MongoModel(model, schema, Mongoplus.readonlymodels);
     }
 }

 class MongoModel {
     constructor(model, s, readonlydbs) {
         if (!Array.isArray(model)) {
             throw new Error('Model should be an array');
         }
         this.model = model;
         this.readonlydbs = readonlydbs
         this.s = s


     }
     static currentIndex = 0
     //===================

     async findInAllDatabase(filter, chain = {}) {
         const dynamicComputationPromises = [];
         this.model.forEach((modelRef) => {
             dynamicComputationPromises.push({ fn: modelRef.find.bind(modelRef), params: [filter], chain: chain });
         });
         return await this.runLargeComputations(dynamicComputationPromises);
     }
     async aggregateInAllDatabase(filter, chain = {}) {
         const dynamicComputationPromises = [];
         this.model.forEach((modelRef) => {
             dynamicComputationPromises.push({ fn: modelRef.aggregate.bind(modelRef), params: [filter], chain: chain });
         });
         return await this.runLargeComputations(dynamicComputationPromises);
     }
     //==================
     async writeInAllDatabase(data) {
         data["dbIndex"] = -1
         const dynamicComputationPromises = [];
         modellist = this.model
         //this.readonlydbs.forEach((i)=>{modellist.splice(i,1,null)})

         for (let i = 0; i < this.model.length; i++) {
             if (Mongoplus.readonlymodels.includes(this.model[i])) continue;
             var x = new this.model[i](data)

             dynamicComputationPromises.push(await x.save());


         }

         return [].concat(dynamicComputationPromises);

     }
     //==================
     async UpdateOneInAllDatabase(filter, update) {

         const dynamicComputationPromises = [];
         this.model.forEach((modelRef) => {

             dynamicComputationPromises.push({ fn: modelRef.findOneAndUpdate.bind(modelRef), params: [filter, update, { new: true }], chain: {} });
         });
         return await this.runLargeComputations(dynamicComputationPromises);

     }
     //==================
     async UpdateByIdInAllDatabase(id, update) {

         const dynamicComputationPromises = [];
         this.model.forEach((modelRef) => {

             dynamicComputationPromises.push({ fn: modelRef.findByIdAndUpdate.bind(modelRef), params: [id, update, { new: true }], chain: {} });
         });
         return await this.runLargeComputations(dynamicComputationPromises);

     }
     async findByIdInAllDatabaseAndDelete(id) {

         const dynamicComputationPromises = [];
         this.model.forEach((modelRef) => {

             dynamicComputationPromises.push({ fn: modelRef.findByIdAndDelete.bind(modelRef), params: [id], chain: {} });
         });
         return await this.runLargeComputations(dynamicComputationPromises);

     }
     async findOneInAllDatabaseAndDelete(filter) {

         const dynamicComputationPromises = [];
         this.model.forEach((modelRef) => {

             dynamicComputationPromises.push({ fn: modelRef.findOneAndDelete.bind(modelRef), params: [filter], chain: {} });
         });
         return await this.runLargeComputations(dynamicComputationPromises);

     }
     //=======================
     async write(data) {


         const currentModel = this.model[MongoModel.currentIndex];
         data["dbIndex"] = MongoModel.currentIndex;
         MongoModel.currentIndex = (MongoModel.currentIndex + 1) % this.model.length;
         if (Mongoplus.readonlymodels.includes(currentModel)) {
             this.write(data)
             //("This model is readonly");

         }


         try {

             let dataToWrite = new currentModel(data)
             return await dataToWrite.save()
         } catch (error) {
             throw error
         }

     }
     //==================

-    async
-
-
-
-
-    }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    //
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    return
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    async bulkWrite(data, options = {}) {
+        // Default options
+        const {
+            batchSize = 1000, // Process 1000 items per batch by default
+            concurrentBatches = true // Run batches concurrently or sequentially
+        } = options;
+
+        if (!data || data.length === 0) return [];
+
+        // 1. Identify writable models
+        const writableModels = this.model.filter(m => !Mongoplus.readonlymodels.includes(m));
+        const numDBs = writableModels.length;
+
+        if (numDBs === 0) {
+            throw new Error("No writable databases available.");
+        }
+
+        // Split data into batches
+        const batches = [];
+        for (let i = 0; i < data.length; i += batchSize) {
+            batches.push(data.slice(i, i + batchSize));
+        }
+
+        console.log(`[Mongoplus] Processing ${data.length} items in ${batches.length} batch(es) of max ${batchSize} items`);
+
+        const finalRetryArray = [];
+        const allResults = [];
+
+        // Process function for a single batch
+        const processBatch = async (batchData, batchNumber) => {
+            // 2. Internal Bucket Distribution & Op Transformation
+            const buckets = Array.from({ length: numDBs }, () => []);
+
+            batchData.forEach((item, index) => {
+                const bucketIndex = index % numDBs;
+                const writableModel = writableModels[bucketIndex];
+
+                // Find the actual dbIndex in the full model array
+                const dbIdx = this.model.indexOf(writableModel);
+
+                // Clone to avoid mutating original data
+                const itemCopy = { ...item, dbIndex: dbIdx };
+
+                // Build filter for updateOne - require _id or explicit id field
+                let filter;
+                if (itemCopy._id) {
+                    filter = { _id: itemCopy._id };
+                } else if (itemCopy.id) {
+                    filter = { _id: itemCopy.id };
+                    itemCopy._id = itemCopy.id; // Normalize to _id
+                } else {
+                    // For new documents without ID, generate one
+                    const mongoose = require('mongoose');
+                    itemCopy._id = new mongoose.Types.ObjectId();
+                    filter = { _id: itemCopy._id };
+                }
+
+                buckets[bucketIndex].push({
+                    updateOne: {
+                        filter: filter,
+                        update: { $set: itemCopy },
+                        upsert: true
+                    }
+                });
+            });
+
+            // 3. Transaction Runner with Retry Logic
+            const runTransactionWithRetry = async (model, ops, modelIndex) => {
+                let session;
+                try {
+                    session = await model.db.startSession();
+                } catch (sessionError) {
+                    throw new Error(`[Mongoplus] Database ${modelIndex} is a standalone instance. Transactions (required for bulkWriteZipper) only work on Replica Sets. \nError: ${JSON.stringify(sessionError)}`);
+
+
+                }
+                const attemptWrite = async () => {
+                    let result;
+                    await session.withTransaction(async () => {
+                        result = await model.bulkWrite(ops, { session, ordered: false });
+                    });
+                    return result;
+                };
+
+                try {
+                    // First Attempt
+                    return await attemptWrite();
+                } catch (firstError) {
+                    console.warn(`[Mongoplus] Batch ${batchNumber} failed for ${model.modelName} (DB ${modelIndex}). Retrying once...`);
+                    try {
+                        // Second Attempt (Retry)
+                        return await attemptWrite();
+                    } catch (retryError) {
+                        // Fail: Store in retry array for the final error report
+                        finalRetryArray.push({
+                            batch: batchNumber,
+                            model: model.modelName,
+                            dbIndex: modelIndex,
+                            opsCount: ops.length,
+                            data: ops.map(o => o.updateOne.update.$set),
+                            error: retryError.message
+                        });
+                        throw retryError;
+                    }
+                } finally {
+                    await session.endSession();
+                }
+            };
+
+            // 4. Execute all "Zips" concurrently for this batch
+            const results = await Promise.all(
+                buckets.map((ops, i) => {
+                    if (ops.length === 0) return Promise.resolve(null);
+                    const dbIdx = this.model.indexOf(writableModels[i]);
+                    return runTransactionWithRetry(writableModels[i], ops, dbIdx);
+                })
+            );
+
+            return results.filter(r => r !== null);
+        };
+
+        // Process all batches
+        try {
+            if (concurrentBatches && batches.length > 1) {
+                // Run all batches concurrently (faster but more resource intensive)
+                console.log(`[Mongoplus] Running ${batches.length} batches concurrently`);
+                const batchResults = await Promise.all(
+                    batches.map((batch, idx) => processBatch(batch, idx + 1))
+                );
+                allResults.push(...batchResults.flat());
+            } else {
+                // Run batches sequentially (slower but safer for large datasets)
+                console.log(`[Mongoplus] Running ${batches.length} batches sequentially`);
+                for (let i = 0; i < batches.length; i++) {
+                    const result = await processBatch(batches[i], i + 1);
+                    allResults.push(...result);
+                    console.log(`[Mongoplus] Completed batch ${i + 1}/${batches.length}`);
+                }
+            }
+
+            // Update global rotation index for write() method consistency
+            MongoModel.currentIndex = (MongoModel.currentIndex + data.length) % this.model.length;
+
+            console.log(`[Mongoplus] Successfully processed ${data.length} items across ${numDBs} databases`);
+            return allResults;
+
+        } catch (error) {
+            // Throw a comprehensive error containing the retry array
+            const exception = new Error(`Zipper Bulk Write failed after retries: ${error.message}`);
+            exception.failedBatches = finalRetryArray;
+            exception.originalError = error;
+            throw exception;
+        }
+    }
+    //===================
+
+    async findOne(dbIndex, filter, chain = {}) {
+        var currentModel = this.model[dbIndex]
+
+        if (chain.skip && chain.limit && chain.sort) {
+            currentModel.findOne(filter).skip(chain.skip).limit(chain.limit).sort(chain.sort)
+        } else if (chain.skip && chain.limit) {
+            return currentModel.findOne(filter).skip(chain.skip).limit(chain.limit)
+        }
+        else if (chain.skip) {
+            return currentModel.findOne(filter).skip(chain.skip)
+        }
+
+        else if (chain.limit) {
+            return currentModel.findOne(filter).limit(chain.limit)
+        } else {
+            return currentModel.findOne(filter);
+        }
+
+
+    }
+
+    //===============
+
+    async find(dbIndex, filter, chain = {}) {
+        var currentModel = this.model[dbIndex]
+        // Start with the base query
+        let query = currentModel.find(filter);
+
+        // Dynamically apply chain options if they exist
+        for (const [key, value] of Object.entries(chain)) {
+            if (query[key]) {
+                query = query[key](value);
+            }
+        }
+
+        return query;
+
+
+
+    }
+    //=======================
+    async findById(dbIndex, filter, chain = {}) {
+        const currentModel = this.model[dbIndex];
+
+        // Start with the base query
+        let query = currentModel.findById(filter);
+
+        // Dynamically apply chain options if they exist
+        for (const [key, value] of Object.entries(chain)) {
+            if (query[key]) {
+                query = query[key](value);
+            }
+        }
+
+        return query;
+    }
+
+
+    //====================
+    async findByIdAndUpdate(dbIndex, id, update) {
+        var currentModel = this.model[dbIndex]
+        return currentModel.findByIdAndUpdate(id, update, { new: true });
+    }
+    //===============
+    async findByIdAndDelete(dbIndex, id, update) {
+        var currentModel = this.model[dbIndex]
+        return currentModel.findByIdAndRemove(id, update, { new: true });
+    }
+    //===========
+    async findOneAndUpdate(dbIndex, filter, update) {
+        var currentModel = this.model[dbIndex]
+        return currentModel.findOneAndUpdate(filter, update, { new: true });
+    }
+    //=============
+    async aggregate(dbIndex, filter, update) {
+        var currentModel = this.model[dbIndex]
+        return currentModel.aggregate(filter);
+    }
+    //===========
+    async watch(dbIndex) {
+        return this.model[dbIndex].watch()
+    }
+    //================
+
+
+
+
+
+    getNextModel() {
+        const currentModel = this.model[this.currentIndex];
+        var writen = this.currentIndex
+        this.currentIndex = (this.currentIndex + 1) % this.model.length;
+        return [currentModel, writen];
+    }
+    async runLargeComputations(computationPairs) {
+        try {
+            const startTime = performance.now();
+
+            // Execute all computation functions concurrently using Promise.all
+            const results = await Promise.all(
+                computationPairs.map(async pair => {
+                    var chain = pair.chain;
+                    var query = pair.fn(...pair.params);
+                    // Start with the base query
+
+                    // Dynamically apply chain options if they exist
+                    for (const [key, value] of Object.entries(chain)) {
+                        if (query[key]) {
+                            query = query[key](value);
+                        }
+                    }
+
+                    return query;
+
+                })
+            );
+
+            const endTime = performance.now();
+            const totalTime = endTime - startTime;
+
+            // Process the results as needed
+            // const sum = results.reduce((acc, result) => acc + result, 0);
+
+            return { results: [].concat(...results), totalTime };
+        } catch (error) {
+            console.error('Error:', error);
+            throw error; // Rethrow the error if needed
+        }
+    }
+}
+
+module.exports = Mongoplus;
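
For orientation, here is a minimal usage sketch of the API in this file, including the bulkWrite method added in 1.0.5. It is illustrative only, not from the package docs: the URIs, database name, and schema fields are hypothetical. Per the source above, connectToAll() must be called before buildModel(), every schema must declare a dbIndex field, and bulkWrite runs its batches inside transactions, so it requires replica-set (not standalone) deployments.

    const Mongoplus = require('mongoplusplus');

    // One writable URI plus one read-only mirror, marked with the `readonly:` flag.
    // (Hypothetical hosts; at least one URI must be writable or the constructor throws.)
    const db = new Mongoplus([
        'mongodb://primary.example.com:27017/app',
        'readonly:mongodb://mirror.example.com:27017/app'
    ]);

    (async () => {
        await db.connectToAll(); // must run before buildModel()

        // dbIndex is mandatory; buildModel() throws if the schema lacks it.
        const schema = db.Schema({
            name: String,
            dbIndex: { type: Number, required: true }
        });
        const User = db.buildModel('User', schema);

        await User.write({ name: 'alice' }); // round-robin insert into one writable DB

        // New in 1.0.5: batched, transactional bulk writes (replica sets only).
        await User.bulkWrite(
            [{ name: 'bob' }, { name: 'carol' }],
            { batchSize: 1000, concurrentBatches: true }
        );

        const everywhere = await User.findInAllDatabase({ name: 'alice' });
        console.log(everywhere.results, everywhere.totalTime);
    })();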