@nebulae/event-store-tpi-rx6 1.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env +0 -0
- package/.jshintrc +4 -0
- package/.vscode/settings.json +6 -0
- package/README.md +23 -0
- package/docs/images/microservices_platform.png +0 -0
- package/docs/images/nebula.png +0 -0
- package/docs/images/read_workflow.png +0 -0
- package/docs/images/service_interaction.png +0 -0
- package/docs/images/shell_composition.png +0 -0
- package/docs/images/write_workflow_crud_es.png +0 -0
- package/docs/images/write_workflow_pure_cqrs_es.png +0 -0
- package/docs/tmp/microservices_platform.png +0 -0
- package/docs/tmp/ms-dashboard-devices_intro.png +0 -0
- package/docs/tmp/ms-devices-location-dev-env.png +0 -0
- package/docs/tmp/nebula.png +0 -0
- package/docs/tmp/read_workflow.png +0 -0
- package/docs/tmp/service_interaction.png +0 -0
- package/docs/tmp/shell_composition.png +0 -0
- package/docs/tmp/write_workflow_crud_es.png +0 -0
- package/docs/tmp/write_workflow_pure_cqrs_es.png +0 -0
- package/index.js +9 -0
- package/lib/EventStore.js +172 -0
- package/lib/broker/MqttBroker.js +116 -0
- package/lib/broker/PubSubBroker.js +144 -0
- package/lib/entities/Event.js +47 -0
- package/lib/store/MongoStore.js +455 -0
- package/package.json +68 -0
- package/test/EventStore.js +114 -0
- package/test/broker/MqttBroker.js +129 -0
- package/test/broker/PubSubBroker.js +125 -0
- package/test/store/MongoStore.js +509 -0
|
@@ -0,0 +1,455 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const Rx = require('rxjs');
|
|
4
|
+
const {
|
|
5
|
+
mergeMap,
|
|
6
|
+
mapTo,
|
|
7
|
+
switchMap,
|
|
8
|
+
pluck,
|
|
9
|
+
map,
|
|
10
|
+
concatMap,
|
|
11
|
+
concatAll,
|
|
12
|
+
filter,
|
|
13
|
+
take,
|
|
14
|
+
toArray
|
|
15
|
+
} = require('rxjs/operators');
|
|
16
|
+
const MongoClient = require('mongodb').MongoClient;
|
|
17
|
+
const Event = require('../entities/Event');
|
|
18
|
+
|
|
19
|
+
class MongoStore {
  /**
   * Builds a MongoStore.
   * @param {Object} config
   * @param {string} config.url Mongo connection string
   * @param {string} config.eventStoreDbName base name (prefix) of the per-month event-store DBs
   * @param {string} config.aggregatesDbName name of the DB holding the Aggregates/Acknowledges collections
   */
  constructor({ url, eventStoreDbName, aggregatesDbName }) {
    this.url = url;
    this.eventStoreDbName = eventStoreDbName;
    this.aggregatesDbName = aggregatesDbName;
  }

  /**
   * Starts the DB connection.
   * Returns an Observable that resolves to the connection result message.
   */
  start$() {
    return Rx.bindNodeCallback(MongoClient.connect)(this.url).pipe(
      map(client => {
        this.mongoClient = client;
        this.aggregatesDb = this.mongoClient.db(this.aggregatesDbName);
        return `MongoStore DB connected`;
      })
    );
  }

  /**
   * Stops the DB connection.
   * Returns an Observable that resolves to a text result of the closing.
   */
  stop$() {
    return Rx.Observable.create(observer => {
      // NOTE(review): close() returns a Promise that is deliberately not awaited;
      // the observable completes as soon as the close has been requested.
      this.mongoClient.close();
      observer.next('Mongo DB client closed');
      observer.complete();
    });
  }

  /**
   * Pushes an event into the store.
   * Returns an Observable that resolves to {aggregate, event, versionTimeStr}
   * where:
   *  - aggregate = current aggregate state
   *  - event = persisted event
   *  - versionTimeStr = EventStore date index (YYYYMM) where the event was stored
   *
   * Ephemeral events are not persisted; they short-circuit to the result object.
   * @param {Event} event
   */
  pushEvent$(event) {
    if (!event.timestamp) {
      event.timestamp = Date.now();
    }
    return this.incrementAggregateVersionAndGet$(
      event.at,
      event.aid,
      event.timestamp
    ).pipe(
      mergeMap(([aggregate, versionTimeStr]) => {
        event.av = aggregate.version;
        const eventStoreDb = this.mongoClient.db(
          `${this.eventStoreDbName}_${versionTimeStr}`
        );
        const collection = eventStoreDb.collection('Events');
        return event.ephemeral
          ? Rx.of({ aggregate, event, versionTimeStr })
          : Rx.defer(() =>
              collection.insertOne(event, {
                // FIX: w must be numeric (or 'majority'); the string '1' is treated
                // by MongoDB as a write-concern mode/tag-set name, not an ack count.
                // Numeric 1 is also what the rest of this file uses.
                writeConcern: { w: 1, wtimeout: 2000, j: true }
              })
            ).pipe(mapTo({ aggregate, event, versionTimeStr }));
      })
    );
  }

  /**
   * Increments the aggregate version and returns the aggregate itself.
   * Returns an Observable that resolves to an array: [Aggregate, TimeString]
   * where TimeString (YYYYMM) is the name postfix of the EventStore DB where
   * this aggregate version must be persisted.
   *
   * NOTE: the original DB-backed version counter (findOneAndUpdate with $inc)
   * was disabled because of excess DB usage; the version is now derived from
   * the wall clock in seconds. CAVEAT: events pushed within the same second
   * share the same version, and the aggregate's `index` map is never written.
   * @param {string} type
   * @param {string} id
   * @param {number} versionTime event timestamp; defaults to now when omitted
   */
  incrementAggregateVersionAndGet$(type, id, versionTime) {
    // if versionTime is not provided (production), generate it from the current time
    if (!versionTime) {
      versionTime = Date.now();
    }
    const versionDate = new Date(versionTime);
    // YYYYMM postfix; getMonth() is 0-indexed, hence the +1, zero-padded to 2 digits
    const versionTimeStr =
      versionDate.getFullYear() +
      ('0' + (versionDate.getMonth() + 1)).slice(-2);

    return Rx.of([
      // aggregate object with only the version field, calculated from epoch seconds
      // (Math.floor replaces the parseInt(number) truncation hack)
      { version: Math.floor(Date.now() / 1000) },
      versionTimeStr
    ]);
  }

  /**
   * Queries an Aggregate in the store.
   * Returns an Observable that resolves to the Aggregate, or undefined when
   * not found and createIfNotExists is false.
   * @param {string} type
   * @param {string} id
   * @param {boolean} createIfNotExists if true, creates the aggregate if not found
   * @param {number} versionTime creation time to set, ONLY FOR TESTING
   */
  getAggreate$(type, id, createIfNotExists = false, versionTime) {
    // if versionTime is not provided (production), generate it from the current time
    if (!versionTime) {
      versionTime = Date.now();
    }
    const collection = this.aggregatesDb.collection('Aggregates');
    return Rx.bindNodeCallback(collection.findOneAndUpdate.bind(collection))(
      {
        type,
        id
      },
      {
        // only stamps creationTime when the upsert actually inserts
        $setOnInsert: {
          creationTime: versionTime
        }
      },
      {
        upsert: createIfNotExists,
        returnOriginal: false
      }
    ).pipe(map(result => (result && result.value ? result.value : undefined)));
  }

  /**
   * Finds all events of a specific aggregate.
   * @param {String} aggregateType Aggregate type
   * @param {String} aggregateId Aggregate Id
   * @param {number} version version to recover from (exclusive), default = 0
   * @param {number} limit max number of events to return, default = 20
   *
   * Returns an Observable that emits each found event one by one.
   * Throws (as an Observable error) when the aggregate does not exist.
   */
  getEvents$(aggregateType, aggregateId, version = 0, limit = 20) {
    const minVersion = version + 1;
    const maxVersion = version + limit;
    return this.getAggreate$(aggregateType, aggregateId).pipe(
      map(aggregate => {
        if (!aggregate) {
          throw new Error(
            `Aggregate not found: aggregateType=${aggregateType} aggregateId=${aggregateId}`
          );
        }
        return aggregate;
      }),
      switchMap(aggregate =>
        // ROBUSTNESS: aggregates created by the current (DB-less) version counter
        // never get an index map; emit nothing for them instead of crashing on
        // Object.entries(undefined).
        Rx.from(Object.entries(aggregate.index || {})).pipe(
          // keep only the monthly frames that can contain versions >= minVersion
          filter(([time, index]) => minVersion <= index.endVersion),
          map(([time, index]) => {
            const eventStoreDb = this.mongoClient.db(
              `${this.eventStoreDbName}_${time}`
            );
            const collection = eventStoreDb.collection('Events');
            // clamp the requested [minVersion, maxVersion] window to this
            // month's [initVersion, endVersion] range
            const lowLimit =
              minVersion > index.initVersion ? minVersion : index.initVersion;
            const highLimit =
              maxVersion < index.endVersion ? maxVersion : index.endVersion;
            const realLimit = highLimit - lowLimit + 1;
            return Rx.bindNodeCallback(collection.find.bind(collection))({
              at: aggregateType,
              aid: aggregateId,
              av: { $gt: version }
            }).pipe(
              // emit the cursor once per expected event in this month...
              concatMap(cursor =>
                Rx.range(lowLimit, realLimit).pipe(mapTo(cursor))
              ),
              // ...and pull the next document for each emission (undefined when drained)
              concatMap(cursor =>
                Rx.defer(() => this.extractNextFromMongoCursor(cursor))
              )
            );
          }),
          concatAll(),
          // drop the undefined placeholders produced by exhausted cursors
          filter(data => data),
          take(limit)
        )
      )
    );
  }

  /**
   * Finds all events of a specific aggregate type having taken place but not
   * yet acknowledged by the given key.
   * @param {String} aggregateType Aggregate type
   * @param {string} key process key (eg. microservice name) that acknowledged the events
   *
   * Returns an Observable that emits each found event one by one.
   */
  retrieveUnacknowledgedEvents$(aggregateType, key) {
    /*
    1 - retrieve the latest acknowledged timestamp for the given aggregateType and key
        (zero when the key has never acknowledged anything)
    2 - list all existing event-store DBs (one per month), sorted by date ascending,
        keeping those at or after the month of the timestamp from (1)
    3 - iterate every DB in order and emit every event of the given aggregate type
        newer than the timestamp from (1)
    */

    // Observable that resolves to the YYYYMM dates of the event-store databases, ASC
    const findAllDatabases$ = Rx.defer(() =>
      this.aggregatesDb.admin().listDatabases()
    ).pipe(
      mergeMap(response => Rx.from(response.databases)),
      pluck('name'),
      filter(dbName => dbName.indexOf(this.eventStoreDbName) !== -1),
      map(dbName => dbName.replace(`${this.eventStoreDbName}_`, '')),
      // FIX: always pass the radix to parseInt
      map(dbName => parseInt(dbName, 10)),
      toArray(),
      // FIX: the default sort is lexicographic; compare numerically so the
      // monthly DBs are visited in true chronological order
      map(arr => arr.sort((a, b) => a - b)),
      mergeMap(array => Rx.from(array))
    );

    return this.findLatestAcknowledgedTimestamp$(aggregateType, key).pipe(
      // get the latest ack timestamp
      mergeMap(latestAckTimeStamp => {
        const date = new Date(latestAckTimeStamp);
        const strDate =
          date.getFullYear() + ('0' + (date.getMonth() + 1)).slice(-2);
        const intDate = parseInt(strDate, 10);
        // Find all DATABASES that have events after the latest ack timestamp,
        // in ASC order, resolving to the Events collection of each DB
        return findAllDatabases$.pipe(
          filter(dbDate => dbDate >= intDate),
          map(dbDate =>
            this.mongoClient.db(`${this.eventStoreDbName}_${dbDate}`)
          ),
          map(db => db.collection('Events')),
          map(evtCollection => {
            return { evtCollection, latestAckTimeStamp };
          })
        );
      }),
      map(({ evtCollection, latestAckTimeStamp }) => {
        // Transform each collection into an inner Observable that iterates its
        // cursor, emitting every matching event in order
        return Rx.of(
          evtCollection.find({
            at: aggregateType,
            timestamp: { $gt: latestAckTimeStamp }
          })
        ).pipe(mergeMap(cursor => this.extractAllFromMongoCursor$(cursor)));
      }),
      // concatAll guarantees each database is fully drained before moving to the
      // next one; this behaves like a synchronous forEach over the databases
      concatAll()
    );
  }

  /**
   * Observable that resolves to the latest timestamp acknowledged for the given key,
   * or 0 when no acknowledge record exists yet.
   * @param {string} aggregateType Aggregate type
   * @param {string} key process key (eg. microservice name)
   */
  findLatestAcknowledgedTimestamp$(aggregateType, key) {
    // FIND document query
    const findSearchQuery = { at: aggregateType, key };
    return Rx.defer(() =>
      this.aggregatesDb
        .collection('Acknowledges')
        .findOne(findSearchQuery)
    ).pipe(
      map(findResult => {
        return findResult // checks if the document was found
          ? findResult.ts // if found, resolve to the latest acknowledged timestamp
          : 0; // if not found, resolve to zero as the timestamp
      })
    );
  }

  /**
   * Finds Aggregates that were created after the given date.
   * Returns an Observable that publishes every aggregate found.
   * @param {string} type
   * @param {number} createTimestamp creation time lower bound (exclusive)
   */
  findAgregatesCreatedAfter$(type, createTimestamp = 0) {
    return Rx.Observable.create(async observer => {
      const collection = this.aggregatesDb.collection('Aggregates');
      const cursor = collection.find({
        creationTime: { $gt: createTimestamp },
        type: type
      });
      // drain the cursor one document at a time
      let obj = await this.extractNextFromMongoCursor(cursor);
      while (obj) {
        observer.next(obj);
        obj = await this.extractNextFromMongoCursor(cursor);
      }

      observer.complete();
    });
  }

  /**
   * Ensures the existence of a registry on the ack collection for an
   * aggregate-type/key pair (upserts with ts=0 on first creation).
   * Returns an Observable that resolves to a confirmation message.
   * @param {string} aggregateType aggregate type
   * @param {string} key backend key
   */
  ensureAcknowledgeRegistry$(aggregateType, key) {
    // collection to use
    const collection = this.aggregatesDb.collection('Acknowledges');

    // UPSERT queries
    const updateSearchQuery = {
      at: aggregateType,
      key,
    };
    const updateQuery = {
      $setOnInsert: { ts: 0 }, // initial ack timestamp, only on insert
      $set: { ets: Date.now() } // ensure timestamp, refreshed on every call
    };

    const writeConcern = { w: 1, wtimeout: 500, j: true };
    const updateOps = {
      upsert: true,
      writeConcern
    };

    return Rx.defer(() =>
      collection.updateOne(updateSearchQuery, updateQuery, updateOps)
    ).pipe(
      map(updateResult => `ensured Acknowledge Registry: ${JSON.stringify({ aggregateType, key })}`)
    );
  }

  /**
   * Persists the event acknowledge.
   * Returns an Observable that resolves to the same given event.
   *
   * NOTE: the DB-backed acknowledge persistence (conditional updateOne on the
   * Acknowledges collection) was disabled because of excessive DB usage; the
   * event is currently passed through untouched.
   * @param {Event} event event to acknowledge
   * @param {string} key process key (eg. microservice name) that is acknowledging the event
   */
  acknowledgeEvent$(event, key) {
    return Rx.of(event);
  }

  /**
   * Extracts every item in the mongo cursor, one by one.
   * Returns an Observable that emits each document and completes when the
   * cursor is exhausted.
   * @param {*} cursor
   */
  extractAllFromMongoCursor$(cursor) {
    return Rx.Observable.create(async observer => {
      let obj = await this.extractNextFromMongoCursor(cursor);
      while (obj) {
        observer.next(obj);
        obj = await this.extractNextFromMongoCursor(cursor);
      }
      observer.complete();
    });
  }

  /**
   * Extracts the next value from a mongo cursor if available, returns undefined otherwise.
   * @param {*} cursor
   */
  async extractNextFromMongoCursor(cursor) {
    const hasNext = await cursor.hasNext();
    if (hasNext) {
      const obj = await cursor.next();
      return obj;
    }
    return undefined;
  }
}

module.exports = MongoStore;
|
package/package.json
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
{
|
|
2
|
+
"_args": [
|
|
3
|
+
[
|
|
4
|
+
"@nebulae/event-store-tpi-rx6@1.1.2",
|
|
5
|
+
"/Users/sebastianmolano/NebulaE/Projects/TPI_LEGACY/ms-service/backend/service-core"
|
|
6
|
+
]
|
|
7
|
+
],
|
|
8
|
+
"_from": "@nebulae/event-store-tpi-rx6@1.1.2",
|
|
9
|
+
"_id": "@nebulae/event-store-tpi-rx6@1.1.2",
|
|
10
|
+
"_inBundle": false,
|
|
11
|
+
"_integrity": "sha512-HrV+dbSFQ/C9AC0cpKz3k8cHMSrkMDBmDVqpM2P/en+WRMw6mPNVxNbcwTkCCZxXDxli/oFXBXQKmSpdi6LPGw==",
|
|
12
|
+
"_location": "/@nebulae/event-store-tpi-rx6",
|
|
13
|
+
"_phantomChildren": {},
|
|
14
|
+
"_requested": {
|
|
15
|
+
"type": "version",
|
|
16
|
+
"registry": true,
|
|
17
|
+
"raw": "@nebulae/event-store-tpi-rx6@1.1.2",
|
|
18
|
+
"name": "@nebulae/event-store-tpi-rx6",
|
|
19
|
+
"escapedName": "@nebulae%2fevent-store-tpi-rx6",
|
|
20
|
+
"scope": "@nebulae",
|
|
21
|
+
"rawSpec": "1.1.2",
|
|
22
|
+
"saveSpec": null,
|
|
23
|
+
"fetchSpec": "1.1.2"
|
|
24
|
+
},
|
|
25
|
+
"_requiredBy": [
|
|
26
|
+
"/"
|
|
27
|
+
],
|
|
28
|
+
"_resolved": "https://registry.npmjs.org/@nebulae/event-store-tpi-rx6/-/event-store-tpi-rx6-1.1.2.tgz",
|
|
29
|
+
"_spec": "1.1.2",
|
|
30
|
+
"_where": "/Users/sebastianmolano/NebulaE/Projects/TPI_LEGACY/ms-service/backend/service-core",
|
|
31
|
+
"author": {
|
|
32
|
+
"name": "sebastian.molano@nebulae.com.co"
|
|
33
|
+
},
|
|
34
|
+
"bugs": {
|
|
35
|
+
"url": "https://github.com/NebulaEngineering/event-store-tpi-rx6/issues"
|
|
36
|
+
},
|
|
37
|
+
"dependencies": {
|
|
38
|
+
"@google-cloud/pubsub": "^0.18.0",
|
|
39
|
+
"async-mqtt": "^1.0.1",
|
|
40
|
+
"dotenv": "^5.0.1",
|
|
41
|
+
"mongodb": "^3.0.5",
|
|
42
|
+
"rxjs": "^6.3.3",
|
|
43
|
+
"uuid": "^3.2.1"
|
|
44
|
+
},
|
|
45
|
+
"description": "Event Store Lib for NebulaE Microservices",
|
|
46
|
+
"devDependencies": {
|
|
47
|
+
"array-intersection": "^0.1.2",
|
|
48
|
+
"chai": "^4.1.2",
|
|
49
|
+
"mocha": "^5.0.5"
|
|
50
|
+
},
|
|
51
|
+
"homepage": "https://github.com/NebulaEngineering/event-store-tpi-rx6#readme",
|
|
52
|
+
"keywords": [
|
|
53
|
+
"EventSourcing",
|
|
54
|
+
"EventStore",
|
|
55
|
+
"MicroServices"
|
|
56
|
+
],
|
|
57
|
+
"license": "MIT",
|
|
58
|
+
"main": "index.js",
|
|
59
|
+
"name": "@nebulae/event-store-tpi-rx6",
|
|
60
|
+
"repository": {
|
|
61
|
+
"type": "git",
|
|
62
|
+
"url": "git+https://github.com/NebulaEngineering/event-store-tpi-rx6.git"
|
|
63
|
+
},
|
|
64
|
+
"scripts": {
|
|
65
|
+
"test": "mocha --recursive --reporter spec"
|
|
66
|
+
},
|
|
67
|
+
"version": "1.1.2"
|
|
68
|
+
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
// // TEST LIBS
|
|
2
|
+
// const assert = require('assert');
|
|
3
|
+
// const Rx = require('rxjs');
|
|
4
|
+
|
|
5
|
+
// //LIBS FOR TESTING
|
|
6
|
+
// const EventStore = require('../lib/EventStore');
|
|
7
|
+
// const Event = require('../lib/entities/Event');
|
|
8
|
+
|
|
9
|
+
// //GLOBAL VARS to use between tests
|
|
10
|
+
// let eventStore = {};
|
|
11
|
+
// let event = new Event('Test', 1, 'TestCreated', { id: 1, name: 'x' }, 'Mocha');
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
// /*
|
|
15
|
+
// NOTES:
|
|
16
|
+
// before run please start mqtt:
|
|
17
|
+
// docker run -it -p 1883:1883 -p 9001:9001 eclipse-mosquitto
|
|
18
|
+
// */
|
|
19
|
+
|
|
20
|
+
// describe('EventStore', function () {
|
|
21
|
+
// describe('Prepare EventStore', function () {
|
|
22
|
+
// it('instance EventStore with MQTT', function (done) {
|
|
23
|
+
// //ENVIRONMENT VARS
|
|
24
|
+
// const brokerUrl = 'mqtt://localhost:1883';
|
|
25
|
+
// const projectId = 'test';
|
|
26
|
+
// const eventsTopic = 'events-store-test';
|
|
27
|
+
// const dBUrl = 'mongodb://localhost:27017';
|
|
28
|
+
// const dBName = 'Test';
|
|
29
|
+
// eventStore = new EventStore(
|
|
30
|
+
// {
|
|
31
|
+
// type: "MQTT",
|
|
32
|
+
// eventsTopic,
|
|
33
|
+
// brokerUrl,
|
|
34
|
+
// projectId,
|
|
35
|
+
// },
|
|
36
|
+
// {
|
|
37
|
+
// type: 'MONGO',
|
|
38
|
+
// connString: dBUrl,
|
|
39
|
+
// databaseName: dBName
|
|
40
|
+
// }
|
|
41
|
+
// );
|
|
42
|
+
// assert.ok(true, 'EventStore constructor worked');
|
|
43
|
+
// return done();
|
|
44
|
+
// });
|
|
45
|
+
// });
|
|
46
|
+
// describe('Publish', function () {
|
|
47
|
+
// it('Publish event', function (done) {
|
|
48
|
+
// let event = new Event('TestCreated', 1, 'Test', 1, 1, { id: 1, name: 'x' }, 'Mocha');
|
|
49
|
+
// eventStore.emitEvent(event)
|
|
50
|
+
// .then(result => {
|
|
51
|
+
// assert.ok(true, 'Event sent');
|
|
52
|
+
// return done();
|
|
53
|
+
// }).catch(error => {
|
|
54
|
+
// return done(error);
|
|
55
|
+
// });
|
|
56
|
+
// });
|
|
57
|
+
// });
|
|
58
|
+
|
|
59
|
+
// describe('retrieve aggregates', function () {
|
|
60
|
+
// it('Retrieve aggregates from beginning', function (done) {
|
|
61
|
+
// eventStore.retrieveNewAggregates('Device', 0, {offset: 0, pageSize: 100})
|
|
62
|
+
// .then(result => {
|
|
63
|
+
// assert.ok(true, 'Aggregates retrieved ');
|
|
64
|
+
// console.log("Success");
|
|
65
|
+
// return done()
|
|
66
|
+
// })
|
|
67
|
+
// .catch(error => {
|
|
68
|
+
// console.log("Fail");
|
|
69
|
+
// return done(error);
|
|
70
|
+
// });
|
|
71
|
+
// //let event = new Event('TestCreated', 1, 'Test', 1, 1, { id: 1, name: 'x' }, 'Mocha');
|
|
72
|
+
// //eventStore.retrieveEvents()
|
|
73
|
+
|
|
74
|
+
// // let event = new Event('TestCreated', 1, 'Test', 1, 1, { id: 1, name: 'x' }, 'Mocha');
|
|
75
|
+
// // eventStore.emitEvent(event)
|
|
76
|
+
// // .then(result => {
|
|
77
|
+
// // assert.ok(true, 'Event sent');
|
|
78
|
+
// // return done();
|
|
79
|
+
// // }).catch(error => {
|
|
80
|
+
// // return done(error);
|
|
81
|
+
// // });
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
// });
|
|
85
|
+
// });
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
// describe('Retrieve', function () {
|
|
89
|
+
// it('Retrieve events', function (done) {
|
|
90
|
+
// return done();
|
|
91
|
+
// //let event = new Event('TestCreated', 1, 'Test', 1, 1, { id: 1, name: 'x' }, 'Mocha');
|
|
92
|
+
// //eventStore.retrieveEvents()
|
|
93
|
+
|
|
94
|
+
// // let event = new Event('TestCreated', 1, 'Test', 1, 1, { id: 1, name: 'x' }, 'Mocha');
|
|
95
|
+
// // eventStore.emitEvent(event)
|
|
96
|
+
// // .then(result => {
|
|
97
|
+
// // assert.ok(true, 'Event sent');
|
|
98
|
+
// // return done();
|
|
99
|
+
// // }).catch(error => {
|
|
100
|
+
// // return done(error);
|
|
101
|
+
// // });
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
// });
|
|
105
|
+
// });
|
|
106
|
+
|
|
107
|
+
// describe('de-prepare Event Store', function () {
|
|
108
|
+
// it('stop EventStore broker', function (done) {
|
|
109
|
+
// eventStore.broker.stopListening();
|
|
110
|
+
// assert.ok(true, 'Broker stopped');
|
|
111
|
+
// return done();
|
|
112
|
+
// });
|
|
113
|
+
// });
|
|
114
|
+
// });
|