@live-change/framework 0.4.42 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/legacy-env-config.js +14 -0
- package/lib/App.js +33 -46
- package/lib/processes/commandExecutor.js +74 -0
- package/lib/processes/eventListener.js +40 -0
- package/lib/processes/triggerExecutor.js +93 -0
- package/lib/runtime/Dao.js +1 -1
- package/lib/runtime/Service.js +1 -339
- package/lib/utils/EventSourcing.js +1 -1
- package/lib/utils/SingleEmitQueue.js +44 -0
- package/lib/utils/SplitEmitQueue.js +67 -0
- package/package.json +2 -1
package/legacy-env-config.js
ADDED
@@ -0,0 +1,14 @@
+
+function legacyEnvConfig(env = process.env) {
+  return {
+    db: {
+      url: env.DB_URL,
+      name: env.DB_NAME,
+      requestTimeout: (+env.DB_REQUEST_TIMEOUT),
+      cache: env.DB_CACHE == "YES",
+      //unobserveDebug: env.UNOBSERVE_DEBUG == "YES",
+    }
+  }
+}
+
+module.exports = legacyEnvConfig
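The new legacy-env-config.js maps the old environment-variable configuration onto the nested config object that the 0.5.x App constructor reads (see the App.js changes below). A minimal usage sketch, assuming the file is required directly from the package root:

// Hypothetical usage; the require path is assumed, not documented in this diff.
const legacyEnvConfig = require('@live-change/framework/legacy-env-config.js')

// With DB_URL, DB_NAME, DB_REQUEST_TIMEOUT and DB_CACHE set in the environment,
// this produces the shape read via config?.db?.* in App.js:
const config = legacyEnvConfig(process.env)
// config == { db: { url: '...', name: '...', requestTimeout: 10000, cache: true } }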
package/lib/App.js
CHANGED
@@ -1,7 +1,6 @@
-const
+const { uidGenerator, randomString } = require('@live-change/uid')
 
 const ReactiveDao = require("@live-change/dao")
-const ReactiveDaoWebsocket = require("@live-change/dao-websocket")
 
 const ServiceDefinition = require("./definition/ServiceDefinition.js")
@@ -28,65 +27,52 @@ const databaseUpdater = require("./updaters/database.js")
 const accessControlFilter = require("./clientSideFilters/accessControlFilter.js")
 const clientSideFilter = require("./clientSideFilters/clientSideFilter.js")
 
+const commandExecutor = require("./processes/commandExecutor.js")
+const triggerExecutor = require("./processes/triggerExecutor.js")
+const eventListener = require('./processes/eventListener.js')
+
 const utils = require('./utils.js')
 
 class App {
 
-  constructor(
-    this.env = env
+  constructor(config = {}) {
     this.config = config
     this.splitEvents = false
 
-    this.requestTimeout =
+    this.requestTimeout = config?.db?.requestTimeout || 10*1000
 
     this.defaultProcessors = [
-
-
-
-
-
-
-
-
-
+      crudGenerator,
+      draftGenerator,
+      reverseRelationProcessor,
+      indexListProcessor,
+      daoPathView,
+      fetchView,
+      accessControl,
+      autoValidation,
+      indexCode
     ]
     this.defaultUpdaters = [
-
+      databaseUpdater
    ]
     this.defaultClientSideFilters = [
-
-
+      accessControlFilter,
+      clientSideFilter
+    ]
+    this.defaultProcesses = [
+      commandExecutor,
+      triggerExecutor,
+      eventListener
    ]
-    const dbDao = new ReactiveDao(process.cwd()+' '+process.argv.join(' '), {
-      remoteUrl: env.DB_URL || "http://localhost:9417/api/ws",
-      protocols: {
-        'ws': ReactiveDaoWebsocket.client
-      },
-      connectionSettings: {
-        queueRequestsWhenDisconnected: true,
-        requestSendTimeout: 2000,
-        requestTimeout: this.requestTimeout,
-        queueActiveRequestsOnDisconnect: false,
-        autoReconnectDelay: 200,
-        logLevel: 1,
-        unobserveDebug: env.UNOBSERVE_DEBUG == "YES"
-      },
-      database: {
-        type: 'remote',
-        generator: ReactiveDao.ObservableList
-      },
-      store: {
-        type: 'remote',
-        generator: ReactiveDao.ObservableList
-      }
-    })
 
-    this.dao =
-    if(process.env.DB_CACHE == "YES") this.dao = new ReactiveDao.DaoCache(dbDao)
+    this.dao = null
 
     this.profileLog = profileLog
 
-    this.databaseName =
+    this.databaseName = config?.db?.name || 'test'
+
+    this.instanceId = randomString(4)
+    this.uidGenerator = uidGenerator(this.instanceId)
   }
 
   createServiceDefinition( definition ) {
@@ -137,7 +123,8 @@ class App {
     await this.profileLog.end(profileOp)
   }
 
-  async startService( serviceDefinition, config ) {
+  async startService( serviceDefinition, config = {}) {
+    if(!config.processes) config.processes = this.defaultProcesses
     console.log("Starting service", serviceDefinition.name, "!")
     const profileOp = await this.profileLog.begin({
       operation: "startService", serviceName: serviceDefinition.name, config
@@ -145,7 +132,7 @@ class App {
     if(!(serviceDefinition instanceof ServiceDefinition))
       serviceDefinition = new ServiceDefinition(serviceDefinition)
     let service = new Service(serviceDefinition, this)
-    await service.start(config
+    await service.start(config)
     console.log("service started", serviceDefinition.name, "!")
     await this.profileLog.end(profileOp)
     return service
@@ -164,7 +151,7 @@ class App {
   }
 
   generateUid() {
-    return
+    return this.uidGenerator()
  }
 
   async clientSideDefinition( service, client, filters ) {
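Taken together, the App.js changes replace the env-driven constructor (which built its own ReactiveDao websocket connection and DaoCache) with a plain config object: the dao now starts as null and must be wired up by the caller, and uid generation comes from @live-change/uid. A rough usage sketch, with the require paths assumed rather than taken from this diff:

const App = require('@live-change/framework/lib/App.js')                       // path assumed
const legacyEnvConfig = require('@live-change/framework/legacy-env-config.js') // path assumed

const app = new App(legacyEnvConfig(process.env))
// app.dao === null here; connecting it to the database is now the caller's responsibility.
const uid = app.generateUid() // delegates to uidGenerator(randomString(4)) from @live-change/uid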
package/lib/processes/commandExecutor.js
ADDED
@@ -0,0 +1,74 @@
+const KeyBasedExecutionQueues = require('../utils/KeyBasedExecutionQueues.js')
+const CommandQueue = require('../utils/CommandQueue.js')
+const SingleEmitQueue = require('../utils/SingleEmitQueue.js')
+const SplitEmitQueue = require('../utils/SplitEmitQueue.js')
+
+async function startCommandExecutor(service, config) {
+  if(!config.runCommands) return
+
+  service.keyBasedExecutionQueues = service.keyBasedExecutionQueues || new KeyBasedExecutionQueues(r => r.key)
+
+  service.commandQueue = new CommandQueue(service.dao, service.databaseName,
+      service.app.splitCommands ? `${service.name}_commands` : 'commands', service.name)
+  for (let actionName in service.actions) {
+    const action = service.actions[actionName]
+    if (action.definition.queuedBy) {
+      const queuedBy = action.definition.queuedBy
+      const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
+          Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k => c[k])) :
+              (c) => JSON.stringify(c[queuedBy]))
+      service.commandQueue.addCommandHandler(actionName, async (command) => {
+        const profileOp = await service.profileLog.begin({
+          operation: 'queueCommand', commandType: actionName,
+          commandId: command.id, client: command.client
+        })
+        const reportFinished = action.definition.waitForEvents ? 'command_' + command.id : undefined
+        const flags = {commandId: command.id, reportFinished}
+        const emit = service.app.splitEvents
+            ? new SplitEmitQueue(service, flags)
+            : new SingleEmitQueue(service, flags)
+        const routine = () => service.profileLog.profile({
+          operation: 'runCommand', commandType: actionName,
+          commandId: command.id, client: command.client
+        }, async () => {
+          const result = await service.app.assertTime('command ' + action.definition.name,
+              action.definition.timeout || 10000,
+              () => action.runCommand(command, (...args) => emit.emit(...args)), command)
+          const events = await emit.commit()
+          if (action.definition.waitForEvents)
+            await service.app.waitForEvents(reportFinished, events, action.definition.waitForEvents)
+          return result
+        })
+        routine.key = keyFunction(command)
+        const promise = service.keyBasedExecutionQueues.queue(routine)
+        await service.profileLog.endPromise(profileOp, promise)
+        return promise
+      })
+    } else {
+      service.commandQueue.addCommandHandler(actionName,
+          (command) => service.profileLog.profile({
+            operation: 'runCommand', commandType: actionName,
+            commandId: command.id, client: command.client
+          }, async () => {
+            const reportFinished = action.definition.waitForEvents ? 'command_' + command.id : undefined
+            const flags = {commandId: command.id, reportFinished}
+            const emit = service.app.splitEvents
+                ? new SplitEmitQueue(service, flags)
+                : new SingleEmitQueue(service, flags)
+            const result = await service.app.assertTime('command ' + action.definition.name,
+                action.definition.timeout || 10000,
+                () => action.runCommand(command, (...args) => emit.emit(...args)), command)
+            const events = await emit.commit()
+            if (action.definition.waitForEvents)
+              await service.app.waitForEvents(reportFinished, events, action.definition.waitForEvents)
+            return result
+          })
+      )
+
+    }
+  }
+
+  service.commandQueue.start()
+}
+
+module.exports = startCommandExecutor
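The queuedBy handling above serializes commands that share a key: the key function is the definition itself when it is a function, otherwise it is built from one or more command properties. A standalone illustration of the same key derivation expression:

const queuedBy = ['user', 'project'] // e.g. action.definition.queuedBy
const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
    Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k => c[k])) :
        (c) => JSON.stringify(c[queuedBy]))

console.log(keyFunction({ user: 'u1', project: 'p7', value: 42 })) // '["u1","p7"]'
// Commands that map to the same key are executed sequentially by KeyBasedExecutionQueues.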
package/lib/processes/eventListener.js
ADDED
@@ -0,0 +1,40 @@
+const EventSourcing = require('../utils/EventSourcing.js')
+
+async function startEventListener(service, config) {
+  if(!config.handleEvents) return
+
+  if(service.app.splitEvents) {
+    service.eventSourcing = new EventSourcing(service.dao, service.databaseName,
+        'events_'+service.name, service.name,
+        { filter: (event) => event.service == service.name })
+  } else {
+    service.eventSourcing = new EventSourcing(service.dao, service.databaseName,
+        'events', service.name,
+        { filter: (event) => event.service == service.name })
+  }
+
+
+  for (let eventName in service.events) {
+    const event = service.events[eventName]
+    service.eventSourcing.addEventHandler(eventName, async (ev, bucket) => {
+      return await service.profileLog.profile({ operation: "handleEvent", eventName, id: ev.id,
+            bucketId: bucket.id, triggerId: bucket.triggerId, commandId: bucket.commandId },
+          () => {
+            console.log("EXECUTING EVENT", ev)
+            return event.execute(ev, bucket)
+          }
+      )
+    })
+    service.eventSourcing.onBucketEnd = async (bucket, handledEvents) => {
+      if(bucket.reportFinished && handledEvents.length > 0) {
+        await service.dao.request(['database', 'update'], service.databaseName, 'eventReports', bucket.reportFinished,[
+          { op: "mergeSets", property: 'finished', values: handledEvents.map(ev => ({ id: ev.id, type: ev.type })) }
+        ])
+      }
+    }
+  }
+
+  service.eventSourcing.start()
+}
+
+module.exports = startEventListener
package/lib/processes/triggerExecutor.js
ADDED
@@ -0,0 +1,93 @@
+const KeyBasedExecutionQueues = require('../utils/KeyBasedExecutionQueues.js')
+const CommandQueue = require('../utils/CommandQueue.js')
+const SingleEmitQueue = require('../utils/SingleEmitQueue.js')
+const SplitEmitQueue = require('../utils/SplitEmitQueue.js')
+
+async function startTriggerExecutor(service, config) {
+  if(!config.runCommands) return
+
+  service.keyBasedExecutionQueues = service.keyBasedExecutionQueues || new KeyBasedExecutionQueues(r => r.key)
+
+  await service.dao.request(['database', 'createTable'], service.databaseName, 'triggerRoutes').catch(e => 'ok')
+
+  service.triggerQueue = new CommandQueue(service.dao, service.databaseName,
+      service.app.splitTriggers ? `${service.name}_triggers` : 'triggers', service.name )
+  for (let triggerName in service.triggers) {
+    const trigger = service.triggers[triggerName]
+    await service.dao.request(['database', 'put'], service.databaseName, 'triggerRoutes',
+        { id: triggerName + '=>' + service.name, trigger: triggerName, service: service.name })
+    if(trigger.definition.queuedBy) {
+      const queuedBy = trigger.definition.queuedBy
+      const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
+          Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k=>c[k])) :
+              (c) => JSON.stringify(c[queuedBy]) )
+      service.triggerQueue.addCommandHandler(triggerName, async (trig) => {
+        const profileOp = await service.profileLog.begin({ operation: 'queueTrigger', triggerType: triggerName,
+          triggerId: trig.id, by: trig.by })
+        console.log("QUEUED TRIGGER STARTED", trig)
+        const reportFinished = trigger.definition.waitForEvents ? 'trigger_'+trig.id : undefined
+        const flags = { triggerId: trig.id, reportFinished }
+        const emit = service.app.splitEvents
+            ? new SplitEmitQueue(service, flags)
+            : new SingleEmitQueue(service, flags)
+        const routine = () => service.profileLog.profile({ operation: 'runTrigger', triggerType: triggerName,
+          commandId: trig.id, by: trig.by }, async () => {
+          let result
+          try {
+            console.log("TRIGGERED!!", trig)
+            result = await service.app.assertTime('trigger '+trigger.definition.name,
+                trigger.definition.timeout || 10000,
+                () => trigger.execute(trig, (...args) => emit.emit(...args)), trig)
+            console.log("TRIGGER DONE!", trig)
+          } catch (e) {
+            console.error(`TRIGGER ${triggerName} ERROR`, e.stack)
+            throw e
+          }
+          const events = await emit.commit()
+          if(trigger.definition.waitForEvents)
+            await service.app.waitForEvents(reportFinished, events, trigger.definition.waitForEvents)
+          return result
+        })
+        try {
+          routine.key = keyFunction(trig)
+        } catch(e) {
+          console.error("QUEUE KEY FUNCTION ERROR", e)
+        }
+        console.log("TRIGGER QUEUE KEY", routine.key)
+        const promise = service.keyBasedExecutionQueues.queue(routine)
+        await service.profileLog.endPromise(profileOp, promise)
+        return promise
+      })
+    } else {
+      service.triggerQueue.addCommandHandler(triggerName,
+          (trig) => service.profileLog.profile({ operation: 'runTrigger', triggerType: triggerName,
+            commandId: trig.id, by: trig.by }, async () => {
+            console.log("NOT QUEUED TRIGGER STARTED", trig)
+            const reportFinished = trigger.definition.waitForEvents ? 'trigger_'+trig.id : undefined
+            const flags = { triggerId: trig.id, reportFinished }
+            const emit = service.app.splitEvents
+                ? new SplitEmitQueue(service, flags)
+                : new SingleEmitQueue(service, flags)
+            let result
+            try {
+              result = await service.app.assertTime('trigger '+trigger.definition.name,
+                  trigger.definition.timeout || 10000,
+                  () => trigger.execute(trig, (...args) => emit.emit(...args)), trig)
+              console.log("TRIGGER DONE!", trig)
+            } catch (e) {
+              console.error(`TRIGGER ${triggerName} ERROR`, e.stack)
+              throw e
+            }
+            const events = await emit.commit()
+            if(trigger.definition.waitForEvents)
+              await service.app.waitForEvents(reportFinished, events, trigger.definition.waitForEvents)
+            return result
+          })
+      )
+    }
+  }
+
+  service.triggerQueue.start()
+}
+
+module.exports = startTriggerExecutor
package/lib/runtime/Dao.js
CHANGED
@@ -38,7 +38,7 @@ function prepareReactiveDaoDefinition(config, clientData) {
     }
     for(let viewName in service.views) {
       let view = service.views[viewName]
-      if(
+      if(config.profileReads) {
        values[viewName] = {
          async observable(parameters) {
            const observable = await view.observable(parameters, clientData)
package/lib/runtime/Service.js
CHANGED
@@ -5,12 +5,6 @@ const View = require("./View.js")
 const Action = require("./Action.js")
 const EventHandler = require("./EventHandler.js")
 const TriggerHandler = require("./TriggerHandler.js")
-const SearchIndexer = require("./SearchIndexer.js")
-const ReactiveDao = require("@live-change/dao")
-
-const EventSourcing = require('../utils/EventSourcing.js')
-const CommandQueue = require('../utils/CommandQueue.js')
-const KeyBasedExecutionQueues = require('../utils/KeyBasedExecutionQueues.js')
 
 class Service {
 
@@ -79,11 +73,7 @@ class Service {
     //console.log("DEFN", this.definition)
     //console.log("DEFN JSON", JSON.stringify(this.definition.toJSON(), null, " "))
 
-    let promises =
-    if(config.runCommands) promises.push(this.startCommandExecutor())
-    if(config.handleEvents) promises.push(this.startEventListener())
-    if(config.indexSearch) promises.push(this.startSearchIndexer())
-
+    let promises = config.processes.map(proc => proc(this, config))
     await Promise.all(promises)
 
     //if(config.startEventListener) this.startEventListener()
@@ -99,335 +89,7 @@ class Service {
     return this.app.triggerService(service, data)
   }
 
-  async startEventListener() {
-    if(this.app.splitEvents) {
-      this.eventSourcing = new EventSourcing(this.dao, this.databaseName,
-          'events_'+this.name, this.name,
-          { filter: (event) => event.service == this.name })
-    } else {
-      this.eventSourcing = new EventSourcing(this.dao, this.databaseName,
-          'events', this.name,
-          { filter: (event) => event.service == this.name })
-    }
-
-
-    for (let eventName in this.events) {
-      const event = this.events[eventName]
-      this.eventSourcing.addEventHandler(eventName, async (ev, bucket) => {
-        return await this.profileLog.profile({ operation: "handleEvent", eventName, id: ev.id,
-              bucketId: bucket.id, triggerId: bucket.triggerId, commandId: bucket.commandId },
-            () => {
-              console.log("EXECUTING EVENT", ev)
-              return event.execute(ev, bucket)
-            }
-        )
-      })
-      this.eventSourcing.onBucketEnd = async (bucket, handledEvents) => {
-        if(bucket.reportFinished && handledEvents.length > 0) {
-          await this.dao.request(['database', 'update'], this.databaseName, 'eventReports', bucket.reportFinished,[
-            { op: "mergeSets", property: 'finished', values: handledEvents.map(ev => ({ id: ev.id, type: ev.type })) }
-          ])
-        }
-      }
-    }
-
-    this.eventSourcing.start()
-  }
-
-  async startCommandExecutor() {
-    this.commandQueue = new CommandQueue(this.dao, this.databaseName,
-        this.app.splitCommands ? `${this.name}_commands` : 'commands', this.name)
-    this.keyBasedCommandQueues = new KeyBasedExecutionQueues(r => r.key)
-    for (let actionName in this.actions) {
-      const action = this.actions[actionName]
-      if(action.definition.queuedBy) {
-        const queuedBy = action.definition.queuedBy
-        const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
-            Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k=>c[k])) :
-                (c) => JSON.stringify(c[queuedBy]) )
-        this.commandQueue.addCommandHandler(actionName, async (command) => {
-          const profileOp = await this.profileLog.begin({ operation: 'queueCommand', commandType: actionName,
-            commandId: command.id, client: command.client })
-          const reportFinished = action.definition.waitForEvents ? 'command_'+command.id : undefined
-          const flags = { commandId: command.id, reportFinished }
-          const emit = this.app.splitEvents
-              ? new SplitEmitQueue(this, flags)
-              : new SingleEmitQueue(this, flags)
-          const routine = () => this.profileLog.profile({ operation: 'runCommand', commandType: actionName,
-            commandId: command.id, client: command.client }, async () => {
-            const result = await this.app.assertTime('command '+action.definition.name,
-                action.definition.timeout || 10000,
-                () => action.runCommand(command, (...args) => emit.emit(...args)), command)
-            const events = await emit.commit()
-            if(action.definition.waitForEvents)
-              await this.app.waitForEvents(reportFinished, events, action.definition.waitForEvents)
-            return result
-          })
-          routine.key = keyFunction(command)
-          const promise = this.keyBasedCommandQueues.queue(routine)
-          await this.profileLog.endPromise(profileOp, promise)
-          return promise
-        })
-      } else {
-        this.commandQueue.addCommandHandler(actionName,
-            (command) => this.profileLog.profile({ operation: 'runCommand', commandType: actionName,
-              commandId: command.id, client: command.client }, async () => {
-              const reportFinished = action.definition.waitForEvents ? 'command_'+command.id : undefined
-              const flags = { commandId: command.id, reportFinished }
-              const emit = this.app.splitEvents
-                  ? new SplitEmitQueue(this, flags)
-                  : new SingleEmitQueue(this, flags)
-              const result = await this.app.assertTime('command '+action.definition.name,
-                  action.definition.timeout || 10000,
-                  () => action.runCommand(command, (...args) => emit.emit(...args)), command)
-              const events = await emit.commit()
-              if(action.definition.waitForEvents)
-                await this.app.waitForEvents(reportFinished, events, action.definition.waitForEvents)
-              return result
-            })
-        )
-
-      }
-    }
-
-    await this.dao.request(['database', 'createTable'], this.databaseName, 'triggerRoutes').catch(e => 'ok')
-
-    this.triggerQueue = new CommandQueue(this.dao, this.databaseName,
-        this.app.splitTriggers ? `${this.name}_triggers` : 'triggers', this.name )
-    this.keyBasedTriggerQueues = new KeyBasedExecutionQueues(r => r.key)
-    for (let triggerName in this.triggers) {
-      const trigger = this.triggers[triggerName]
-      await this.dao.request(['database', 'put'], this.databaseName, 'triggerRoutes',
-          { id: triggerName + '=>' + this.name, trigger: triggerName, service: this.name })
-      if(trigger.definition.queuedBy) {
-        const queuedBy = trigger.definition.queuedBy
-        const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
-            Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k=>c[k])) :
-                (c) => JSON.stringify(c[queuedBy]) )
-        this.triggerQueue.addCommandHandler(triggerName, async (trig) => {
-          const profileOp = await this.profileLog.begin({ operation: 'queueTrigger', triggerType: triggerName,
-            triggerId: trig.id, by: trig.by })
-          console.log("QUEUED TRIGGER STARTED", trig)
-          const reportFinished = trigger.definition.waitForEvents ? 'trigger_'+trig.id : undefined
-          const flags = { triggerId: trig.id, reportFinished }
-          const emit = this.app.splitEvents
-              ? new SplitEmitQueue(this, flags)
-              : new SingleEmitQueue(this, flags)
-          const routine = () => this.profileLog.profile({ operation: 'runTrigger', triggerType: triggerName,
-            commandId: trig.id, by: trig.by }, async () => {
-            let result
-            try {
-              console.log("TRIGGERED!!", trig)
-              result = await this.app.assertTime('trigger '+trigger.definition.name,
-                  trigger.definition.timeout || 10000,
-                  () => trigger.execute(trig, (...args) => emit.emit(...args)), trig)
-              console.log("TRIGGER DONE!", trig)
-            } catch (e) {
-              console.error(`TRIGGER ${triggerName} ERROR`, e.stack)
-              throw e
-            }
-            const events = await emit.commit()
-            if(trigger.definition.waitForEvents)
-              await this.app.waitForEvents(reportFinished, events, trigger.definition.waitForEvents)
-            return result
-          })
-          try {
-            routine.key = keyFunction(trig)
-          } catch(e) {
-            console.error("QUEUE KEY FUNCTION ERROR", e)
-          }
-          console.log("TRIGGER QUEUE KEY", routine.key)
-          const promise = this.keyBasedTriggerQueues.queue(routine)
-          await this.profileLog.endPromise(profileOp, promise)
-          return promise
-        })
-      } else {
-        this.triggerQueue.addCommandHandler(triggerName,
-            (trig) => this.profileLog.profile({ operation: 'runTrigger', triggerType: triggerName,
-              commandId: trig.id, by: trig.by }, async () => {
-              console.log("NOT QUEUED TRIGGER STARTED", trig)
-              const reportFinished = trigger.definition.waitForEvents ? 'trigger_'+trig.id : undefined
-              const flags = { triggerId: trig.id, reportFinished }
-              const emit = this.app.splitEvents
-                  ? new SplitEmitQueue(this, flags)
-                  : new SingleEmitQueue(this, flags)
-              let result
-              try {
-                result = await this.app.assertTime('trigger '+trigger.definition.name,
-                    trigger.definition.timeout || 10000,
-                    () => trigger.execute(trig, (...args) => emit.emit(...args)), trig)
-                console.log("TRIGGER DONE!", trig)
-              } catch (e) {
-                console.error(`TRIGGER ${triggerName} ERROR`, e.stack)
-                throw e
-              }
-              const events = await emit.commit()
-              if(trigger.definition.waitForEvents)
-                await this.app.waitForEvents(reportFinished, events, trigger.definition.waitForEvents)
-              return result
-            })
-        )
-      }
-    }
-
-    this.commandQueue.start()
-    this.triggerQueue.start()
-  }
-
-  async startSearchIndexer() {
-    let anyIndex = false
-    for(const name in this.models) if(this.models[name].definition.searchIndex) anyIndex = true
-    for(const name in this.indexes) if(this.indexes[name].definition.searchIndex) anyIndex = true
-    if(!anyIndex) {
-      console.log("not starting search indexer - nothing to index!")
-      return
-    }
-    console.log("starting search indexer!")
-    await this.dao.request(['database', 'createTable'], this.databaseName, 'searchIndexes').catch(e => 'ok')
-
-    this.searchIndexers = []
-
-    const elasticsearch = this.app.connectToSearch()
-
-    for(const modelName in this.models) {
-      const model = this.models[modelName]
-      const indexName = model.definition.searchIndex
-      if(!indexName) continue
-      const indexer = new SearchIndexer(
-          this.dao, this.databaseName, 'Table', model.tableName, elasticsearch, indexName, model.definition
-      )
-      this.searchIndexers.push(indexer)
-    }
-
-    for(const indexName in this.indexes) {
-      const index = this.indexes[indexName]
-      const indexName = index.definition.searchIndex
-      if(!indexName) continue
-      const indexer = new SearchIndexer(
-          this.dao, this.databaseName, 'Index', model.tableName, elasticsearch, indexName, index.definition
-      )
-      this.searchIndexers.push(indexer)
-    }
-
-    const promises = []
-    for(const searchIndexer of this.searchIndexers) {
-      promises.push(this.profileLog.profile({
-        operation: "startIndexer", serviceName: this.name, indexName: searchIndexer.indexName
-      }, () => searchIndexer.start()))
-    }
-    await Promise.all(promises)
-    console.log("search indexer started!")
-  }
-
 }
 
-class SplitEmitQueue {
-  constructor(service, flags = {}) {
-    this.service = service
-    this.flags = flags
-    this.emittedEvents = new Map()
-    this.commited = false
-  }
-
-  emit(service, event) {
-    if(!event) {
-      event = service
-      if(Array.isArray(event)) {
-        let hasServices = false
-        for(let ev of event) {
-          if(ev.service) hasServices = true
-        }
-        if(hasServices) {
-          for(let ev of event) {
-            this.emit(ev)
-          }
-          return
-        }
-      } else {
-        service = event.service || this.service.name
-      }
-    }
-    let events
-    if(!this.commited) {
-      events = this.emittedEvents.get(service)
-      if(!events) {
-        events = []
-        this.emittedEvents.set(service, events)
-      }
-    } else {
-      events = []
-    }
-    if(Array.isArray(event)) {
-      for(let ev of event) ev.service = service
-      events.push(...event)
-    } else {
-      event.service = service
-      events.push(event)
-    }
-    if(this.commited) {
-      if(events.length == 0) return
-      this.service.dao.request(['database', 'putLog'], this.service.databaseName,
-          this.service.name+'_events', { type: 'bucket', events, ...this.flags })
-    }
-  }
-
-  async commit() {
-    let promises = []
-    this.commited = true
-    if(this.emittedEvents.length == 0) return []
-    let allEvents = []
-    for(const [service, events] of this.emittedEvents.keys()) {
-      promises.push(this.service.dao.request(['database', 'putLog'], this.service.databaseName,
-          this.service.name+'_events', { type: 'bucket', events, ...this.flags }))
-      allEvents.push(...events)
-    }
-    await Promise.all(promises)
-    return allEvents
-  }
-}
-
-class SingleEmitQueue {
-  constructor(service, flags = {}) {
-    this.service = service
-    this.flags = flags
-    this.emittedEvents = []
-    this.commited = false
-  }
-
-  emit(service, event) {
-    if(!event) {
-      event = service
-      service = this.service.name
-    }
-    let events
-    if(!this.commited) {
-      events = this.emittedEvents
-    } else {
-      events = []
-    }
-    if(Array.isArray(event)) {
-      for(let ev of event) if(!ev.service) ev.service = service
-      events.push(...event)
-    } else {
-      if(!event.service) event.service = service
-      events.push(event)
-    }
-    if(this.commited) {
-      if(events.length == 0) return
-      this.service.dao.request(['database', 'putLog'], this.service.databaseName,
-          'events', { type: 'bucket', events, ...this.flags })
-    }
-  }
-
-  async commit() {
-    this.commited = true
-    if(this.emittedEvents.length == 0) return []
-    await this.service.dao.request(['database', 'putLog'], this.service.databaseName,
-        'events', { type: 'bucket', events: this.emittedEvents, ...this.flags })
-    return this.emittedEvents
-  }
-}
-
-
 
 module.exports = Service
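Service.start() now simply runs every function in config.processes with (service, config); that is how commandExecutor, triggerExecutor and eventListener from lib/processes replace the old start* methods, and the search-indexer path is dropped from this file entirely. A hypothetical custom process illustrating the contract (startHeartbeat and the heartbeat flag are made up for the example, not part of the package):

async function startHeartbeat(service, config) {
  if(!config.heartbeat) return // same opt-in pattern as runCommands / handleEvents
  setInterval(() => console.log('service alive:', service.name), 10000)
}

// await app.startService(definition, {
//   processes: [...app.defaultProcesses, startHeartbeat],
//   runCommands: true, handleEvents: true, heartbeat: true
// })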
package/lib/utils/EventSourcing.js
CHANGED
@@ -78,7 +78,7 @@ class EventSourcing {
     if (this.config.filter && !this.config.filter(event)) return []
     let done = false
     let retry = 0
-    const maxRetry =
+    const maxRetry = this.config.maxRetryCount || 10
    while(!done && maxRetry) {
      try {
        await this.doHandleEvent(event, mainEvent)
package/lib/utils/SingleEmitQueue.js
ADDED
@@ -0,0 +1,44 @@
+
+class SingleEmitQueue {
+  constructor(service, flags = {}) {
+    this.service = service
+    this.flags = flags
+    this.emittedEvents = []
+    this.commited = false
+  }
+
+  emit(service, event) {
+    if(!event) {
+      event = service
+      service = this.service.name
+    }
+    let events
+    if(!this.commited) {
+      events = this.emittedEvents
+    } else {
+      events = []
+    }
+    if(Array.isArray(event)) {
+      for(let ev of event) if(!ev.service) ev.service = service
+      events.push(...event)
+    } else {
+      if(!event.service) event.service = service
+      events.push(event)
+    }
+    if(this.commited) {
+      if(events.length == 0) return
+      this.service.dao.request(['database', 'putLog'], this.service.databaseName,
+          'events', { type: 'bucket', events, ...this.flags })
+    }
+  }
+
+  async commit() {
+    this.commited = true
+    if(this.emittedEvents.length == 0) return []
+    await this.service.dao.request(['database', 'putLog'], this.service.databaseName,
+        'events', { type: 'bucket', events: this.emittedEvents, ...this.flags })
+    return this.emittedEvents
+  }
+}
+
+module.exports = SingleEmitQueue
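SingleEmitQueue, moved here from Service.js, buffers events emitted while a command or trigger runs and writes them as one bucket to the 'events' log on commit. A small behaviour sketch with a stubbed service; the require path and the stub shape (name, databaseName, dao.request) are assumed from the fields the class touches:

const SingleEmitQueue = require('@live-change/framework/lib/utils/SingleEmitQueue.js') // path assumed

const stubService = {
  name: 'users', databaseName: 'test',
  dao: { request: (...args) => { console.log('putLog bucket:', args[3]); return Promise.resolve() } }
}
const queue = new SingleEmitQueue(stubService, { commandId: 'c1' })
queue.emit({ type: 'userCreated' })            // tagged with service: 'users'
queue.emit('email', { type: 'welcomeQueued' }) // addressed to another service
queue.commit().then(events => console.log(events.length)) // logs 2; both events land in one bucket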
package/lib/utils/SplitEmitQueue.js
ADDED
@@ -0,0 +1,67 @@
+
+class SplitEmitQueue {
+  constructor(service, flags = {}) {
+    this.service = service
+    this.flags = flags
+    this.emittedEvents = new Map()
+    this.commited = false
+  }
+
+  emit(service, event) {
+    if(!event) {
+      event = service
+      if(Array.isArray(event)) {
+        let hasServices = false
+        for(let ev of event) {
+          if(ev.service) hasServices = true
+        }
+        if(hasServices) {
+          for(let ev of event) {
+            this.emit(ev)
+          }
+          return
+        }
+      } else {
+        service = event.service || this.service.name
+      }
+    }
+    let events
+    if(!this.commited) {
+      events = this.emittedEvents.get(service)
+      if(!events) {
+        events = []
+        this.emittedEvents.set(service, events)
+      }
+    } else {
+      events = []
+    }
+    if(Array.isArray(event)) {
+      for(let ev of event) ev.service = service
+      events.push(...event)
+    } else {
+      event.service = service
+      events.push(event)
+    }
+    if(this.commited) {
+      if(events.length == 0) return
+      this.service.dao.request(['database', 'putLog'], this.service.databaseName,
+          this.service.name+'_events', { type: 'bucket', events, ...this.flags })
+    }
+  }
+
+  async commit() {
+    let promises = []
+    this.commited = true
+    if(this.emittedEvents.length == 0) return []
+    let allEvents = []
+    for(const [service, events] of this.emittedEvents.keys()) {
+      promises.push(this.service.dao.request(['database', 'putLog'], this.service.databaseName,
+          this.service.name+'_events', { type: 'bucket', events, ...this.flags }))
+      allEvents.push(...events)
+    }
+    await Promise.all(promises)
+    return allEvents
+  }
+}
+
+module.exports = SplitEmitQueue
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@live-change/framework",
-  "version": "0.4.42",
+  "version": "0.5.1",
   "description": "Live Change Framework - ultimate solution for real time mobile/web apps",
   "main": "index.js",
   "scripts": {
@@ -26,6 +26,7 @@
     "@live-change/db": "^0.3.62",
     "@live-change/db-store-level": "^0.1.14",
     "@live-change/db-store-lmdb": "^0.1.21",
+    "@live-change/uid": "^0.1.2",
     "cookie": "^0.4.1",
     "encoding-down": "^7.0.0",
     "express": "^4.17.1",