@live-change/framework 0.4.40 → 0.4.44
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/App.js +26 -41
- package/lib/processes/commandExecutor.js +74 -0
- package/lib/processes/eventListener.js +40 -0
- package/lib/processes/triggerExecutor.js +93 -0
- package/lib/runtime/Service.js +1 -339
- package/lib/utils/ProfileLogFilesystemWriter.js +40 -0
- package/lib/utils/SingleEmitQueue.js +44 -0
- package/lib/utils/SplitEmitQueue.js +67 -0
- package/lib/utils/profileLog.js +3 -33
- package/package.json +1 -4
- package/lib/processors/searchIndex.js +0 -21
- package/lib/updaters/elasticsearch.js +0 -293
- package/lib/utils/AnalyticsWriter.js +0 -79
package/lib/App.js
CHANGED

@@ -9,19 +9,14 @@ const Service = require("./runtime/Service.js")
 
 const profileLog = require("./utils/profileLog.js")
 
-const
+const Dao = require("./runtime/Dao.js")
 const ApiServer = require("./runtime/ApiServer.js")
 const SessionApiServer = require("./runtime/SessionApiServer.js")
 
-const { Client: ElasticSearch } = require('@elastic/elasticsearch')
-
-const AnalyticsWriter = require('./utils/AnalyticsWriter.js')
-
 const reverseRelationProcessor = require("./processors/reverseRelation.js")
 const indexListProcessor = require("./processors/indexList.js")
 const crudGenerator = require("./processors/crudGenerator.js")
 const draftGenerator = require("./processors/draftGenerator.js")
-const searchIndex = require("./processors/searchIndex.js")
 const daoPathView = require("./processors/daoPathView.js")
 const fetchView = require("./processors/fetchView.js")
 const accessControl = require("./processors/accessControl.js")
@@ -29,11 +24,14 @@ const autoValidation = require("./processors/autoValidation.js")
 const indexCode = require("./processors/indexCode.js")
 
 const databaseUpdater = require("./updaters/database.js")
-const elasticSearchUpdater = require("./updaters/elasticsearch.js")
 
 const accessControlFilter = require("./clientSideFilters/accessControlFilter.js")
 const clientSideFilter = require("./clientSideFilters/clientSideFilter.js")
 
+const commandExecutor = require("./processes/commandExecutor.js")
+const triggerExecutor = require("./processes/triggerExecutor.js")
+const eventListener = require('./processes/eventListener.js')
+
 const utils = require('./utils.js')
 
 class App {
@@ -46,26 +44,28 @@ class App {
     this.requestTimeout = (+env.DB_REQUEST_TIMEOUT) || 10*1000
 
     this.defaultProcessors = [
-
-
-
-
-
-
-
-
-
+      crudGenerator,
+      draftGenerator,
+      reverseRelationProcessor,
+      indexListProcessor,
+      daoPathView,
+      fetchView,
+      accessControl,
+      autoValidation,
+      indexCode
     ]
-    if(env.SEARCH_INDEX_PREFIX) this.defaultProcessors.push(searchIndex)
     this.defaultUpdaters = [
-
+      databaseUpdater
     ]
-    if(env.SEARCH_INDEX_PREFIX) this.defaultUpdaters.push(elasticSearchUpdater)
     this.defaultClientSideFilters = [
-
-
+      accessControlFilter,
+      clientSideFilter
+    ]
+    this.defaultProcesses = [
+      commandExecutor,
+      triggerExecutor,
+      eventListener
     ]
-    this.defaultPath = "."
     const dbDao = new ReactiveDao(process.cwd()+' '+process.argv.join(' '), {
       remoteUrl: env.DB_URL || "http://localhost:9417/api/ws",
       protocols: {
@@ -98,22 +98,6 @@ class App {
     this.databaseName = env.DB_NAME || 'test'
   }
 
-  connectToSearch() {
-    if(!this.env.SEARCH_INDEX_PREFIX) throw new Error("ElasticSearch not configured")
-    if(this.search) return this.search
-    this.searchIndexPrefix = this.env.SEARCH_INDEX_PREFIX
-    this.search = new ElasticSearch({ node: this.env.SEARCH_URL || 'http://localhost:9200' })
-    //this.search.info(console.log)
-    return this.search
-  }
-
-  connectToAnalytics() {
-    if(!this.env.ANALYTICS_INDEX_PREFIX) throw new Error("ElasticSearch analytics not configured")
-    if(this.analytics) return this.analytics
-    this.analytics = new AnalyticsWriter(this.env.ANALYTICS_INDEX_PREFIX)
-    return this.analytics
-  }
-
   createServiceDefinition( definition ) {
     const config = this.config && this.config.services && this.config.services[definition.name]
     return new ServiceDefinition({ ...definition, config })
@@ -162,7 +146,8 @@ class App {
     await this.profileLog.end(profileOp)
   }
 
-  async startService( serviceDefinition, config ) {
+  async startService( serviceDefinition, config = {}) {
+    if(!config.processes) config.processes = this.defaultProcesses
    console.log("Starting service", serviceDefinition.name, "!")
     const profileOp = await this.profileLog.begin({
       operation: "startService", serviceName: serviceDefinition.name, config
@@ -170,14 +155,14 @@ class App {
     if(!(serviceDefinition instanceof ServiceDefinition))
       serviceDefinition = new ServiceDefinition(serviceDefinition)
     let service = new Service(serviceDefinition, this)
-    await service.start(config
+    await service.start(config)
     console.log("service started", serviceDefinition.name, "!")
     await this.profileLog.end(profileOp)
     return service
  }
 
   async createReactiveDao( config, clientData ) {
-    return new
+    return new Dao(config, clientData)
   }
 
   async createApiServer( config ) {
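
The practical effect of the App.js change: startService() no longer wires search and analytics from environment variables; it defaults config.processes to app.defaultProcesses (commandExecutor, triggerExecutor, eventListener) and Service.start() runs each one. A minimal usage sketch, assuming `app` is an App instance and `definition` a service definition (hypothetical names, not part of this diff):

    // Default: all three processes are consulted, each gated by its own flag.
    await app.startService(definition, { runCommands: true, handleEvents: true })

    // Explicit subset: only the command executor is installed.
    const commandExecutor = require('@live-change/framework/lib/processes/commandExecutor.js')
    await app.startService(definition, { runCommands: true, processes: [ commandExecutor ] })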

package/lib/processes/commandExecutor.js
ADDED

@@ -0,0 +1,74 @@
+const KeyBasedExecutionQueues = require('../utils/KeyBasedExecutionQueues.js')
+const CommandQueue = require('../utils/CommandQueue.js')
+const SingleEmitQueue = require('../utils/SingleEmitQueue.js')
+const SplitEmitQueue = require('../utils/SplitEmitQueue.js')
+
+async function startCommandExecutor(service, config) {
+  if(!config.runCommands) return
+
+  service.keyBasedExecutionQueues = service.keyBasedExecutionQueues || new KeyBasedExecutionQueues(r => r.key)
+
+  service.commandQueue = new CommandQueue(service.dao, service.databaseName,
+      service.app.splitCommands ? `${service.name}_commands` : 'commands', service.name)
+  for (let actionName in service.actions) {
+    const action = service.actions[actionName]
+    if (action.definition.queuedBy) {
+      const queuedBy = action.definition.queuedBy
+      const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
+          Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k => c[k])) :
+              (c) => JSON.stringify(c[queuedBy]))
+      service.commandQueue.addCommandHandler(actionName, async (command) => {
+        const profileOp = await service.profileLog.begin({
+          operation: 'queueCommand', commandType: actionName,
+          commandId: command.id, client: command.client
+        })
+        const reportFinished = action.definition.waitForEvents ? 'command_' + command.id : undefined
+        const flags = {commandId: command.id, reportFinished}
+        const emit = service.app.splitEvents
+            ? new SplitEmitQueue(service, flags)
+            : new SingleEmitQueue(service, flags)
+        const routine = () => service.profileLog.profile({
+          operation: 'runCommand', commandType: actionName,
+          commandId: command.id, client: command.client
+        }, async () => {
+          const result = await service.app.assertTime('command ' + action.definition.name,
+              action.definition.timeout || 10000,
+              () => action.runCommand(command, (...args) => emit.emit(...args)), command)
+          const events = await emit.commit()
+          if (action.definition.waitForEvents)
+            await service.app.waitForEvents(reportFinished, events, action.definition.waitForEvents)
+          return result
+        })
+        routine.key = keyFunction(command)
+        const promise = service.keyBasedExecutionQueues.queue(routine)
+        await service.profileLog.endPromise(profileOp, promise)
+        return promise
+      })
+    } else {
+      service.commandQueue.addCommandHandler(actionName,
+          (command) => service.profileLog.profile({
+            operation: 'runCommand', commandType: actionName,
+            commandId: command.id, client: command.client
+          }, async () => {
+            const reportFinished = action.definition.waitForEvents ? 'command_' + command.id : undefined
+            const flags = {commandId: command.id, reportFinished}
+            const emit = service.app.splitEvents
+                ? new SplitEmitQueue(service, flags)
+                : new SingleEmitQueue(service, flags)
+            const result = await service.app.assertTime('command ' + action.definition.name,
+                action.definition.timeout || 10000,
+                () => action.runCommand(command, (...args) => emit.emit(...args)), command)
+            const events = await emit.commit()
+            if (action.definition.waitForEvents)
+              await service.app.waitForEvents(reportFinished, events, action.definition.waitForEvents)
+            return result
+          })
+      )
+
+    }
+  }
+
+  service.commandQueue.start()
+}
+
+module.exports = startCommandExecutor
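
A note on queuedBy above: it may be a function, an array of property names, or a single property name, and anything non-function is reduced to a JSON string key. The same resolution in isolation, with a hypothetical command payload:

    // Mirrors the keyFunction construction in startCommandExecutor.
    function makeKeyFunction(queuedBy) {
      if (typeof queuedBy == 'function') return queuedBy
      if (Array.isArray(queuedBy)) return (c) => JSON.stringify(queuedBy.map(k => c[k]))
      return (c) => JSON.stringify(c[queuedBy])
    }

    const keyFn = makeKeyFunction(['user', 'list'])
    keyFn({ user: 'u1', list: 'l7', text: 'hi' }) // => '["u1","l7"]'

Commands that resolve to the same key are serialized through KeyBasedExecutionQueues; commands with different keys can run concurrently.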

package/lib/processes/eventListener.js
ADDED

@@ -0,0 +1,40 @@
+const EventSourcing = require('../utils/EventSourcing.js')
+
+async function startEventListener(service, config) {
+  if(!config.handleEvents) return
+
+  if(service.app.splitEvents) {
+    service.eventSourcing = new EventSourcing(service.dao, service.databaseName,
+        'events_'+service.name, service.name,
+        { filter: (event) => event.service == service.name })
+  } else {
+    service.eventSourcing = new EventSourcing(service.dao, service.databaseName,
+        'events', service.name,
+        { filter: (event) => event.service == service.name })
+  }
+
+
+  for (let eventName in service.events) {
+    const event = service.events[eventName]
+    service.eventSourcing.addEventHandler(eventName, async (ev, bucket) => {
+      return await service.profileLog.profile({ operation: "handleEvent", eventName, id: ev.id,
+          bucketId: bucket.id, triggerId: bucket.triggerId, commandId: bucket.commandId },
+          () => {
+            console.log("EXECUTING EVENT", ev)
+            return event.execute(ev, bucket)
+          }
+      )
+    })
+    service.eventSourcing.onBucketEnd = async (bucket, handledEvents) => {
+      if(bucket.reportFinished && handledEvents.length > 0) {
+        await service.dao.request(['database', 'update'], service.databaseName, 'eventReports', bucket.reportFinished,[
+          { op: "mergeSets", property: 'finished', values: handledEvents.map(ev => ({ id: ev.id, type: ev.type })) }
+        ])
+      }
+    }
+  }
+
+  service.eventSourcing.start()
+}
+
+module.exports = startEventListener
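
The onBucketEnd hook above closes the waitForEvents loop: when a bucket carries a reportFinished id (set by the command or trigger executor), the listener merges the handled event ids into an eventReports row, which app.waitForEvents presumably watches. The request it issues is equivalent to the following, shown with hypothetical ids:

    await service.dao.request(['database', 'update'], service.databaseName,
        'eventReports', 'command_c1', [
          { op: "mergeSets", property: 'finished',
            values: [ { id: 'e1', type: 'messageCreated' } ] }
        ])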

package/lib/processes/triggerExecutor.js
ADDED

@@ -0,0 +1,93 @@
+const KeyBasedExecutionQueues = require('../utils/KeyBasedExecutionQueues.js')
+const CommandQueue = require('../utils/CommandQueue.js')
+const SingleEmitQueue = require('../utils/SingleEmitQueue.js')
+const SplitEmitQueue = require('../utils/SplitEmitQueue.js')
+
+async function startTriggerExecutor(service, config) {
+  if(!config.runCommands) return
+
+  service.keyBasedExecutionQueues = service.keyBasedExecutionQueues || new KeyBasedExecutionQueues(r => r.key)
+
+  await service.dao.request(['database', 'createTable'], service.databaseName, 'triggerRoutes').catch(e => 'ok')
+
+  service.triggerQueue = new CommandQueue(service.dao, service.databaseName,
+      service.app.splitTriggers ? `${service.name}_triggers` : 'triggers', service.name )
+  for (let triggerName in service.triggers) {
+    const trigger = service.triggers[triggerName]
+    await service.dao.request(['database', 'put'], service.databaseName, 'triggerRoutes',
+        { id: triggerName + '=>' + service.name, trigger: triggerName, service: service.name })
+    if(trigger.definition.queuedBy) {
+      const queuedBy = trigger.definition.queuedBy
+      const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
+          Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k=>c[k])) :
+              (c) => JSON.stringify(c[queuedBy]) )
+      service.triggerQueue.addCommandHandler(triggerName, async (trig) => {
+        const profileOp = await service.profileLog.begin({ operation: 'queueTrigger', triggerType: triggerName,
+            triggerId: trig.id, by: trig.by })
+        console.log("QUEUED TRIGGER STARTED", trig)
+        const reportFinished = trigger.definition.waitForEvents ? 'trigger_'+trig.id : undefined
+        const flags = { triggerId: trig.id, reportFinished }
+        const emit = service.app.splitEvents
+            ? new SplitEmitQueue(service, flags)
+            : new SingleEmitQueue(service, flags)
+        const routine = () => service.profileLog.profile({ operation: 'runTrigger', triggerType: triggerName,
+            commandId: trig.id, by: trig.by }, async () => {
+          let result
+          try {
+            console.log("TRIGGERED!!", trig)
+            result = await service.app.assertTime('trigger '+trigger.definition.name,
+                trigger.definition.timeout || 10000,
+                () => trigger.execute(trig, (...args) => emit.emit(...args)), trig)
+            console.log("TRIGGER DONE!", trig)
+          } catch (e) {
+            console.error(`TRIGGER ${triggerName} ERROR`, e.stack)
+            throw e
+          }
+          const events = await emit.commit()
+          if(trigger.definition.waitForEvents)
+            await service.app.waitForEvents(reportFinished, events, trigger.definition.waitForEvents)
+          return result
+        })
+        try {
+          routine.key = keyFunction(trig)
+        } catch(e) {
+          console.error("QUEUE KEY FUNCTION ERROR", e)
+        }
+        console.log("TRIGGER QUEUE KEY", routine.key)
+        const promise = service.keyBasedExecutionQueues.queue(routine)
+        await service.profileLog.endPromise(profileOp, promise)
+        return promise
+      })
+    } else {
+      service.triggerQueue.addCommandHandler(triggerName,
+          (trig) => service.profileLog.profile({ operation: 'runTrigger', triggerType: triggerName,
+              commandId: trig.id, by: trig.by }, async () => {
+            console.log("NOT QUEUED TRIGGER STARTED", trig)
+            const reportFinished = trigger.definition.waitForEvents ? 'trigger_'+trig.id : undefined
+            const flags = { triggerId: trig.id, reportFinished }
+            const emit = service.app.splitEvents
+                ? new SplitEmitQueue(service, flags)
+                : new SingleEmitQueue(service, flags)
+            let result
+            try {
+              result = await service.app.assertTime('trigger '+trigger.definition.name,
+                  trigger.definition.timeout || 10000,
+                  () => trigger.execute(trig, (...args) => emit.emit(...args)), trig)
+              console.log("TRIGGER DONE!", trig)
+            } catch (e) {
+              console.error(`TRIGGER ${triggerName} ERROR`, e.stack)
+              throw e
+            }
+            const events = await emit.commit()
+            if(trigger.definition.waitForEvents)
+              await service.app.waitForEvents(reportFinished, events, trigger.definition.waitForEvents)
+            return result
+          })
+      )
+    }
+  }
+
+  service.triggerQueue.start()
+}
+
+module.exports = startTriggerExecutor
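
Beyond executing triggers, this process also advertises them: every trigger gets a triggerRoutes row keyed 'trigger=>service', so other services can discover who handles what. With hypothetical names, a 'userDeleted' trigger on a 'messages' service yields:

    { id: 'userDeleted=>messages', trigger: 'userDeleted', service: 'messages' }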

package/lib/runtime/Service.js
CHANGED

@@ -5,12 +5,6 @@ const View = require("./View.js")
 const Action = require("./Action.js")
 const EventHandler = require("./EventHandler.js")
 const TriggerHandler = require("./TriggerHandler.js")
-const SearchIndexer = require("./SearchIndexer.js")
-const ReactiveDao = require("@live-change/dao")
-
-const EventSourcing = require('../utils/EventSourcing.js')
-const CommandQueue = require('../utils/CommandQueue.js')
-const KeyBasedExecutionQueues = require('../utils/KeyBasedExecutionQueues.js')
 
 class Service {
 
@@ -79,11 +73,7 @@ class Service {
     //console.log("DEFN", this.definition)
     //console.log("DEFN JSON", JSON.stringify(this.definition.toJSON(), null, " "))
 
-    let promises = []
-    if(config.runCommands) promises.push(this.startCommandExecutor())
-    if(config.handleEvents) promises.push(this.startEventListener())
-    if(config.indexSearch) promises.push(this.startSearchIndexer())
-
+    let promises = config.processes.map(proc => proc(this, config))
     await Promise.all(promises)
 
     //if(config.startEventListener) this.startEventListener()
@@ -99,335 +89,7 @@ class Service {
     return this.app.triggerService(service, data)
   }
 
-  async startEventListener() {
-    if(this.app.splitEvents) {
-      this.eventSourcing = new EventSourcing(this.dao, this.databaseName,
-          'events_'+this.name, this.name,
-          { filter: (event) => event.service == this.name })
-    } else {
-      this.eventSourcing = new EventSourcing(this.dao, this.databaseName,
-          'events', this.name,
-          { filter: (event) => event.service == this.name })
-    }
-
-
-    for (let eventName in this.events) {
-      const event = this.events[eventName]
-      this.eventSourcing.addEventHandler(eventName, async (ev, bucket) => {
-        return await this.profileLog.profile({ operation: "handleEvent", eventName, id: ev.id,
-            bucketId: bucket.id, triggerId: bucket.triggerId, commandId: bucket.commandId },
-            () => {
-              console.log("EXECUTING EVENT", ev)
-              return event.execute(ev, bucket)
-            }
-        )
-      })
-      this.eventSourcing.onBucketEnd = async (bucket, handledEvents) => {
-        if(bucket.reportFinished && handledEvents.length > 0) {
-          await this.dao.request(['database', 'update'], this.databaseName, 'eventReports', bucket.reportFinished,[
-            { op: "mergeSets", property: 'finished', values: handledEvents.map(ev => ({ id: ev.id, type: ev.type })) }
-          ])
-        }
-      }
-    }
-
-    this.eventSourcing.start()
-  }
-
-  async startCommandExecutor() {
-    this.commandQueue = new CommandQueue(this.dao, this.databaseName,
-        this.app.splitCommands ? `${this.name}_commands` : 'commands', this.name)
-    this.keyBasedCommandQueues = new KeyBasedExecutionQueues(r => r.key)
-    for (let actionName in this.actions) {
-      const action = this.actions[actionName]
-      if(action.definition.queuedBy) {
-        const queuedBy = action.definition.queuedBy
-        const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
-            Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k=>c[k])) :
-                (c) => JSON.stringify(c[queuedBy]) )
-        this.commandQueue.addCommandHandler(actionName, async (command) => {
-          const profileOp = await this.profileLog.begin({ operation: 'queueCommand', commandType: actionName,
-              commandId: command.id, client: command.client })
-          const reportFinished = action.definition.waitForEvents ? 'command_'+command.id : undefined
-          const flags = { commandId: command.id, reportFinished }
-          const emit = this.app.splitEvents
-              ? new SplitEmitQueue(this, flags)
-              : new SingleEmitQueue(this, flags)
-          const routine = () => this.profileLog.profile({ operation: 'runCommand', commandType: actionName,
-              commandId: command.id, client: command.client }, async () => {
-            const result = await this.app.assertTime('command '+action.definition.name,
-                action.definition.timeout || 10000,
-                () => action.runCommand(command, (...args) => emit.emit(...args)), command)
-            const events = await emit.commit()
-            if(action.definition.waitForEvents)
-              await this.app.waitForEvents(reportFinished, events, action.definition.waitForEvents)
-            return result
-          })
-          routine.key = keyFunction(command)
-          const promise = this.keyBasedCommandQueues.queue(routine)
-          await this.profileLog.endPromise(profileOp, promise)
-          return promise
-        })
-      } else {
-        this.commandQueue.addCommandHandler(actionName,
-            (command) => this.profileLog.profile({ operation: 'runCommand', commandType: actionName,
-                commandId: command.id, client: command.client }, async () => {
-              const reportFinished = action.definition.waitForEvents ? 'command_'+command.id : undefined
-              const flags = { commandId: command.id, reportFinished }
-              const emit = this.app.splitEvents
-                  ? new SplitEmitQueue(this, flags)
-                  : new SingleEmitQueue(this, flags)
-              const result = await this.app.assertTime('command '+action.definition.name,
-                  action.definition.timeout || 10000,
-                  () => action.runCommand(command, (...args) => emit.emit(...args)), command)
-              const events = await emit.commit()
-              if(action.definition.waitForEvents)
-                await this.app.waitForEvents(reportFinished, events, action.definition.waitForEvents)
-              return result
-            })
-        )
-
-      }
-    }
-
-    await this.dao.request(['database', 'createTable'], this.databaseName, 'triggerRoutes').catch(e => 'ok')
-
-    this.triggerQueue = new CommandQueue(this.dao, this.databaseName,
-        this.app.splitTriggers ? `${this.name}_triggers` : 'triggers', this.name )
-    this.keyBasedTriggerQueues = new KeyBasedExecutionQueues(r => r.key)
-    for (let triggerName in this.triggers) {
-      const trigger = this.triggers[triggerName]
-      await this.dao.request(['database', 'put'], this.databaseName, 'triggerRoutes',
-          { id: triggerName + '=>' + this.name, trigger: triggerName, service: this.name })
-      if(trigger.definition.queuedBy) {
-        const queuedBy = trigger.definition.queuedBy
-        const keyFunction = typeof queuedBy == 'function' ? queuedBy : (
-            Array.isArray(queuedBy) ? (c) => JSON.stringify(queuedBy.map(k=>c[k])) :
-                (c) => JSON.stringify(c[queuedBy]) )
-        this.triggerQueue.addCommandHandler(triggerName, async (trig) => {
-          const profileOp = await this.profileLog.begin({ operation: 'queueTrigger', triggerType: triggerName,
-              triggerId: trig.id, by: trig.by })
-          console.log("QUEUED TRIGGER STARTED", trig)
-          const reportFinished = trigger.definition.waitForEvents ? 'trigger_'+trig.id : undefined
-          const flags = { triggerId: trig.id, reportFinished }
-          const emit = this.app.splitEvents
-              ? new SplitEmitQueue(this, flags)
-              : new SingleEmitQueue(this, flags)
-          const routine = () => this.profileLog.profile({ operation: 'runTrigger', triggerType: triggerName,
-              commandId: trig.id, by: trig.by }, async () => {
-            let result
-            try {
-              console.log("TRIGGERED!!", trig)
-              result = await this.app.assertTime('trigger '+trigger.definition.name,
-                  trigger.definition.timeout || 10000,
-                  () => trigger.execute(trig, (...args) => emit.emit(...args)), trig)
-              console.log("TRIGGER DONE!", trig)
-            } catch (e) {
-              console.error(`TRIGGER ${triggerName} ERROR`, e.stack)
-              throw e
-            }
-            const events = await emit.commit()
-            if(trigger.definition.waitForEvents)
-              await this.app.waitForEvents(reportFinished, events, trigger.definition.waitForEvents)
-            return result
-          })
-          try {
-            routine.key = keyFunction(trig)
-          } catch(e) {
-            console.error("QUEUE KEY FUNCTION ERROR", e)
-          }
-          console.log("TRIGGER QUEUE KEY", routine.key)
-          const promise = this.keyBasedTriggerQueues.queue(routine)
-          await this.profileLog.endPromise(profileOp, promise)
-          return promise
-        })
-      } else {
-        this.triggerQueue.addCommandHandler(triggerName,
-            (trig) => this.profileLog.profile({ operation: 'runTrigger', triggerType: triggerName,
-                commandId: trig.id, by: trig.by }, async () => {
-              console.log("NOT QUEUED TRIGGER STARTED", trig)
-              const reportFinished = trigger.definition.waitForEvents ? 'trigger_'+trig.id : undefined
-              const flags = { triggerId: trig.id, reportFinished }
-              const emit = this.app.splitEvents
-                  ? new SplitEmitQueue(this, flags)
-                  : new SingleEmitQueue(this, flags)
-              let result
-              try {
-                result = await this.app.assertTime('trigger '+trigger.definition.name,
-                    trigger.definition.timeout || 10000,
-                    () => trigger.execute(trig, (...args) => emit.emit(...args)), trig)
-                console.log("TRIGGER DONE!", trig)
-              } catch (e) {
-                console.error(`TRIGGER ${triggerName} ERROR`, e.stack)
-                throw e
-              }
-              const events = await emit.commit()
-              if(trigger.definition.waitForEvents)
-                await this.app.waitForEvents(reportFinished, events, trigger.definition.waitForEvents)
-              return result
-            })
-        )
-      }
-    }
-
-    this.commandQueue.start()
-    this.triggerQueue.start()
-  }
-
-  async startSearchIndexer() {
-    let anyIndex = false
-    for(const name in this.models) if(this.models[name].definition.searchIndex) anyIndex = true
-    for(const name in this.indexes) if(this.indexes[name].definition.searchIndex) anyIndex = true
-    if(!anyIndex) {
-      console.log("not starting search indexer - nothing to index!")
-      return
-    }
-    console.log("starting search indexer!")
-    await this.dao.request(['database', 'createTable'], this.databaseName, 'searchIndexes').catch(e => 'ok')
-
-    this.searchIndexers = []
-
-    const elasticsearch = this.app.connectToSearch()
-
-    for(const modelName in this.models) {
-      const model = this.models[modelName]
-      const indexName = model.definition.searchIndex
-      if(!indexName) continue
-      const indexer = new SearchIndexer(
-        this.dao, this.databaseName, 'Table', model.tableName, elasticsearch, indexName, model.definition
-      )
-      this.searchIndexers.push(indexer)
-    }
-
-    for(const indexName in this.indexes) {
-      const index = this.indexes[indexName]
-      const indexName = index.definition.searchIndex
-      if(!indexName) continue
-      const indexer = new SearchIndexer(
-        this.dao, this.databaseName, 'Index', model.tableName, elasticsearch, indexName, index.definition
-      )
-      this.searchIndexers.push(indexer)
-    }
-
-    const promises = []
-    for(const searchIndexer of this.searchIndexers) {
-      promises.push(this.profileLog.profile({
-        operation: "startIndexer", serviceName: this.name, indexName: searchIndexer.indexName
-      }, () => searchIndexer.start()))
-    }
-    await Promise.all(promises)
-    console.log("search indexer started!")
-  }
-
 }
 
-class SplitEmitQueue {
-  constructor(service, flags = {}) {
-    this.service = service
-    this.flags = flags
-    this.emittedEvents = new Map()
-    this.commited = false
-  }
-
-  emit(service, event) {
-    if(!event) {
-      event = service
-      if(Array.isArray(event)) {
-        let hasServices = false
-        for(let ev of event) {
-          if(ev.service) hasServices = true
-        }
-        if(hasServices) {
-          for(let ev of event) {
-            this.emit(ev)
-          }
-          return
-        }
-      } else {
-        service = event.service || this.service.name
-      }
-    }
-    let events
-    if(!this.commited) {
-      events = this.emittedEvents.get(service)
-      if(!events) {
-        events = []
-        this.emittedEvents.set(service, events)
-      }
-    } else {
-      events = []
-    }
-    if(Array.isArray(event)) {
-      for(let ev of event) ev.service = service
-      events.push(...event)
-    } else {
-      event.service = service
-      events.push(event)
-    }
-    if(this.commited) {
-      if(events.length == 0) return
-      this.service.dao.request(['database', 'putLog'], this.service.databaseName,
-          this.service.name+'_events', { type: 'bucket', events, ...this.flags })
-    }
-  }
-
-  async commit() {
-    let promises = []
-    this.commited = true
-    if(this.emittedEvents.length == 0) return []
-    let allEvents = []
-    for(const [service, events] of this.emittedEvents.keys()) {
-      promises.push(this.service.dao.request(['database', 'putLog'], this.service.databaseName,
-          this.service.name+'_events', { type: 'bucket', events, ...this.flags }))
-      allEvents.push(...events)
-    }
-    await Promise.all(promises)
-    return allEvents
-  }
-}
-
-class SingleEmitQueue {
-  constructor(service, flags = {}) {
-    this.service = service
-    this.flags = flags
-    this.emittedEvents = []
-    this.commited = false
-  }
-
-  emit(service, event) {
-    if(!event) {
-      event = service
-      service = this.service.name
-    }
-    let events
-    if(!this.commited) {
-      events = this.emittedEvents
-    } else {
-      events = []
-    }
-    if(Array.isArray(event)) {
-      for(let ev of event) if(!ev.service) ev.service = service
-      events.push(...event)
-    } else {
-      if(!event.service) event.service = service
-      events.push(event)
-    }
-    if(this.commited) {
-      if(events.length == 0) return
-      this.service.dao.request(['database', 'putLog'], this.service.databaseName,
-          'events', { type: 'bucket', events, ...this.flags })
-    }
-  }
-
-  async commit() {
-    this.commited = true
-    if(this.emittedEvents.length == 0) return []
-    await this.service.dao.request(['database', 'putLog'], this.service.databaseName,
-        'events', { type: 'bucket', events: this.emittedEvents, ...this.flags })
-    return this.emittedEvents
-  }
-}
-
-
 
 module.exports = Service
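
After this change a process is just an async function with the signature (service, config); Service.start() maps config.processes over it and awaits them all, so extending a service no longer means editing Service.js. A hedged sketch of a custom process, using hypothetical names:

    // Runs alongside the built-in commandExecutor/triggerExecutor/eventListener.
    async function startupLogger(service, config) {
      console.log('starting', service.name, 'with config', config)
    }

    await app.startService(definition, {
      runCommands: true,
      handleEvents: true,
      processes: [ ...app.defaultProcesses, startupLogger ]
    })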

package/lib/utils/ProfileLogFilesystemWriter.js
ADDED

@@ -0,0 +1,40 @@
+const fs = require('fs')
+const os = require('os')
+const { once } = require('events')
+
+class ProfileLogFilesystemWriter {
+  constructor(path) {
+    this.profileLogStream = null
+    this.profileLogStreamDrainPromise = null
+
+    if(path) {
+      this.profileLogStream = fs.createWriteStream(path)
+    } else if(process.env.PROFILE_LOG_PATH) {
+      const dateString = new Date().toISOString().slice(0, -1).replace(/[T:\\.-]/gi, '_')
+      const serviceName = process.cwd().split('/').pop()
+      const hostname = os.hostname()
+      const username = os.userInfo().username
+
+      const logPath = process.env.PROFILE_LOG_PATH + serviceName +
+          '@' + username + '@' + hostname + '@' + dateString + '.prof.log'
+      this.profileLogStream = fs.createWriteStream(logPath)
+    }
+  }
+
+  async write(operation) {
+    if(!this.profileLogStream) return;
+    const msg = {
+      time: (new Date()).toISOString(),
+      ...operation
+    }
+    if(!this.profileLogStream.write(JSON.stringify(msg)+'\n')) {
+      if(!this.profileLogStreamDrainPromise) {
+        this.profileLogStreamDrainPromise = once(this.profileLogStream, 'drain')
+      }
+      await this.profileLogStreamDrainPromise
+      this.profileLogStreamDrainPromise = null
+    }
+  }
+}
+
+module.exports = ProfileLogFilesystemWriter
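
write() applies backpressure: when stream.write() returns false, concurrent callers share one 'drain' promise rather than each attaching its own listener. The same pattern in isolation, as a sketch (names are illustrative):

    const { once } = require('events')

    // `state.drain` plays the role of profileLogStreamDrainPromise above.
    async function writeWithSharedDrain(stream, state, line) {
      if (!stream.write(line)) {
        if (!state.drain) state.drain = once(stream, 'drain')
        await state.drain
        state.drain = null
      }
    }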

package/lib/utils/SingleEmitQueue.js
ADDED

@@ -0,0 +1,44 @@
+
+class SingleEmitQueue {
+  constructor(service, flags = {}) {
+    this.service = service
+    this.flags = flags
+    this.emittedEvents = []
+    this.commited = false
+  }
+
+  emit(service, event) {
+    if(!event) {
+      event = service
+      service = this.service.name
+    }
+    let events
+    if(!this.commited) {
+      events = this.emittedEvents
+    } else {
+      events = []
+    }
+    if(Array.isArray(event)) {
+      for(let ev of event) if(!ev.service) ev.service = service
+      events.push(...event)
+    } else {
+      if(!event.service) event.service = service
+      events.push(event)
+    }
+    if(this.commited) {
+      if(events.length == 0) return
+      this.service.dao.request(['database', 'putLog'], this.service.databaseName,
+          'events', { type: 'bucket', events, ...this.flags })
+    }
+  }
+
+  async commit() {
+    this.commited = true
+    if(this.emittedEvents.length == 0) return []
+    await this.service.dao.request(['database', 'putLog'], this.service.databaseName,
+        'events', { type: 'bucket', events: this.emittedEvents, ...this.flags })
+    return this.emittedEvents
+  }
+}
+
+module.exports = SingleEmitQueue
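
emit() accepts a bare event, an array of events, or (serviceName, event); events without a service field are stamped with the emitting service's name, and everything emitted before commit() leaves as a single bucket in the shared 'events' log. A usage sketch with hypothetical event types:

    const queue = new SingleEmitQueue(service, { commandId: 'c1' })
    queue.emit({ type: 'messageCreated', message: 'm1' })  // service defaulted
    queue.emit('other', { type: 'externalEvent' })         // explicit target service
    const events = await queue.commit()                    // one putLog request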

package/lib/utils/SplitEmitQueue.js
ADDED

@@ -0,0 +1,67 @@
+
+class SplitEmitQueue {
+  constructor(service, flags = {}) {
+    this.service = service
+    this.flags = flags
+    this.emittedEvents = new Map()
+    this.commited = false
+  }
+
+  emit(service, event) {
+    if(!event) {
+      event = service
+      if(Array.isArray(event)) {
+        let hasServices = false
+        for(let ev of event) {
+          if(ev.service) hasServices = true
+        }
+        if(hasServices) {
+          for(let ev of event) {
+            this.emit(ev)
+          }
+          return
+        }
+      } else {
+        service = event.service || this.service.name
+      }
+    }
+    let events
+    if(!this.commited) {
+      events = this.emittedEvents.get(service)
+      if(!events) {
+        events = []
+        this.emittedEvents.set(service, events)
+      }
+    } else {
+      events = []
+    }
+    if(Array.isArray(event)) {
+      for(let ev of event) ev.service = service
+      events.push(...event)
+    } else {
+      event.service = service
+      events.push(event)
+    }
+    if(this.commited) {
+      if(events.length == 0) return
+      this.service.dao.request(['database', 'putLog'], this.service.databaseName,
+          this.service.name+'_events', { type: 'bucket', events, ...this.flags })
+    }
+  }
+
+  async commit() {
+    let promises = []
+    this.commited = true
+    if(this.emittedEvents.length == 0) return []
+    let allEvents = []
+    for(const [service, events] of this.emittedEvents.keys()) {
+      promises.push(this.service.dao.request(['database', 'putLog'], this.service.databaseName,
+          this.service.name+'_events', { type: 'bucket', events, ...this.flags }))
+      allEvents.push(...events)
+    }
+    await Promise.all(promises)
+    return allEvents
+  }
+}
+
+module.exports = SplitEmitQueue
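
One caveat in commit() above: it destructures [service, events] pairs from this.emittedEvents.keys(), which yields only keys, and checks .length on a Map, which exposes .size. Presumably entries() and size were intended; a corrected sketch of the method over a SplitEmitQueue instance `q` (hypothetical variable, only the iteration is adjusted):

    async function commitSplit(q) {
      q.commited = true
      if (q.emittedEvents.size == 0) return []
      const promises = []
      const allEvents = []
      for (const [service, events] of q.emittedEvents.entries()) {
        promises.push(q.service.dao.request(['database', 'putLog'],
            q.service.databaseName, q.service.name + '_events',
            { type: 'bucket', events, ...q.flags }))
        allEvents.push(...events)
      }
      await Promise.all(promises)
      return allEvents
    }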

package/lib/utils/profileLog.js
CHANGED

@@ -1,40 +1,10 @@
-const fs = require('fs')
-const os = require('os')
-const { once } = require('events')
-
 class ProfileLog {
-  constructor(path) {
-    this.profileLogStream = null
-    this.profileLogStreamDrainPromise = null
-
-    if(process.env.PROFILE_LOG_PATH) {
-      const dateString = new Date().toISOString().slice(0, -1).replace(/[T:\\.-]/gi, '_')
-      const serviceName = process.cwd().split('/').pop()
-      const hostname = os.hostname()
-      const username = os.userInfo().username
-
-      const logPath = process.env.PROFILE_LOG_PATH + serviceName +
-          '@' + username + '@' + hostname + '@' + dateString + '.prof.log'
-      this.profileLogStream = fs.createWriteStream(logPath)
-    }
-    if(path) {
-      this.profileLogStream = fs.createWriteStream(path)
-    }
+  constructor() {
+    this.writers = []
   }
 
   async log(operation) {
-
-    const msg = {
-      time: (new Date()).toISOString(),
-      ...operation
-    }
-    if(!this.profileLogStream.write(JSON.stringify(msg)+'\n')) {
-      if(!this.profileLogStreamDrainPromise) {
-        this.profileLogStreamDrainPromise = once(this.profileLogStream, 'drain')
-      }
-      await this.profileLogStreamDrainPromise
-      this.profileLogStreamDrainPromise = null
-    }
+    await Promise.all(this.writers.map(writer => writer.write(operation)))
   }
 
   async begin(operation) {
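
ProfileLog is now a thin fan-out over pluggable writers, with the filesystem behaviour moved into ProfileLogFilesystemWriter. A hedged wiring sketch, assuming the module keeps exporting a shared ProfileLog instance (its direct require in App.js suggests it does) and that these paths resolve inside the published package:

    const profileLog = require('@live-change/framework/lib/utils/profileLog.js')
    const ProfileLogFilesystemWriter =
        require('@live-change/framework/lib/utils/ProfileLogFilesystemWriter.js')

    // Any object with an async write(operation) method qualifies as a writer.
    profileLog.writers.push(new ProfileLogFilesystemWriter('/tmp/service.prof.log'))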

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@live-change/framework",
-  "version": "0.4.40",
+  "version": "0.4.44",
   "description": "Live Change Framework - ultimate solution for real time mobile/web apps",
   "main": "index.js",
   "scripts": {
@@ -41,8 +41,5 @@
     "subleveldown": "^5.0.1",
     "tape": "^5.3.1",
     "websocket": "^1.0.34"
-  },
-  "dependencies": {
-    "@elastic/elasticsearch": "7.13.0"
   }
 }

package/lib/processors/searchIndex.js
REMOVED

@@ -1,21 +0,0 @@
-const utils = require("../utils.js")
-
-module.exports = async function(service, app) {
-  const search = await app.connectToSearch()
-  const generateIndexName = (modelName) => {
-    return (app.searchIndexPrefix+service.name+"_"+modelName).toLowerCase()
-  }
-
-  for(let modelName in service.models) {
-    const model = service.models[modelName]
-    if (!model.search) continue
-    const searchIndex = generateIndexName(modelName)
-    model.searchIndex = searchIndex
-  }
-  for(let indexName in service.indexes) {
-    const index = service.models[indexName]
-    if (!index.search) continue
-    const searchIndex = generateIndexName(indexName)
-    model.searchIndex = searchIndex
-  }
-}

package/lib/updaters/elasticsearch.js
REMOVED

@@ -1,293 +0,0 @@
-const { typeName } = require("../utils.js")
-const SearchIndexer = require("../runtime/SearchIndexer.js")
-
-function generatePropertyMapping(property, search) {
-  //console.log("GENERATE PROPERTY MAPPING", property)
-  let options = search ? JSON.parse(JSON.stringify(search)) : {}
-  if(property.search === false) options.enabled = false
-  if(!options.type) {
-    switch(typeName(property.type)) {
-      case "String": options.type = "text"; break;
-      case "Number": options.type = "double"; break;
-      case "Date": options.type = "date"; break;
-      case "Boolean": options.type = "boolean"; break;
-      case "Array": options.type = "array"; break;
-      case "Object": options.type = "object"; break;
-      default: options.type = "keyword"
-    }
-  }
-  //console.log("GENERATED PROPERTY MAPPING", property, ":", options)
-  return options
-}
-
-function generatePropertyMappings(property, propName) {
-  const bindings = property.search
-      ? (Array.isArray(property.search) ? property.search : [ property.search ])
-      : [null]
-  const mappings = {}
-  for(const binding of bindings) {
-    const options = generatePropertyMapping(property, binding)
-    //console.log("OPTIONS", options)
-    if(options.type == 'object' && !options.properties) {
-      options.properties = {}
-      for(let propName in property.properties) {
-        const mappings = generatePropertyMappings(property.properties[propName], propName)
-        for(let key in mappings) options.properties[key] = mappings[key]
-      }
-      options.include_in_root = true
-    }
-    if(options.type == 'array') {
-      if(typeName(property.of) != "Object") {
-        return generatePropertyMappings(property.of, propName)
-      } else {
-        options.type = 'nested'
-      }
-    }
-    delete options.name
-    mappings[(binding && binding.name) || propName] = options
-  }
-  //console.log("PROPERTY MAPPINGS", propName, mappings)
-  return mappings
-}
-
-function generateMetadata(model) {
-  let properties = {}
-  for(const propName in model.properties) {
-    const mappings = generatePropertyMappings(model.properties[propName], propName)
-    for(let key in mappings) properties[key] = mappings[key]
-  }
-  let settings = (typeof model.search == 'object') ? model.search.settings : undefined
-  return {
-    settings,
-    mappings: {
-      _source: {
-        enabled: true
-      },
-      properties: {
-        id: { type: "keyword", index: false },
-        ...properties
-      }
-    }
-  }
-}
-
-async function updateElasticSearch(changes, service, app, force) {
-
-  const generateIndexName = (modelName) => {
-    return (app.searchIndexPrefix+service.name+"_"+modelName).toLowerCase()
-  }
-
-  const generateTableName = (modelName) => {
-    return service.name+"_"+modelName
-  }
-
-  console.log("ELASTICSEARCH UPDATER")
-
-  let changesByModel = new Map()
-  const addChange = function(modelName, change) {
-    let changes = changesByModel.get(modelName)
-    if(!changes) changesByModel.set(modelName, [change])
-    else changes.push(change)
-  }
-
-  /// Group by model
-  for(let change of changes) {
-    switch (change.operation) {
-      case "createModel": addChange(change.model.name, change); break
-      case "renameModel": addChange(change.from, change); break
-      case "deleteModel": addChange(change.name, change); break
-      case "createProperty":
-      case "renameProperty":
-      case "deleteProperty":
-      case "changePropertyType":
-      case "searchEnabled":
-      case "searchDisabled":
-      case "searchUpdated":
-      case "changePropertySearch": addChange(change.model, change); break
-      default:
-    }
-  }
-
-  const search = await app.connectToSearch()
-
-  async function getCurrentAlias(modelName) {
-    let alias = await search.indices.getAlias({name: generateIndexName(modelName) })
-    //console.log("GOT ALIAS", Object.keys(alias.body)[0])
-    return Object.keys(alias.body)[0]
-  }
-
-  async function setPropertyDefaultValue(currentAlias, propertyName, defaultValue) {
-    const req = {
-      index: currentAlias,
-      body: {
-        query: {
-          match_all: {}
-        },
-        script: {
-          source: `ctx._source.${propertyName} = ${JSON.stringify(defaultValue)}`,
-          lang: 'painless'
-        }
-      },
-      conflicts: 'proceed'
-    }
-    console.log("UPDATE BY QUERY", req)
-    await search.updateByQuery(req).catch(error => {
-      console.error("FIELD UPDATE ERROR", error.meta.body.error, error.meta.body.failures)
-      throw error
-    })
-  }
-
-  for(let [model, changes] of changesByModel.entries()) {
-    let definition = service.models[model]
-    for(let change of changes) {
-      if(!definition.search && change.operation!='searchDisabled' && change.operation!='deleteModel') return
-      switch (change.operation) {
-        case "createModel": {
-          if (changes.length != 1) {
-            console.error("Bad model operations set", changes)
-            throw new Error("createModel prohibits other operations for model")
-          }
-          const index = generateIndexName(change.model.name) + '_1'
-          const metadata = generateMetadata(service.models[change.model.name])
-          console.log("INDEX", index)
-          console.log("METADATA", JSON.stringify(metadata,null, " "))
-          await search.indices.create({
-            index,
-            body: metadata
-          })
-          await search.indices.putAlias({
-            name: generateIndexName(change.model.name),
-            index: generateIndexName(change.model.name) + '_1',
-          })
-        } break
-        case "searchEnabled": {
-          const index = generateIndexName(change.model) + '_1'
-          const metadata = generateMetadata(service.models[change.model])
-          console.log("INDEX", index)
-          console.log("METADATA", JSON.stringify(metadata,null, " "))
-          await search.indices.create({
-            index,
-            body: metadata
-          })
-          await search.indices.putAlias({
-            name: generateIndexName(change.model),
-            index,
-          })
-        } break
-        case "deleteModel":
-          if(changes.length != 1) {
-            console.error("Bad model operations set", changes)
-            throw new Error("deleteModel prohibits other operations for model")
-          } /// NO BREAK!
-        case "searchDisabled": {
-          console.log("SEARCH DISABLED")
-          const indexName = generateIndexName(change.model)
-          const currentAlias = await getCurrentAlias(change.model).catch(e=>null)
-          console.log("DELETE INDEX", currentAlias, "AND ALIAS", indexName)
-          if(currentAlias) await search.indices.delete({ index: currentAlias }).catch(e=>{})
-          await search.indices.deleteAlias({ name: indexName }).catch(e=>{})
-          await app.dao.request(['database', 'delete'], app.databaseName, 'searchIndexes', indexName)
-        } break
-        case "renameModel": {
-          const newAlias = generateIndexName(change.to) + '_1'
-          await search.indices.create({
-            name: newAlias,
-            body: generateMetadata(service.models[change.to])
-          })
-          await search.indices.putAlias({
-            name: generateIndexName(change.to),
-            index: newAlias
-          })
-          const currentAlias = await getCurrentAlias(change.from)
-          await search.reindex({ body: {
-            source: { index: currentAlias },
-            dest: { index: newAlias }
-          }})
-          await search.indices.delete({ name: currentAlias })
-          await search.indices.deleteAlias({ name: generateIndexName(change.from) })
-        } break
-        default:
-      }
-    }
-
-    let reindex = false
-    for(let change of changes) {
-      switch (change.operation) {
-        case "renameProperty":
-        case "deleteProperty":
-        case "changePropertyType":
-        case "changePropertySearch":
-        case "searchUpdated":
-          reindex = true;
-          break;
-        default:
-      }
-    }
-
-
-    if(reindex) {
-      try {
-        const currentAlias = await getCurrentAlias(model)
-        const currentVersion = +currentAlias.slice(currentAlias.lastIndexOf("_") + 1)
-        const newVersion = currentVersion + 1
-        const newAlias = generateIndexName(model) + "_" + newVersion
-        const metadata = generateMetadata(service.models[model])
-        console.log("METADATA", JSON.stringify(metadata, null, " "))
-        await search.indices.create({
-          index: newAlias,
-          body: metadata
-        })
-
-        for(let change of changes) { /// Create properties before reindex
-          if(change.operation == 'createProperty')
-            if(typeof change.property.defaultValue != 'undefined')
-              await setPropertyDefaultValue(currentAlias, change.name, change.property.defaultValue)
-        }
-
-        /*await search.reindex({
-          body: {
-            source: { index: currentAlias },
-            dest: { index: newAlias }
-          }
-        })*/
-
-        const indexer = new SearchIndexer(app.dao, app.databaseName, 'Table',
-            generateTableName(model), search, newAlias, service.models[model])
-
-        await indexer.copyAll()
-
-        await search.indices.putAlias({
-          name: generateIndexName(model),
-          index: newAlias
-        })
-        await search.indices.delete({ index: currentAlias })
-      } catch(error) {
-        if(error.meta) console.error("REINDEXING ERROR", JSON.stringify(error.meta))
-        else console.error("REINDEXING ERROR", error)
-        throw error
-      }
-    } else {
-      for(let change of changes) {
-        switch (change.operation) {
-          case "createProperty": {
-            let properties = {}
-            properties[change.name] = generatePropertyMapping(change.property)
-            const currentAlias = await getCurrentAlias(change.model)
-            await search.indices.putMapping({
-              index: currentAlias,
-              body: {properties}
-            }).catch(error => {
-              console.error('ES ERROR', error.meta.body.error)
-              throw error
-            })
-            if(typeof change.property.defaultValue != 'undefined')
-              await setPropertyDefaultValue(currentAlias, change.name, change.property.defaultValue)
-          } break
-        }
-      }
-    }
-  }
-
-}
-
-module.exports = updateElasticSearch

package/lib/utils/AnalyticsWriter.js
REMOVED

@@ -1,79 +0,0 @@
-const { Client: ElasticSearch } = require('@elastic/elasticsearch')
-
-class AnalyticsWriter {
-
-  constructor(indexPrefix) {
-    this.dbPromise = null
-    this.currentIndex = null
-    this.isWriting = false
-    this.indexPrefix = indexPrefix
-    this.queue = []
-  }
-
-  initDb(index) {
-    if(this.currentIndex == index && this.dbPromise) return this.dbPromise
-    this.dbPromise = new Promise(async (resolve, reject) => {
-      const db = new ElasticSearch({ node: process.env.ANALYTICS_URL || 'http://localhost:9200' })
-      this.currentIndex = index
-      await db.indices.create({
-        index: this.currentIndex,
-        body: {
-          mappings: {
-            properties: {
-              timestamp: {
-                type: "date"
-              },
-              clientTS: {
-                type: "date"
-              }
-            }
-          }
-        }
-      }).catch(err => {
-        if(err.meta.body) {
-          console.error("ES ERR: ", err.meta.body)
-          if(err.meta.body.error.type == 'resource_already_exists_exception') return db
-        } else {
-          console.error("ES ERROR: ", err)
-        }
-        throw err
-      })
-      db.info(console.log)
-      resolve(db)
-    })
-    return this.dbPromise
-  }
-
-  saveEvents(events) {
-    this.queue = this.queue.concat(events)
-    this.writeEvents()
-  }
-
-  writeEvents() {
-    if(this.isWriting) return
-    if(this.queue.length == 0) return
-    this.isWriting = true
-    const index = this.indexPrefix+(new Date()).toISOString().slice(0, 7)
-    const data = this.queue.slice()
-    this.queue = []
-    let operations = new Array(data.length*2)
-    for(let i = 0; i < data.length; i++) {
-      operations[i * 2] = { index: { } }
-      operations[i * 2 + 1] = data[i]
-    }
-    this.initDb(index).then(db => db.bulk({
-      index,
-      body: operations
-    })).then(result => {
-      this.isWriting = false
-      if(this.queue.length > 0) setTimeout(() => this.writeEvents(), 10)
-    }).catch(error => {
-      console.error("COULD NOT WRITE EVENTS to ES!", error)
-      this.queue = data.concat(this.queue)
-      this.isWriting = false
-    })
-  }
-
-}
-
-module.exports = AnalyticsWriter