@jsreport/jsreport-core 3.4.2 → 3.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -0
- package/lib/main/blobStorage/blobStorage.js +8 -0
- package/lib/main/optionsSchema.js +16 -1
- package/lib/main/profiler.js +166 -75
- package/lib/main/reporter.js +87 -74
- package/lib/main/schemaValidator.js +30 -0
- package/lib/main/settings.js +1 -2
- package/lib/main/store/collection.js +10 -8
- package/lib/main/store/documentStore.js +19 -0
- package/lib/main/store/setupValidateId.js +7 -1
- package/lib/main/store/setupValidateShortid.js +4 -0
- package/lib/shared/normalizeMetaFromLogs.js +1 -1
- package/lib/shared/reporter.js +16 -0
- package/lib/worker/render/executeEngine.js +30 -7
- package/lib/worker/render/profiler.js +14 -12
- package/lib/worker/render/render.js +0 -4
- package/lib/worker/sandbox/runInSandbox.js +1 -1
- package/package.json +6 -6
- package/lib/main/monitoring.js +0 -92
package/README.md
CHANGED

@@ -282,6 +282,15 @@ jsreport.documentStore.collection('templates')
 
 ## Changelog
 
+### 3.5.0
+
+- fix parsing issue of code with comment in the sandbox (helpers, scripts)
+- improve profiling when there is big data
+- make transactions support in store configurable
+- improve timeout for the whole request
+- fix applying req.options.timeout when enableRequestReportTimeout is true
+- optimization regarding profile persistence
+
 ### 3.4.2
 
 - update dep `vm2` to fix security vulnerability in sandbox
package/lib/main/optionsSchema.js
CHANGED

@@ -126,7 +126,13 @@ module.exports.getRootSchemaOptions = () => ({
     store: {
       type: 'object',
       properties: {
-        provider: { type: 'string', enum: ['memory'] }
+        provider: { type: 'string', enum: ['memory'] },
+        transactions: {
+          type: 'object',
+          properties: {
+            enabled: { type: 'boolean', default: true }
+          }
+        }
       }
     },
     blobStorage: {

@@ -163,6 +169,10 @@ module.exports.getRootSchemaOptions = () => ({
       type: 'object',
       default: {},
       properties: {
+        defaultMode: {
+          type: 'string',
+          default: 'standard'
+        },
         maxProfilesHistory: {
           type: 'number',
           default: 1000

@@ -171,6 +181,11 @@ module.exports.getRootSchemaOptions = () => ({
           type: ['string', 'number'],
           '$jsreport-acceptsDuration': true,
           default: '1m'
+        },
+        maxResponseSize: {
+          type: ['string', 'number'],
+          '$jsreport-acceptsSize': true,
+          default: '50mb'
         }
       }
     }
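The three schema additions above surface as new configuration options in 3.5.0. A minimal sketch of setting them programmatically (illustrative values; per the schema the defaults are `transactions.enabled: true`, `defaultMode: 'standard'` and `maxResponseSize: '50mb'`):

```js
// sketch only – options passed to the jsreport-core factory
const jsreport = require('@jsreport/jsreport-core')({
  store: {
    provider: 'memory',
    // new in 3.5.0: store transactions can be switched off entirely
    transactions: { enabled: false }
  },
  profiler: {
    // new in 3.5.0: default profiling mode ('full' | 'standard' | 'disabled' per the ProfileType schema)
    defaultMode: 'standard',
    // new in 3.5.0: responses above this size are not embedded in profile events
    maxResponseSize: '10mb'
  }
})

jsreport.init().then(() => jsreport.close())
```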
package/lib/main/profiler.js
CHANGED

@@ -1,6 +1,7 @@
 const EventEmitter = require('events')
 const extend = require('node.extend.without.arrays')
 const generateRequestId = require('../shared/generateRequestId')
+const fs = require('fs/promises')
 
 module.exports = (reporter) => {
   reporter.documentStore.registerEntityType('ProfileType', {

@@ -8,9 +9,9 @@ module.exports = (reporter) => {
     timestamp: { type: 'Edm.DateTimeOffset', schema: { type: 'null' } },
     finishedOn: { type: 'Edm.DateTimeOffset', schema: { type: 'null' } },
     state: { type: 'Edm.String' },
-
-
-
+    error: { type: 'Edm.String' },
+    mode: { type: 'Edm.String', schema: { enum: ['full', 'standard', 'disabled'] } },
+    blobName: { type: 'Edm.String' }
   })
 
   reporter.documentStore.registerEntitySet('profiles', {

@@ -19,9 +20,41 @@
   })
 
   const profilersMap = new Map()
-  const profilerAppendChain = new Map()
 
-
+  const profilerOperationsChainsMap = new Map()
+  function runInProfilerChain (fn, req) {
+    if (req.context.profiling.mode === 'disabled') {
+      return
+    }
+
+    profilerOperationsChainsMap.set(req.context.rootId, profilerOperationsChainsMap.get(req.context.rootId).then(async () => {
+      if (req.context.profiling.chainFailed) {
+        return
+      }
+
+      try {
+        await fn()
+      } catch (e) {
+        reporter.logger.warn('Failed persist profile', e)
+        req.context.profiling.chainFailed = true
+      }
+    }))
+  }
+
+  function createProfileMessage (m, req) {
+    m.timestamp = new Date().getTime()
+    m.id = generateRequestId()
+    m.previousOperationId = m.previousOperationId || null
+    if (m.type !== 'log') {
+      m.operationId = m.operationId || generateRequestId()
+      req.context.profiling.lastOperationId = m.operationId
+      req.context.profiling.lastEventId = m.id
+    }
+
+    return m
+  }
+
+  function emitProfiles (events, req) {
     if (events.length === 0) {
       return
     }

@@ -44,14 +77,16 @@
       req.context.profiling.lastOperation = lastOperation
     }
 
-
+    runInProfilerChain(() => {
+      if (req.context.profiling.logFilePath) {
+        return fs.appendFile(req.context.profiling.logFilePath, Buffer.from(events.map(m => JSON.stringify(m)).join('\n') + '\n'))
+      }
+
       return reporter.blobStorage.append(
         req.context.profiling.entity.blobName,
         Buffer.from(events.map(m => JSON.stringify(m)).join('\n') + '\n'), req
-    )
-
-    })
-  }))
+      )
+    }, req)
   }
 
   reporter.registerMainAction('profile', async (events, req) => {

@@ -62,8 +97,7 @@
     req.context = req.context || {}
     req.context.rootId = reporter.generateRequestId()
     req.context.profiling = {
-      mode: profileMode == null ? 'full' : profileMode
-      isAttached: true
+      mode: profileMode == null ? 'full' : profileMode
     }
     const profiler = new EventEmitter()
     profilersMap.set(req.context.rootId, profiler)

@@ -71,88 +105,136 @@
     return profiler
   }
 
-  reporter.
-  profilerAppendChain.set(req.context.rootId, Promise.resolve())
-
+  reporter.beforeRenderWorkerAllocatedListeners.add('profiler', async (req) => {
     req.context.profiling = req.context.profiling || {}
+
+    if (req.context.profiling.mode == null) {
+      const profilerSettings = await reporter.settings.findValue('profiler', req)
+      const defaultMode = reporter.options.profiler.defaultMode || 'standard'
+      req.context.profiling.mode = (profilerSettings != null && profilerSettings.mode != null) ? profilerSettings.mode : defaultMode
+    }
+
+    profilerOperationsChainsMap.set(req.context.rootId, Promise.resolve())
+
     req.context.profiling.lastOperation = null
 
-
+    const blobName = `profiles/${req.context.rootId}.log`
 
-    const
+    const profile = {
+      _id: reporter.documentStore.generateId(),
+      timestamp: new Date(),
+      state: 'queued',
+      mode: req.context.profiling.mode,
+      blobName
+    }
 
-    if (
-    const
-
-    // store a copy to prevent side-effects
-    req.context.resolvedTemplate = extend(true, {}, template)
+    if (!reporter.blobStorage.supportsAppend) {
+      const { pathToFile } = await reporter.writeTempFile((uuid) => `${uuid}.log`, '')
+      req.context.profiling.logFilePath = pathToFile
     }
 
-
-
-
-
-
-
+    runInProfilerChain(async () => {
+      req.context.skipValidationFor = profile
+      await reporter.documentStore.collection('profiles').insert(profile, req)
+    }, req)
+
+    req.context.profiling.entity = profile
+
+    const profileStartOperation = createProfileMessage({
+      type: 'operationStart',
+      subtype: 'profile',
+      data: profile,
+      doDiffs: false
+    }, req)
+
+    req.context.profiling.profileStartOperationId = profileStartOperation.operationId
+
+    emitProfiles([profileStartOperation], req)
+
+    emitProfiles([createProfileMessage({
+      type: 'log',
+      level: 'info',
+      message: `Render request ${req.context.reportCounter} queued for execution and waiting for availible worker`,
+      previousOperationId: profileStartOperation.operationId
+    }, req)], req)
+  })
+
+  reporter.beforeRenderListeners.add('profiler', async (req, res) => {
+    const update = {
+      state: 'running'
     }
 
-
-
+    const template = await reporter.templates.resolveTemplate(req)
+    if (template && template._id) {
+      req.context.resolvedTemplate = extend(true, {}, template)
+      const templatePath = await reporter.folders.resolveEntityPath(template, 'templates', req)
+      const blobName = `profiles/${templatePath.substring(1)}/${req.context.rootId}.log`
+      update.templateShortid = template.shortid
+
+      const originalBlobName = req.context.profiling.entity.blobName
+      // we want to store the profile into blobName path reflecting the template path so we need to copy the blob to new path now
+      runInProfilerChain(async () => {
+        if (req.context.profiling.logFilePath == null) {
+          const content = await reporter.blobStorage.read(originalBlobName, req)
+          await reporter.blobStorage.write(blobName, content, req)
+          return reporter.blobStorage.remove(originalBlobName, req)
+        }
+      }, req)
+
+      update.blobName = blobName
     }
 
-
-
-
-
-
-
+    runInProfilerChain(() => {
+      req.context.skipValidationFor = update
+      return reporter.documentStore.collection('profiles').update({
+        _id: req.context.profiling.entity._id
+      }, {
+        $set: update
+      }, req)
     }, req)
 
-    req.context.profiling.entity
+    Object.assign(req.context.profiling.entity, update)
   })
 
   reporter.afterRenderListeners.add('profiler', async (req, res) => {
+    emitProfiles([createProfileMessage({
+      type: 'operationEnd',
+      doDiffs: false,
+      previousEventId: req.context.profiling.lastEventId,
+      previousOperationId: req.context.profiling.lastOperationId,
+      operationId: req.context.profiling.profileStartOperationId
+    }, req)], req)
+
     res.meta.profileId = req.context.profiling?.entity?._id
-
-
-
-
-
-
-
-
+
+    runInProfilerChain(async () => {
+      if (req.context.profiling.logFilePath != null) {
+        const content = await fs.readFile(req.context.profiling.logFilePath)
+        await reporter.blobStorage.write(req.context.profiling.entity.blobName, content, req)
+        await fs.unlink(req.context.profiling.logFilePath)
+      }
+
+      const update = {
        state: 'success',
        finishedOn: new Date()
      }
-
-
-      reporter.documentStore.collection('profiles').update({
+      req.context.skipValidationFor = update
+      await reporter.documentStore.collection('profiles').update({
        _id: req.context.profiling.entity._id
      }, {
-        $set:
-
-
-
-
+        $set: update
+      }, req)
+    }, req)
+
+    // we don't remove from profiler requests map, because the renderErrorListeners are invoked if the afterRenderListener fails
   })
 
   reporter.renderErrorListeners.add('profiler', async (req, res, e) => {
     try {
       res.meta.profileId = req.context.profiling?.entity?._id
-      const profilerBlobPersistPromise = profilerAppendChain.get(req.context.rootId)
 
       if (req.context.profiling?.entity != null) {
-
-        _id: req.context.profiling.entity._id
-        }, {
-          $set: {
-            state: 'error',
-            finishedOn: new Date(),
-            error: e.toString()
-          }
-        }, req)
-
-        await emitProfiles([{
+        emitProfiles([{
           type: 'error',
           timestamp: new Date().getTime(),
           ...e,

@@ -160,20 +242,29 @@
           stack: e.stack,
           message: e.message
         }], req)
+        runInProfilerChain(async () => {
+          if (req.context.profiling.logFilePath != null) {
+            const content = await fs.readFile(req.context.profiling.logFilePath, 'utf8')
+            await reporter.blobStorage.write(req.context.profiling.entity.blobName, content, req)
+            await fs.unlink(req.context.profiling.logFilePath)
+          }
 
-
-
+          const update = {
+            state: 'error',
+            finishedOn: new Date(),
+            error: e.toString()
+          }
+          req.context.skipValidationFor = update
+          await reporter.documentStore.collection('profiles').update({
            _id: req.context.profiling.entity._id
          }, {
-            $set:
-
-
-          }, req).catch((e) => reporter.logger.error('Failed to update profile blobPersisted', e))
-        })
+            $set: update
+          }, req)
+        }, req)
       }
     } finally {
       profilersMap.delete(req.context.rootId)
-
+      profilerOperationsChainsMap.delete(req.context.rootId)
     }
   })
 

@@ -197,8 +288,8 @@
     clearInterval(profilesCleanupInterval)
   }
 
-  for (const key of
-  const profileAppendPromise =
+  for (const key of profilerOperationsChainsMap.keys()) {
+    const profileAppendPromise = profilerOperationsChainsMap.get(key)
     if (profileAppendPromise) {
      await profileAppendPromise
    }
package/lib/main/reporter.js
CHANGED

@@ -28,10 +28,10 @@ const Reporter = require('../shared/reporter')
 const Request = require('./request')
 const generateRequestId = require('../shared/generateRequestId')
 const Profiler = require('./profiler')
-const Monitoring = require('./monitoring')
 const migrateXlsxTemplatesToAssets = require('./migration/xlsxTemplatesToAssets')
 const migrateResourcesToAssets = require('./migration/resourcesToAssets')
 const semver = require('semver')
+let reportCounter = 0
 
 class MainReporter extends Reporter {
   constructor (options, defaults) {

@@ -69,6 +69,7 @@
 
     this.logger = createLogger()
     this.beforeMainActionListeners = this.createListenerCollection('beforeMainAction')
+    this.beforeRenderWorkerAllocatedListeners = this.createListenerCollection('beforeRenderWorkerAllocated')
   }
 
   discover () {

@@ -165,9 +166,11 @@
     this._initializing = true
 
     if (this.compilation) {
-      this.compilation.resource('
-      this.compilation.resource('
-      this.compilation.resource('
+      this.compilation.resource('vm2-events.js', require.resolve('vm2/lib/events.js'))
+      this.compilation.resource('vm2-resolver-compat.js', require.resolve('vm2/lib/resolver-compat.js'))
+      this.compilation.resource('vm2-resolver.js', require.resolve('vm2/lib/resolver.js'))
+      this.compilation.resource('vm2-setup-node-sandbox.js', require.resolve('vm2/lib/setup-node-sandbox.js'))
+      this.compilation.resource('vm2-setup-sandbox.js', require.resolve('vm2/lib/setup-sandbox.js'))
     }
 
     try {

@@ -180,7 +183,6 @@
     blobStorageActions(this)
     Templates(this)
     Profiler(this)
-    Monitoring(this)
 
     this.folders = Object.assign(this.folders, Folders(this))
 

@@ -267,8 +269,6 @@
       name: 'none'
     })
 
-    this.monitoring.init()
-
     this.logger.info('reporter initialized')
     this._initialized = true
     this._initExecution.resolve()

@@ -306,6 +306,34 @@
     await validateReservedName(this, c, doc)
   }
 
+  async _handleRenderError (req, res, err) {
+    if (err.code === 'WORKER_TIMEOUT') {
+      err.message = 'Report timeout'
+      if (req.context.profiling?.lastOperation != null && req.context.profiling?.entity != null) {
+        err.message += `. Last profiler operation: (${req.context.profiling.lastOperation.subtype}) ${req.context.profiling.lastOperation.name}`
+      }
+
+      if (req.context.http != null) {
+        const profileUrl = `${req.context.http.baseUrl}/studio/profiles/${req.context.profiling.entity._id}`
+        err.message += `. You can inspect and find more details here: ${profileUrl}`
+      }
+
+      err.weak = true
+    }
+
+    if (err.code === 'WORKER_ABORTED') {
+      err.message = 'Report cancelled'
+      err.weak = true
+    }
+
+    if (!err.logged) {
+      const logFn = err.weak ? this.logger.warn : this.logger.error
+      logFn(`Report render failed: ${err.message}${err.stack != null ? ' ' + err.stack : ''}`, req)
+    }
+    await this.renderErrorListeners.fire(req, res, err)
+    throw err
+  }
+
   /**
    * Main method for invoking rendering
    * render({ template: { content: 'foo', engine: 'none', recipe: 'html' }, data: { foo: 'hello' } })

@@ -324,23 +352,30 @@
     req.context = Object.assign({}, req.context)
     req.context.rootId = req.context.rootId || generateRequestId()
     req.context.id = req.context.rootId
+    req.context.reportCounter = ++reportCounter
+    req.context.startTimestamp = new Date().getTime()
 
-
-      timeout: this.options.reportTimeout
-    })
-
-    let keepWorker
+    let worker
     let workerAborted
+    let dontCloseProcessing
+    const res = { meta: {} }
+    try {
+      await this.beforeRenderWorkerAllocatedListeners.fire(req)
 
-
-
-      workerAborted = true
-      worker.release(req).catch((e) => this.logger.error('Failed to release worker ' + e))
+      worker = await this._workersManager.allocate(req, {
+        timeout: this.getReportTimeout(req)
       })
-    }
 
-
-
+      if (options.abortEmitter) {
+        options.abortEmitter.once('abort', () => {
+          if (workerAborted) {
+            return
+          }
+          workerAborted = true
+          worker.release(req).catch((e) => this.logger.error('Failed to release worker ' + e))
+        })
+      }
+
       if (workerAborted) {
         throw this.createError('Request aborted by client')
       }

@@ -351,7 +386,7 @@
         req,
         data: {}
       }, {
-        timeout: this.
+        timeout: this.getReportTimeout(req)
       })
       req = result
     }

@@ -369,39 +404,40 @@
       }
     }
 
-
-
-    if (
-      this.options.enableRequestReportTimeout &&
-      req.options &&
-      req.options.timeout != null
-    ) {
-      reportTimeout = req.options.timeout
-    }
+    const reportTimeout = this.getReportTimeout(req)
 
     await this.beforeRenderListeners.fire(req, res, { worker })
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if (workerAborted) {
+      throw this.createError('Request aborted by client')
+    }
+
+    if (req.context.clientNotification) {
+      process.nextTick(async () => {
+        try {
+          const responseResult = await this.executeWorkerAction('render', {}, {
+            timeout: reportTimeout + this.options.reportTimeoutMargin,
+            worker
+          }, req)
+
+          Object.assign(res, responseResult)
+          await this.afterRenderListeners.fire(req, res)
+        } catch (err) {
+          await this._handleRenderError(req, res, err).catch((e) => {})
+        } finally {
+          if (!workerAborted) {
+            await worker.release(req)
+          }
         }
-      }
-
+      })
+
+      dontCloseProcessing = true
+      const r = {
+        ...req.context.clientNotification,
+        stream: Readable.from(req.context.clientNotification.content)
+      }
+      delete req.context.clientNotification
+      return r
     }
 
     if (workerAborted) {

@@ -418,33 +454,10 @@
       res.stream = Readable.from(res.content)
       return res
     } catch (err) {
-
-      err.message = 'Report timeout'
-      if (req.context.profiling?.lastOperation != null && req.context.profiling?.entity != null) {
-        err.message += `. Last profiler operation: (${req.context.profiling.lastOperation.subtype}) ${req.context.profiling.lastOperation.name}`
-      }
-
-      if (req.context.http != null) {
-        const profileUrl = `${req.context.http.baseUrl}/studio/profiles/${req.context.profiling.entity._id}`
-        err.message += `. You can inspect and find more details here: ${profileUrl}`
-      }
-
-      err.weak = true
-      }
-
-      if (err.code === 'WORKER_ABORTED') {
-        err.message = 'Report cancelled'
-        err.weak = true
-      }
-
-      if (!err.logged) {
-        const logFn = err.weak ? this.logger.warn : this.logger.error
-        logFn(`Report render failed: ${err.message}${err.stack != null ? ' ' + err.stack : ''}`, req)
-      }
-      await this.renderErrorListeners.fire(req, res, err)
+      await this._handleRenderError(req, res, err)
       throw err
     } finally {
-      if (!workerAborted && !
+      if (worker && !workerAborted && !dontCloseProcessing) {
        await worker.release(req)
      }
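Beyond the refactor into `_handleRenderError`, the render flow now allocates the worker itself (guarded by the new `beforeRenderWorkerAllocated` listeners) and honors an `options.abortEmitter`, releasing the worker as soon as the caller signals `abort`. A hedged sketch of how a caller might use that, assuming the second argument of `render` is the options object carrying `abortEmitter`, as the diff suggests:

```js
const EventEmitter = require('events')
const jsreport = require('@jsreport/jsreport-core')()

async function run () {
  await jsreport.init()

  const abortEmitter = new EventEmitter()
  const rendering = jsreport.render({
    template: { content: 'Hello', engine: 'none', recipe: 'html' }
  }, { abortEmitter }) // assumption: abortEmitter travels in the render options

  // cancel the request, e.g. when the HTTP client disconnects
  abortEmitter.emit('abort')

  try {
    await rendering
  } catch (err) {
    // an aborted worker surfaces as the weak 'Report cancelled' error (WORKER_ABORTED)
    console.log(err.message)
  } finally {
    await jsreport.close()
  }
}

run()
```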
package/lib/main/schemaValidator.js
CHANGED

@@ -3,6 +3,7 @@ const set = require('lodash.set')
 const hasOwn = require('has-own-deep')
 const unsetValue = require('unset-value')
 const ms = require('ms')
+const bytes = require('bytes')
 const Ajv = require('ajv')
 
 const validatorCollection = new WeakMap()

@@ -126,6 +127,35 @@ class SchemaValidator {
     }
   })
 
+  validator.addKeyword('$jsreport-acceptsSize', {
+    modifying: true,
+    compile: (sch) => {
+      if (sch !== true) {
+        return () => true
+      }
+
+      return (data, dataPath, parentData, parentDataProperty) => {
+        if (typeof data !== 'string' && typeof data !== 'number') {
+          return false
+        }
+
+        if (typeof data === 'number') {
+          return true
+        }
+
+        const newData = bytes(data)
+
+        if (newData == null) {
+          return false
+        }
+
+        parentData[parentDataProperty] = newData
+
+        return true
+      }
+    }
+  })
+
   let rootValidate
 
   if (options.rootSchema != null) {
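The new `$jsreport-acceptsSize` keyword mirrors the existing `$jsreport-acceptsDuration` one: string values are parsed with the `bytes` package and the parsed number is written back into the validated options object, so consumers such as the profiler's `maxResponseSize` check compare plain byte counts. In isolation the coercion looks like this:

```js
const bytes = require('bytes')

const options = { profiler: { maxResponseSize: '50mb' } }

// string values get parsed; numbers are accepted as-is; unparsable strings fail validation
const parsed = bytes(options.profiler.maxResponseSize)
if (parsed == null) {
  throw new Error('invalid size value')
}
options.profiler.maxResponseSize = parsed

console.log(options.profiler.maxResponseSize) // 52428800
```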
package/lib/main/settings.js
CHANGED

@@ -25,7 +25,7 @@ Settings.prototype.get = function (key) {
 }
 
 Settings.prototype.findValue = async function (key, req) {
-  const res = await this.documentStore.collection('settings').find({ key: key }, req)
+  const res = await this.documentStore.collection('settings').find({ key: key }, localReqWithoutAuthorization(req))
   if (res.length !== 1) {
     return null
   }

@@ -49,7 +49,6 @@ Settings.prototype.addOrSet = async function (key, avalue, req) {
   const value = typeof avalue !== 'string' ? JSON.stringify(avalue) : avalue
 
   const updateCount = await this.documentStore.collection('settings').update({ key }, { $set: { key: key, value: value } }, localReqWithoutAuthorization(req))
-
   if (updateCount === 0) {
     await this.documentStore.collection('settings').insert({ key: key, value: value }, localReqWithoutAuthorization(req))
     return 1
package/lib/main/store/collection.js
CHANGED

@@ -86,16 +86,18 @@ module.exports = (entitySet, provider, model, validator, encryption, transaction
     validateEntityName(data.name)
   }
 
-
+  if (req == null || req.context.skipValidationFor !== data) {
+    const entityType = model.entitySets[entitySet] ? model.entitySets[entitySet].normalizedEntityTypeName : null
 
-
-
+    if (entityType != null && validator.getSchema(entityType) != null) {
+      const validationResult = validator.validate(entityType, data)
 
-
-
-
-
-
+      if (!validationResult.valid) {
+        throw createError(`Error when trying to insert into "${entitySet}" collection. input contain values that does not match the schema. ${validationResult.fullErrorMessage}`, {
+          weak: true,
+          statusCode: 400
+        })
+      }
     }
   }
 
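The `skipValidationFor` checks added here (and in `setupValidateId.js` / `setupValidateShortid.js` below) let internal code flag one specific document or `$set` object so that schema, `_id` and `shortid` validation is bypassed for exactly that write; the profiler sets the flag right before inserting or updating its profile entities. The contract reduced to a runnable sketch (`validate` is a hypothetical stand-in for the real validators):

```js
function beforeInsert (doc, req) {
  // validation is skipped only when this exact object instance was flagged
  if (req == null || req.context.skipValidationFor !== doc) {
    validate(doc)
  }
}

function validate (doc) {
  if (doc.name == null) {
    throw new Error('name is required')
  }
}

const req = { context: {} }
const profile = { state: 'queued' } // internal write without a "name"

req.context.skipValidationFor = profile
beforeInsert(profile, req) // ok: the flagged object bypasses validation
beforeInsert({ state: 'queued' }, req) // throws: a different object is still validated
```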
package/lib/main/store/documentStore.js
CHANGED

@@ -372,6 +372,10 @@ const DocumentStore = (options, validator, encryption) => {
     },
 
     async beginTransaction (req) {
+      if (this.options.store?.transactions?.enabled === false) {
+        return
+      }
+
       if (req.context.storeTransaction && transactions.has(req.context.storeTransaction)) {
         throw new Error('Can not call store.beginTransaction when an active transaction already exists, make sure you are not calling store.beginTransaction more than once')
       }

@@ -386,6 +390,10 @@
     },
 
     async commitTransaction (req) {
+      if (this.options.store?.transactions?.enabled === false) {
+        return
+      }
+
       const tranId = req.context.storeTransaction
       const tran = transactions.get(tranId)
 

@@ -400,6 +408,10 @@
     },
 
     async rollbackTransaction (req) {
+      if (this.options.store?.transactions?.enabled === false) {
+        return
+      }
+
       const tranId = req.context.storeTransaction
       const tran = transactions.get(tranId)
 

@@ -411,6 +423,13 @@
 
       transactions.delete(tranId)
       delete req.context.storeTransaction
+    },
+
+    generateId () {
+      if (this.provider.generateId) {
+        return this.provider.generateId()
+      }
+      return uuidv4()
     }
   }
 
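Two store additions support the profiler rework: the transaction methods become no-ops when `store.transactions.enabled` is `false` (the "make transactions support in store configurable" changelog item), and `documentStore.generateId()` lets callers pre-assign an `_id` before the insert actually runs, delegating to the provider when it can generate ids and falling back to a uuid. A small sketch of that fallback (assumes the `uuid` package is available, as in the real module):

```js
const { v4: uuidv4 } = require('uuid')

// stand-alone model of the new generateId(): prefer the provider's generator, else uuid
function generateId (provider) {
  if (provider.generateId) {
    return provider.generateId()
  }
  return uuidv4()
}

console.log(generateId({}))                                  // random uuid
console.log(generateId({ generateId: () => 'custom-id-1' })) // 'custom-id-1'
```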
package/lib/main/store/setupValidateId.js
CHANGED

@@ -3,10 +3,16 @@ module.exports = (reporter) => {
   reporter.initializeListeners.add('core-validate-id', () => {
     for (const c of Object.keys(reporter.documentStore.collections)) {
       reporter.documentStore.collection(c).beforeInsertListeners.add('validate-id', (doc, req) => {
-
+        if (req == null || req.context.skipValidationFor !== doc) {
+          return validateIdForStoreChange(reporter, c, doc._id, undefined, req)
+        }
       })
 
       reporter.documentStore.collection(c).beforeUpdateListeners.add('validate-id', async (q, update, opts, req) => {
+        if (req != null && req.context.skipValidationFor === update) {
+          return
+        }
+
         if (update.$set && opts && opts.upsert === true) {
           await validateIdForStoreChange(reporter, c, update.$set._id, undefined, req)
         }
package/lib/main/store/setupValidateShortid.js
CHANGED

@@ -31,6 +31,10 @@ module.exports = (reporter) => {
   }
 
   async function validateShortid (reporter, collectionName, doc, originalIdValue, req) {
+    if (req != null && req.context.skipValidationFor === doc) {
+      return
+    }
+
     const shortid = doc.shortid
 
     if (!shortid) {
package/lib/shared/normalizeMetaFromLogs.js
CHANGED

@@ -13,7 +13,7 @@ module.exports = (level, msg, meta) => {
 
   // TODO adding cancel looks bad, its before script is adding req.cancel()
   // excluding non relevant properties for the log
-  const newMeta = Object.assign({}, omit(meta, ['template', 'options', 'data', 'context', 'timestamp', 'cancel']))
+  const newMeta = Object.assign({}, omit(meta, ['rawContent', 'template', 'options', 'data', 'context', 'timestamp', 'cancel']))
 
   if (newMeta.rootId == null && meta.context.rootId != null) {
     newMeta.rootId = meta.context.rootId
package/lib/shared/reporter.js
CHANGED

@@ -51,6 +51,22 @@ class Reporter extends EventEmitter {
     return generateRequestId()
   }
 
+  /**
+   * @public Ensures that we get the proper report timeout in case when custom timeout per request was enabled
+   */
+  getReportTimeout (req) {
+    const elapsedTime = req.context.startTimestamp ? (new Date().getTime() - req.context.startTimestamp) : 0
+    if (
+      this.options.enableRequestReportTimeout &&
+      req.options != null &&
+      req.options.timeout != null
+    ) {
+      return Math.max(0, req.options.timeout - elapsedTime)
+    }
+
+    return Math.max(0, this.options.reportTimeout - elapsedTime)
+  }
+
   /**
    * Ensures that the jsreport auto-cleanup temp directory (options.tempAutoCleanupDirectory) exists by doing a mkdir call
    *
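`getReportTimeout` is the centerpiece of the "improve timeout for the whole request" and "fix applying req.options.timeout when enableRequestReportTimeout is true" changelog items: every timeout handed to worker allocation or worker actions is the configured budget minus the time already spent since `req.context.startTimestamp`, and the per-request `req.options.timeout` only applies when `enableRequestReportTimeout` is on. A worked example with the same logic extracted into a standalone function:

```js
function getReportTimeout (options, req, now = Date.now()) {
  const elapsedTime = req.context.startTimestamp ? (now - req.context.startTimestamp) : 0
  if (options.enableRequestReportTimeout && req.options != null && req.options.timeout != null) {
    return Math.max(0, req.options.timeout - elapsedTime)
  }
  return Math.max(0, options.reportTimeout - elapsedTime)
}

const startTimestamp = Date.now() - 15000 // the request started 15s ago

// global budget only: 60s configured - 15s elapsed = 45s left
console.log(getReportTimeout({ reportTimeout: 60000 }, { context: { startTimestamp } })) // 45000

// per-request timeout honored once enableRequestReportTimeout is true: 20s - 15s = 5s left
console.log(getReportTimeout(
  { reportTimeout: 60000, enableRequestReportTimeout: true },
  { context: { startTimestamp }, options: { timeout: 20000 } }
)) // 5000
```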
package/lib/worker/render/executeEngine.js
CHANGED

@@ -14,6 +14,7 @@ module.exports = (reporter) => {
   reporter.templatingEngines = { cache }
 
   const executionFnParsedParamsMap = new Map()
+  const executionAsyncResultsMap = new Map()
 
   const templatingEnginesEvaluate = async (mainCall, { engine, content, helpers, data }, { entity, entitySet }, req) => {
     const engineImpl = reporter.extensionsManager.engines.find((e) => e.name === engine)

@@ -28,19 +29,23 @@
       executionFnParsedParamsMap.set(req.context.id, new Map())
     }
 
+    const executionId = nanoid(7)
+
     try {
       const res = await executeEngine({
         engine: engineImpl,
         content,
         helpers,
         data
-      }, { handleErrors: false, entity, entitySet }, req)
+      }, { executionId, handleErrors: false, entity, entitySet }, req)
 
       return res.content
     } finally {
       if (mainCall) {
         executionFnParsedParamsMap.delete(req.context.id)
       }
+
+      executionAsyncResultsMap.delete(executionId)
     }
   }
 

@@ -54,6 +59,12 @@
     proxy.templatingEngines = {
       evaluate: async (executionInfo, entityInfo) => {
         return templatingEnginesEvaluate(false, executionInfo, entityInfo, req)
+      },
+      waitForAsyncHelpers: async () => {
+        if (context.__executionId != null && executionAsyncResultsMap.has(context.__executionId)) {
+          const asyncResultMap = executionAsyncResultsMap.get(context.__executionId)
+          return Promise.all([...asyncResultMap.keys()].map((k) => asyncResultMap.get(k)))
+        }
       }
     }
   })

@@ -64,6 +75,8 @@
     req.data.__rootDirectory = reporter.options.rootDirectory
     req.data.__parentModuleDirectory = reporter.options.parentModuleDirectory
 
+    const executionId = nanoid(7)
+
     try {
       return await executeEngine({
         engine,

@@ -71,16 +84,18 @@
         helpers: req.template.helpers,
         data: req.data
       }, {
+        executionId,
         handleErrors: true,
         entity: req.template,
         entitySet: 'templates'
       }, req)
     } finally {
       executionFnParsedParamsMap.delete(req.context.id)
+      executionAsyncResultsMap.delete(executionId)
     }
   }
 
-  async function executeEngine ({ engine, content, helpers, data }, { handleErrors, entity, entitySet }, req) {
+  async function executeEngine ({ engine, content, helpers, data }, { executionId, handleErrors, entity, entitySet }, req) {
     let entityPath
 
     if (entity._id) {

@@ -104,9 +119,14 @@
     const joinedHelpers = systemHelpersStr + '\n' + (helpers || '')
     const executionFnParsedParamsKey = `entity:${entity.shortid || 'anonymous'}:helpers:${joinedHelpers}`
 
-    const executionFn = async ({ require, console, topLevelFunctions }) => {
+    const executionFn = async ({ require, console, topLevelFunctions, context }) => {
       const asyncResultMap = new Map()
-
+
+      context.__executionId = executionId
+
+      executionAsyncResultsMap.set(executionId, asyncResultMap)
+      executionFnParsedParamsMap.get(req.context.id).get(executionFnParsedParamsKey).resolve({ require, console, topLevelFunctions, context })
+
       const key = `template:${content}:${engine.name}`
 
       if (!cache.has(key)) {

@@ -143,16 +163,19 @@
       }
 
       return {
-
+        // handlebars escapes single brackets before execution to prevent errors on {#asset}
+        // we need to unescape them later here, because at the moment the engine.execute finishes
+        // the async helpers aren't executed yet
+        content: engine.unescape ? engine.unescape(contentResult) : contentResult
       }
     }
 
     // executionFnParsedParamsMap is there to cache parsed components helpers to speed up longer loops
     // we store there for the particular request and component a promise and only the first component gets compiled
     if (executionFnParsedParamsMap.get(req.context.id).has(executionFnParsedParamsKey)) {
-      const { require, console, topLevelFunctions } = await (executionFnParsedParamsMap.get(req.context.id).get(executionFnParsedParamsKey).promise)
+      const { require, console, topLevelFunctions, context } = await (executionFnParsedParamsMap.get(req.context.id).get(executionFnParsedParamsKey).promise)
 
-      return executionFn({ require, console, topLevelFunctions })
+      return executionFn({ require, console, topLevelFunctions, context })
     } else {
       const awaiter = {}
       awaiter.promise = new Promise((resolve) => {
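The `executionAsyncResultsMap` keyed by a per-execution `nanoid` is what backs the new `waitForAsyncHelpers` proxy method: the sandbox `context.__executionId` ties a running template execution to the map of still-pending async helper results, and `waitForAsyncHelpers` simply awaits them all. The bookkeeping in isolation (a simplified sketch, not the proxy API itself):

```js
const executionAsyncResultsMap = new Map()

function startExecution (executionId) {
  executionAsyncResultsMap.set(executionId, new Map())
}

// async helpers park their pending promises under the current execution id
function registerAsyncHelperResult (executionId, key, promise) {
  executionAsyncResultsMap.get(executionId).set(key, promise)
}

// mirrors proxy.templatingEngines.waitForAsyncHelpers() above
async function waitForAsyncHelpers (executionId) {
  if (executionId != null && executionAsyncResultsMap.has(executionId)) {
    const asyncResultMap = executionAsyncResultsMap.get(executionId)
    return Promise.all([...asyncResultMap.keys()].map((k) => asyncResultMap.get(k)))
  }
}

async function demo () {
  startExecution('exec-1')
  registerAsyncHelperResult('exec-1', 'a', Promise.resolve('first'))
  registerAsyncHelperResult('exec-1', 'b', Promise.resolve('second'))
  console.log(await waitForAsyncHelpers('exec-1')) // [ 'first', 'second' ]
  executionAsyncResultsMap.delete('exec-1') // mirrored by the finally blocks above
}

demo()
```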
package/lib/worker/render/profiler.js
CHANGED

@@ -57,9 +57,9 @@ class Profiler {
 
     if (m.type !== 'log') {
       req.context.profiling.lastEventId = m.id
+      m.operationId = m.operationId || generateRequestId()
     }
 
-    m.operationId = m.operationId || generateRequestId()
     if (m.previousOperationId == null && req.context.profiling.lastOperationId) {
       m.previousOperationId = req.context.profiling.lastOperationId
     }

@@ -72,15 +72,21 @@ class Profiler {
     let content = res.content
 
     if (content != null) {
-      if (
+      if (content.length > this.reporter.options.profiler.maxResponseSize) {
         content = {
-
-          encoding: 'base64'
+          tooLarge: true
         }
       } else {
-        content
-        content
-
+        if (isbinaryfile(content)) {
+          content = {
+            content: res.content.toString('base64'),
+            encoding: 'base64'
+          }
+        } else {
+          content = {
+            content: createPatch('res', req.context.profiling.resLastVal ? req.context.profiling.resLastVal.toString() : '', res.content.toString(), 0),
+            encoding: 'diff'
+          }
         }
       }
     }

@@ -93,7 +99,7 @@ class Profiler {
 
     m.req = { diff: createPatch('req', req.context.profiling.reqLastVal || '', stringifiedReq, 0) }
 
-    req.context.profiling.resLastVal = (res.content == null || isbinaryfile(res.content)) ? null : res.content.toString()
+    req.context.profiling.resLastVal = (res.content == null || isbinaryfile(res.content) || content.tooLarge) ? null : res.content.toString()
     req.context.profiling.resMetaLastVal = stringifiedResMeta
     req.context.profiling.reqLastVal = stringifiedReq
   }

@@ -129,10 +135,6 @@ class Profiler {
       previousOperationId: parentReq ? parentReq.context.profiling.lastOperationId : null
     }
 
-    if (!req.context.isChildRequest) {
-      profilerEvent.profileId = req.context.profiling.entity._id
-    }
-
     return this.emit(profilerEvent, req, res)
   }
 
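This hunk implements "improve profiling when there is big data": the worker-side profiler now short-circuits on responses larger than `profiler.maxResponseSize` instead of base64-encoding or diffing them into the profile events (and the later `resLastVal` line resets the diff baseline in that case). The decision, extracted into a small sketch (`makePatch` is a hypothetical stand-in for the real `createPatch` call):

```js
function profileResponseContent (content, previousText, maxResponseSize, isBinary) {
  if (content.length > maxResponseSize) {
    return { tooLarge: true } // only a marker is stored, never the payload itself
  }
  if (isBinary) {
    return { content: content.toString('base64'), encoding: 'base64' }
  }
  // text responses are stored as a diff against the previously profiled value
  return { content: makePatch(previousText || '', content.toString()), encoding: 'diff' }
}

function makePatch (before, after) {
  return `- ${before}\n+ ${after}` // hypothetical stand-in for createPatch('res', …)
}

console.log(profileResponseContent(Buffer.from('x'.repeat(100)), '', 50, false)) // { tooLarge: true }
console.log(profileResponseContent(Buffer.from('hello'), '', 50, true))          // base64 variant
console.log(profileResponseContent(Buffer.from('hello'), 'helo', 50, false))     // diff variant
```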
package/lib/worker/render/render.js
CHANGED

@@ -10,7 +10,6 @@ const Request = require('../../shared/request')
 const generateRequestId = require('../../shared/generateRequestId')
 const resolveReferences = require('./resolveReferences.js')
 const moduleHelper = require('./moduleHelper')
-let reportCounter = 0
 
 module.exports = (reporter) => {
   moduleHelper(reporter)

@@ -127,9 +126,6 @@
       response.meta.reportName = 'report'
     }
 
-    request.context.reportCounter = ++reportCounter
-    request.context.startTimestamp = new Date().getTime()
-
     if (parentReq == null) {
       reporter.requestModulesCache.set(request.context.rootId, Object.create(null))
     }
package/lib/worker/sandbox/runInSandbox.js
CHANGED

@@ -116,7 +116,7 @@ module.exports = (reporter) => {
 
   const functionNames = getTopLevelFunctions(userCode)
   const functionsCode = `return {${functionNames.map(h => `"${h}": ${h}`).join(',')}}`
-  const executionCode = `;(async () => { ${userCode}
+  const executionCode = `;(async () => { ${userCode} \n\n;${functionsCode} })()
     .then((topLevelFunctions) => {
       const mergedTopLevelFunctions = { ...topLevelFunctions, ...__topLevelFunctions }
 
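This one-line change is the changelog's "fix parsing issue of code with comment in the sandbox": the wrapper previously appears to have appended the injected `;${functionsCode}` on the same line as the user code, so a trailing `//` line comment in helpers or scripts swallowed it and the generated wrapper no longer parsed. The added `\n\n` makes the injected code start on a fresh line. A small illustration:

```js
const userCode = 'function hello () { return 1 } // trailing comment in user helpers'
const functionsCode = 'return {"hello": hello}'

// old shape: everything injected after the user code shares its line,
// so the line comment swallows it and the wrapper fails to parse
const broken = `;(async () => { ${userCode} ;${functionsCode} })()`

// new shape (the change above): the blank lines end the comment first
const fixed = `;(async () => { ${userCode} \n\n;${functionsCode} })()`

try {
  eval(broken)
} catch (e) {
  console.log('broken:', e.name) // broken: SyntaxError
}

eval(fixed).then((r) => console.log('fixed:', Object.keys(r))) // fixed: [ 'hello' ]
```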
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@jsreport/jsreport-core",
-  "version": "3.4.2",
+  "version": "3.5.0",
   "description": "javascript based business reporting",
   "keywords": [
     "report",

@@ -32,11 +32,11 @@
   "@babel/code-frame": "7.12.13",
   "@babel/parser": "7.14.4",
   "@babel/traverse": "7.12.9",
-  "@jsreport/advanced-workers": "1.2.
+  "@jsreport/advanced-workers": "1.2.1",
   "@jsreport/mingo": "2.4.1",
   "ajv": "6.12.6",
   "app-root-path": "2.0.1",
-  "
+  "bytes": "3.1.2",
   "camelcase": "5.0.0",
   "debug": "4.3.2",
   "decamelize": "2.0.0",

@@ -46,7 +46,7 @@
   "enhanced-resolve": "5.8.3",
   "has-own-deep": "1.1.0",
   "isbinaryfile": "4.0.0",
-  "listener-collection": "
+  "listener-collection": "2.0.0",
   "lodash.get": "4.4.2",
   "lodash.groupby": "4.6.0",
   "lodash.omit": "4.5.0",

@@ -54,7 +54,7 @@
   "lru-cache": "4.1.1",
   "ms": "2.1.3",
   "nanoid": "3.2.0",
-  "nconf": "0.
+  "nconf": "0.12.0",
   "node.extend.without.arrays": "1.1.6",
   "reap2": "1.0.1",
   "semver": "7.3.5",

@@ -68,7 +68,7 @@
   "winston-transport": "4.4.0"
 },
 "devDependencies": {
-  "mocha": "
+  "mocha": "9.2.2",
   "should": "13.2.3",
   "standard": "16.0.4",
   "std-mocks": "1.0.1",
package/lib/main/monitoring.js
DELETED

@@ -1,92 +0,0 @@
-const os = require('os')
-
-function cpu () {
-  // Create function to get CPU information
-  function cpuAverage () {
-    // Initialise sum of idle and time of cores and fetch CPU info
-    let totalIdle = 0; let totalTick = 0
-    const cpus = os.cpus()
-
-    // Loop through CPU cores
-    for (let i = 0, len = cpus.length; i < len; i++) {
-      // Select CPU core
-      const cpu = cpus[i]
-
-      // Total up the time in the cores tick
-      for (const type in cpu.times) {
-        totalTick += cpu.times[type]
-      }
-
-      // Total up the idle time of the core
-      totalIdle += cpu.times.idle
-    }
-
-    // Return the average Idle and Tick times
-    return { idle: totalIdle / cpus.length, total: totalTick / cpus.length }
-  }
-
-  // Grab first CPU Measure
-  const startMeasure = cpuAverage()
-
-  return new Promise((resolve) => {
-    // Set delay for second Measure
-    setTimeout(function () {
-      // Grab second Measure
-      const endMeasure = cpuAverage()
-
-      // Calculate the difference in idle and total time between the measures
-      const idleDifference = endMeasure.idle - startMeasure.idle
-      const totalDifference = endMeasure.total - startMeasure.total
-
-      // Calculate the average percentage CPU usage
-      const percentageCPU = 100 - ~~(100 * idleDifference / totalDifference)
-
-      // Output result to console
-      resolve(percentageCPU)
-    }, 1000)
-  })
-}
-
-class Monitoring {
-  constructor (reporter) {
-    this.reporter = reporter
-  }
-
-  async execute () {
-    const monitoring = {
-      cpu: await cpu(),
-      freemem: Math.round(os.freemem() / 1024 / 1024),
-      timestamp: new Date(),
-      hostname: os.hostname()
-    }
-    return this.reporter.documentStore.collection('monitoring').insert(monitoring)
-  }
-
-  init () {
-    this._interval = setInterval(() => {
-      this.execute().catch((e) => this.reporter.logger.warn('unable to persist monitoring info, but no need to worry, we will retry, details:' + e.stack))
-    }, 60000)
-    this._interval.unref()
-  }
-
-  close () {
-    clearInterval(this._interval)
-  }
-}
-
-module.exports = (reporter) => {
-  reporter.documentStore.registerEntityType('MonitoringType', {
-    cpu: { type: 'Edm.Int32' },
-    freemem: { type: 'Edm.Int32' },
-    timestamp: { type: 'Edm.DateTimeOffset', schema: { type: 'null' } },
-    hostname: { type: 'Edm.String' }
-  })
-
-  reporter.documentStore.registerEntitySet('monitoring', {
-    entityType: 'jsreport.MonitoringType',
-    exportable: false,
-    shared: true
-  })
-
-  reporter.monitoring = new Monitoring(reporter)
-}