@jsreport/jsreport-core 4.5.0 → 4.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -0
- package/index.js +1 -0
- package/lib/main/folders/cascadeFolderRemove.js +22 -12
- package/lib/main/folders/getEntitiesInFolder.js +4 -0
- package/lib/main/folders/index.js +7 -6
- package/lib/main/folders/validateDuplicatedName.js +25 -7
- package/lib/main/optionsSchema.js +5 -0
- package/lib/main/profiler.js +52 -2
- package/lib/main/reporter.js +11 -3
- package/lib/main/store/documentStore.js +6 -1
- package/lib/shared/reporter.js +2 -2
- package/lib/shared/request.js +4 -0
- package/lib/shared/runningRequests.js +30 -0
- package/lib/worker/render/engineStream.js +2 -2
- package/lib/worker/render/render.js +2 -2
- package/package.json +2 -2
- package/lib/shared/reqStorage.js +0 -20
package/README.md
CHANGED
@@ -282,6 +282,19 @@ jsreport.documentStore.collection('templates')
 
 ## Changelog
 
+### 4.6.1
+
+- clear profiles canceling check interval during reporter close
+
+### 4.6.0
+
+- update nanoid to fix security issue
+- optimize fs store operations for big workspaces
+- reimplement and optimize fs transactions
+- fix async reports with mongo store
+- create store indexes during schema creation fix
+- implement canceling requests from profiler
+
 ### 4.5.0
 
 - fix blobStorage failing to save reports bigger than 1gb
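The headline 4.6.0 item is request canceling driven from the profiler. As the lib/main/profiler.js hunk further below shows, the main process periodically looks for profiles whose state is 'canceling' and emits 'abort' on the matching running request. A minimal sketch of triggering that flow from extension code (looking up the profile _id of the running render is assumed to happen elsewhere, e.g. in jsreport-studio):

// hypothetical extension code: ask the profiler to cancel a running render;
// the new canceling check interval picks the profile up and aborts the request
async function cancelRenderByProfile (reporter, profileId) {
  await reporter.documentStore.collection('profiles').update({
    _id: profileId
  }, {
    $set: { state: 'canceling' }
  })
}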
package/index.js
CHANGED
@@ -21,6 +21,7 @@ module.exports.Request = Request
 module.exports.createListenerCollection = createListenerCollection
 module.exports.loggerFormat = winston.format
 module.exports.createDefaultLoggerFormat = createDefaultLoggerFormat
+module.exports.createError = require('./lib/shared/createError')
 
 module.exports.tests = {
   documentStore: () => require('./test/store/common.js'),
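The package now also re-exports createError at its top level. A minimal usage sketch; the message is illustrative, and the options follow the shape already used with reporter.createError in the engineStream.js hunk later in this diff:

const { createError } = require('@jsreport/jsreport-core')

// weak/statusCode follow the options shape used by reporter.createError in engineStream.js below
throw createError('template not found', {
  weak: true,
  statusCode: 400
})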
package/lib/main/folders/cascadeFolderRemove.js
CHANGED
@@ -1,25 +1,35 @@
 module.exports = (reporter) => {
   reporter.documentStore.collection('folders').beforeRemoveListeners.add('folders', async (q, req) => {
+    async function removeInCol (c, folder) {
+      const entities = await reporter.documentStore.collection(c).find({
+        folder: {
+          shortid: folder.shortid
+        }
+      }, req)
+
+      if (entities.length === 0) {
+        return
+      }
+
+      return reporter.documentStore.collection(c).remove({
+        _id: {
+          $in: entities.map(e => e._id)
+        }
+      }, req)
+    }
+
     const foldersToRemove = await reporter.documentStore.collection('folders').find(q, req)
+    const promises = []
 
     for (const folder of foldersToRemove) {
       for (const c of Object.keys(reporter.documentStore.collections)) {
-        const entities = await reporter.documentStore.collection(c).find({
-          folder: {
-            shortid: folder.shortid
-          }
-        }, req)
-
-        if (entities.length === 0) {
+        if (!reporter.documentStore.model.entitySets[c].entityTypeDef.folder) {
           continue
         }
 
-        for (const e of entities) {
-          await reporter.documentStore.collection(c).remove({
-            _id: e._id
-          }, req)
-        }
+        promises.push(removeInCol(c, folder))
      }
    }
+    return Promise.all(promises)
  })
 }
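The rewrite batches the cascade that runs when a folder is removed: each collection gets one remove with an $in filter, the collections are processed in parallel, and entity sets whose type has no folder property are skipped. What triggers it is unchanged; a sketch with a hypothetical folder shortid (req is whatever request context you already have):

// removing a folder still cascades to everything stored inside it
await reporter.documentStore.collection('folders').remove({
  shortid: 'my-folder'
}, req)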
package/lib/main/folders/getEntitiesInFolder.js
CHANGED
@@ -8,6 +8,10 @@ module.exports = (reporter) => async function getEntitiesInFolder (folderShortId
   const lookup = []
 
   for (const [entitySetName] of Object.entries(reporter.documentStore.model.entitySets)) {
+    if (!reporter.documentStore.model.entitySets[entitySetName].entityTypeDef.folder) {
+      continue
+    }
+
     lookup.push(reporter.documentStore.collection(entitySetName).find({
       folder: {
         shortid: folderShortId
package/lib/main/folders/index.js
CHANGED
@@ -15,18 +15,19 @@ module.exports = (reporter) => {
   })
 
   reporter.documentStore.registerComplexType('FolderRefType', {
-    shortid: { type: 'Edm.String', referenceTo: 'folders' }
+    shortid: { type: 'Edm.String', referenceTo: 'folders', index: true, length: 255 }
   })
 
   // before document store initialization, extend all entity types with folder information
   reporter.documentStore.on('before-init', (documentStore) => {
     Object.entries(documentStore.model.entitySets).forEach(([k, entitySet]) => {
       const entityTypeName = entitySet.entityType.replace(documentStore.model.namespace + '.', '')
-
-
-
-
-
+      if (entitySet.exportable !== false) {
+        documentStore.model.entityTypes[entityTypeName].folder = {
+          type: 'jsreport.FolderRefType',
+          // folder reference can be null when entity is at the root level
+          schema: { type: 'null' }
+        }
       }
     })
   })
@@ -1,18 +1,36 @@
|
|
|
1
1
|
const resolveEntityPath = require('../../shared/folders/resolveEntityPath')
|
|
2
2
|
|
|
3
3
|
async function findEntity (reporter, name, folder, req) {
|
|
4
|
+
async function findEntityInColAndFolder (c, folder) {
|
|
5
|
+
const entities = await reporter.documentStore.collection(c).findAdmin({
|
|
6
|
+
folder
|
|
7
|
+
}, {
|
|
8
|
+
name: 1
|
|
9
|
+
}, req)
|
|
10
|
+
|
|
11
|
+
return {
|
|
12
|
+
entities,
|
|
13
|
+
entitySet: c
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
const promises = []
|
|
4
18
|
for (const c of Object.keys(reporter.documentStore.collections)) {
|
|
5
19
|
if (!reporter.documentStore.model.entitySets[c].entityTypeDef.name) {
|
|
6
20
|
continue
|
|
7
21
|
}
|
|
8
22
|
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
}
|
|
12
|
-
|
|
13
|
-
|
|
23
|
+
if (folder != null && !reporter.documentStore.model.entitySets[c].entityTypeDef.folder) {
|
|
24
|
+
continue
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
promises.push(findEntityInColAndFolder(c, folder))
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
const results = await Promise.all(promises)
|
|
14
31
|
|
|
15
|
-
|
|
32
|
+
for (const { entities, entitySet } of results) {
|
|
33
|
+
const existingEntity = entities.find((entity) => {
|
|
16
34
|
if (entity.name) {
|
|
17
35
|
// doing the check for case insensitive string (foo === FOO)
|
|
18
36
|
return entity.name.toLowerCase() === name.toLowerCase()
|
|
@@ -22,7 +40,7 @@ async function findEntity (reporter, name, folder, req) {
|
|
|
22
40
|
})
|
|
23
41
|
|
|
24
42
|
if (existingEntity) {
|
|
25
|
-
return { entity: existingEntity, entitySet
|
|
43
|
+
return { entity: existingEntity, entitySet }
|
|
26
44
|
}
|
|
27
45
|
}
|
|
28
46
|
}
|
|
package/lib/main/optionsSchema.js
CHANGED
@@ -197,6 +197,11 @@ module.exports.getRootSchemaOptions = () => ({
         type: ['string', 'number'],
         '$jsreport-acceptsSize': true,
         default: '10mb'
+      },
+      cancelingCheckInterval: {
+        type: ['string', 'number'],
+        '$jsreport-acceptsDuration': true,
+        default: '5s'
       }
     }
   }
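cancelingCheckInterval is a new profiler option that accepts a duration string or number and defaults to 5s; it controls how often the main process scans for profiles marked 'canceling' (see profiler.js below). A minimal configuration sketch using the usual jsreport-core factory:

const jsreport = require('@jsreport/jsreport-core')({
  profiler: {
    // how often to look for profiles in the 'canceling' state
    cancelingCheckInterval: '5s'
  }
})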
package/lib/main/profiler.js
CHANGED
@@ -12,7 +12,7 @@ module.exports = (reporter) => {
     templateShortid: { type: 'Edm.String', referenceTo: 'templates' },
     timestamp: { type: 'Edm.DateTimeOffset', schema: { type: 'null' } },
     finishedOn: { type: 'Edm.DateTimeOffset', schema: { type: 'null' } },
-    state: { type: 'Edm.String' },
+    state: { type: 'Edm.String', schema: { enum: ['running', 'success', 'queued', 'error', 'canceling'] }, index: true, length: 255 },
     error: { type: 'Edm.String' },
     mode: { type: 'Edm.String', schema: { enum: ['full', 'standard', 'disabled'] } },
     blobName: { type: 'Edm.String' },
@@ -377,6 +377,7 @@ module.exports = (reporter) => {
 
   let profilesCleanupInterval
   let fullModeDurationCheckInterval
+  let profilesCancelingCheckInterval
 
   reporter.initializeListeners.add('profiler', async () => {
     reporter.documentStore.collection('profiles').beforeRemoveListeners.add('profiles', async (query, req) => {
@@ -398,12 +399,40 @@ module.exports = (reporter) => {
       return reporter._profilesFullModeDurationCheck()
     }
 
+    let _profilesCancelingCheckExecRunning = false
+    async function profilesCancelingCheckExec () {
+      if (_profilesCancelingCheckExecRunning) {
+        return
+      }
+      _profilesCancelingCheckExecRunning = true
+
+      try {
+        const cancelingProfiles = await reporter.documentStore.collection('profiles').find({
+          state: 'canceling'
+        })
+
+        for (const profile of cancelingProfiles) {
+          const runningReq = [...reporter.runningRequests.map.values()].find(v => v.req.context.profiling?.entity?._id === profile._id)
+          if (runningReq) {
+            runningReq.options.abortEmitter.emit('abort')
+          }
+        }
+      } catch (e) {
+        reporter.logger.warn('Failed to process cancelling profiles. No worry, it will retry next time.', e)
+      } finally {
+        _profilesCancelingCheckExecRunning = false
+      }
+    }
+
     profilesCleanupInterval = setInterval(profilesCleanupExec, reporter.options.profiler.cleanupInterval)
     profilesCleanupInterval.unref()
 
     fullModeDurationCheckInterval = setInterval(fullModeDurationCheckExec, reporter.options.profiler.fullModeDurationCheckInterval)
     fullModeDurationCheckInterval.unref()
 
+    profilesCancelingCheckInterval = setInterval(profilesCancelingCheckExec, reporter.options.profiler.cancelingCheckInterval)
+    profilesCancelingCheckInterval.unref()
+
     await reporter._profilesCleanup()
   })
 
@@ -416,6 +445,27 @@ module.exports = (reporter) => {
       clearInterval(fullModeDurationCheckInterval)
     }
 
+    if (profilesCancelingCheckInterval) {
+      clearInterval(profilesCancelingCheckInterval)
+    }
+
+    try {
+      const runningRequests = [...reporter.runningRequests.map.values()]
+      await reporter.documentStore.collection('profiles').update({
+        _id: {
+          $in: runningRequests.map(r => r.req.context.profiling?.entity?._id)
+        }
+      }, {
+        $set: {
+          state: 'error',
+          finishedOn: new Date(),
+          error: 'The server unexpectedly stopped during the report rendering.'
+        }
+      })
+    } catch (e) {
+      reporter.logger.warn('Failed to set error state to the running requests when closing.', e)
+    }
+
     for (const key of profilerOperationsChainsMap.keys()) {
       const profileAppendPromise = profilerOperationsChainsMap.get(key)
       if (profileAppendPromise) {
@@ -460,7 +510,7 @@ module.exports = (reporter) => {
     }
 
     const notFinishedProfiles = await reporter.documentStore.collection('profiles')
-      .find({ $or: [{ state: 'running' }, { state: 'queued' }] }, { _id: 1, timeout: 1, timestamp: 1 })
+      .find({ $or: [{ state: 'running' }, { state: 'queued' }, { state: 'canceling' }] }, { _id: 1, timeout: 1, timestamp: 1 })
       .toArray()
 
     for (const profile of notFinishedProfiles) {
package/lib/main/reporter.js
CHANGED
@@ -29,6 +29,7 @@ const Request = require('./request')
 const Response = require('../shared/response')
 const Profiler = require('./profiler')
 const semver = require('semver')
+const EventEmitter = require('events')
 let reportCounter = 0
 
 class MainReporter extends Reporter {
@@ -367,7 +368,10 @@ class MainReporter extends Reporter {
       throw new Error('Not initialized, you need to call jsreport.init().then before rendering')
     }
 
-
+    options.abortEmitter = options.abortEmitter || new EventEmitter()
+
+    req = Request(req)
+
     req.context = Object.assign({}, req.context)
     req.context.rootId = req.context.rootId || this.generateRequestId()
     req.context.id = req.context.rootId
@@ -375,6 +379,8 @@ class MainReporter extends Reporter {
     req.context.startTimestamp = new Date().getTime()
     req.options = Object.assign({}, req.options)
 
+    this.runningRequests.register(req, options)
+
     let worker
     let workerAborted
     let dontCloseProcessing
@@ -415,10 +421,9 @@ class MainReporter extends Reporter {
       }, {
         timeout: this.getReportTimeout(req)
       })
-      req = result
+      req = Request(result)
     }
 
-    req = Request(req)
     options.onReqReady?.(req)
 
     // TODO: we will probably validate in the thread
@@ -491,6 +496,9 @@ class MainReporter extends Reporter {
      this._cleanProfileInRequest(req)
      throw err
    } finally {
+      options.abortEmitter.removeAllListeners('abort')
+
+      this.runningRequests.unregister(req, options)
      if (worker && !workerAborted && !dontCloseProcessing) {
        await worker.release(req)
      }
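Every render now carries an abortEmitter (created when the caller does not pass one) and stays registered in reporter.runningRequests for its whole lifetime, which is what the profiler uses to cancel it. A minimal sketch of supplying your own emitter; the template name is hypothetical, and the worker-side handling of the 'abort' event is not part of this diff:

const EventEmitter = require('events')

const abortEmitter = new EventEmitter()
const rendering = reporter.render({ template: { name: 'invoice' } }, { abortEmitter })

// later, e.g. when a user clicks cancel
abortEmitter.emit('abort')

// an aborted render is expected to reject
await rendering.catch((err) => console.error(err.message))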
package/lib/main/store/documentStore.js
CHANGED
@@ -75,6 +75,11 @@ const DocumentStore = (options, validator, encryption) => {
     es.entityTypeDef = this.model.entityTypes[es.normalizedEntityTypeName]
     const entityType = es.entityTypeDef
 
+    if (entityType.name) {
+      entityType.name.index = true
+      entityType.name.length = 1024
+    }
+
     if (!entityType._id) {
       entityType._id = { type: 'Edm.String' }
     }
@@ -94,7 +99,7 @@ const DocumentStore = (options, validator, encryption) => {
     }
 
     if (!entityType.shortid) {
-      entityType.shortid = { type: 'Edm.String' }
+      entityType.shortid = { type: 'Edm.String', index: true, length: 255 }
     }
 
     const referenceProperties = findReferencePropertiesInType(this.model, entityType)
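documentStore now stamps name (and a generated shortid) with index and length hints, matching the index/length/enum annotations added on FolderRefType and the profile state above; store drivers can use these to create indexes when the schema is created, per the 4.6.0 changelog item. A hedged sketch of an extension-defined entity type that would pick the hints up automatically (the type, set, and field names are hypothetical):

// hypothetical extension code; documentStore adds index/length to `name`
// and creates an indexed `shortid` when the type does not define one
reporter.documentStore.registerEntityType('InvoiceType', {
  name: { type: 'Edm.String' },
  issuedOn: { type: 'Edm.DateTimeOffset', schema: { type: 'null' } }
})

reporter.documentStore.registerEntitySet('invoices', {
  entityType: 'jsreport.InvoiceType',
  exportable: true
})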
package/lib/shared/reporter.js
CHANGED
@@ -9,7 +9,7 @@ const tempFilesHandler = require('./tempFilesHandler')
 const encryption = require('./encryption')
 const generateRequestId = require('./generateRequestId')
 const adminRequest = require('./adminRequest')
-const
+const RunningRequests = require('./runningRequests')
 
 class Reporter extends EventEmitter {
   constructor (options) {
@@ -19,7 +19,7 @@ class Reporter extends EventEmitter {
     this.Request = Request
     this.Response = (...args) => Response(this, ...args)
     this.adminRequest = adminRequest
-    this.
+    this.runningRequests = RunningRequests(this)
 
     // since `reporter` instance will be used for other extensions,
     // it will quickly reach the limit of `10` listeners,
package/lib/shared/runningRequests.js
ADDED
@@ -0,0 +1,30 @@
+// holds running requests and provides additionally keyValueStore
+module.exports = (reporter) => {
+  const runningReqMap = new Map()
+
+  return {
+    keyValueStore: {
+      get: (key, req) => {
+        const keyValueMap = runningReqMap.get(req.context.rootId).keyValueMap
+        return keyValueMap.get(key)
+      },
+      set: (key, val, req) => {
+        const keyValueMap = runningReqMap.get(req.context.rootId).keyValueMap
+        keyValueMap.set(key, val)
+      }
+    },
+
+    map: runningReqMap,
+
+    register: (req, options) => {
+      runningReqMap.set(req.context.rootId, {
+        keyValueMap: new Map(),
+        req,
+        options
+      })
+    },
+    unregister: (req) => {
+      runningReqMap.delete(req.context.rootId)
+    }
+  }
+}
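runningRequests replaces the deleted reqStorage module: it keeps each root request together with its render options and a per-request key/value map, keyed by req.context.rootId. The engineStream.js hunk below is its real consumer; a compact lifecycle sketch (registration is normally done by reporter.render and the worker render pipeline, not by user code):

const EventEmitter = require('events')

reporter.runningRequests.register(req, { abortEmitter: new EventEmitter() })

// per-request scratch state
reporter.runningRequests.keyValueStore.set('engine-stream-counter', 1, req)
const counter = reporter.runningRequests.keyValueStore.get('engine-stream-counter', req)

reporter.runningRequests.unregister(req)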
package/lib/worker/render/engineStream.js
CHANGED
@@ -58,14 +58,14 @@ module.exports = (reporter) => {
   if (proxy.templatingEngines) {
     proxy.templatingEngines.createStream = async (opts = {}) => {
       // limiting the number of temp files to avoid breaking server, otherwise I see no reason why having more than 1000 calls per req should be valid usecase
-      const counter = reporter.
+      const counter = reporter.runningRequests.keyValueStore.get('engine-stream-counter', req) || 0
       if (counter > 1000) {
         throw reporter.createError('Reached maximum limit of templatingEngine.createStream calls', {
           weak: true,
           statusCode: 400
         })
       }
-      reporter.
+      reporter.runningRequests.keyValueStore.set('engine-stream-counter', counter + 1, req)
 
       req.context.engineStreamEnabled = true
 
package/lib/worker/render/render.js
CHANGED
@@ -110,7 +110,7 @@ module.exports = (reporter) => {
     request.context.id = reporter.generateRequestId()
   }
   if (parentReq == null) {
-    reporter.
+    reporter.runningRequests.register(request)
   }
 
   const response = Response(reporter, request.context.id)
@@ -195,7 +195,7 @@ module.exports = (reporter) => {
   } finally {
     if (parentReq == null) {
       reporter.requestModulesCache.delete(request.context.rootId)
-      reporter.
+      reporter.runningRequests.unregister(request)
     }
   }
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@jsreport/jsreport-core",
-  "version": "4.5.0",
+  "version": "4.6.1",
   "description": "javascript based business reporting",
   "keywords": [
     "report",
@@ -63,7 +63,7 @@
     "lodash.omit": "4.5.0",
     "lru-cache": "4.1.1",
     "ms": "2.1.3",
-    "nanoid": "3.
+    "nanoid": "3.3.8",
     "nconf": "0.12.0",
     "node.extend.without.arrays": "1.1.6",
     "semver": "7.5.4",
package/lib/shared/reqStorage.js
DELETED
@@ -1,20 +0,0 @@
-module.exports = (reporter) => {
-  const runningReqMap = new Map()
-
-  return {
-    set: (key, val, req) => {
-      const keyValueMap = runningReqMap.get(req.context.rootId)
-      keyValueMap.set(key, val)
-    },
-    get: (key, req) => {
-      const keyValueMap = runningReqMap.get(req.context.rootId)
-      return keyValueMap.get(key)
-    },
-    registerReq: (req) => {
-      runningReqMap.set(req.context.rootId, new Map())
-    },
-    unregisterReq: (req) => {
-      runningReqMap.delete(req.context.rootId)
-    }
-  }
-}