@jsreport/jsreport-core 4.0.0 → 4.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -282,8 +282,20 @@ jsreport.documentStore.collection('templates')
 
  ## Changelog
 
+ ### 4.1.0
+
+ - update deps to fix npm audit
+ - fix memory being held when timeouts are large
+ - fix node.js 21 compatibility when sandbox is used (`trustUserCode: false`)
+ - support using different cache key per template engine if it supports it
+
+ ### 4.0.1
+
+ - fix parameter mutations passed to store methods producing unexpected changes in store
+
  ### 4.0.0
 
+ - minimum node.js version is now `18.15.0`
  - remove old migration options `migrateXlsxTemplatesToAssets`, `migrateResourcesToAssets`
  - sandbox now uses SES instead of vm2 for evaluating user code
  - internal changes to support multi admin users
@@ -29,7 +29,7 @@ module.exports = (reporter) => {
    const profilerRequestMap = new Map()
 
    function runInProfilerChain (fnOrOptions, req) {
-     if (req.context.profiling.mode === 'disabled') {
+     if (req.context.profiling == null || req.context.profiling.mode === 'disabled') {
        return
      }
 
@@ -352,7 +352,7 @@ module.exports = (reporter) => {
        type: 'log',
        level: info.level,
        message: info.message,
-       previousOperationId: req.context.profiling.lastOperationId
+       previousOperationId: req.context.profiling?.lastOperationId
      }, req)],
      log: false
    }, req)
@@ -266,7 +266,7 @@ class MainReporter extends Reporter {
 
      this._workersManager = this._workersManagerFactory
        ? this._workersManagerFactory(workersManagerOptions, workersManagerSystemOptions)
-       : WorkersManager(workersManagerOptions, workersManagerSystemOptions, this.logger)
+       : WorkersManager(workersManagerOptions, workersManagerSystemOptions)
 
      const workersStart = new Date().getTime()
 
@@ -1,5 +1,5 @@
  const extend = require('node.extend.without.arrays')
- const set = require('lodash.set')
+ const set = require('set-value')
  const hasOwn = require('has-own-deep')
  const unsetValue = require('unset-value')
  const ms = require('ms')
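
This require swap (together with the matching one further down and the package.json dependency change) relies on `set-value` exposing the same `set(object, path, value)` call shape as `lodash.set`. A minimal sketch of that usage; the object and path here are illustrative, not taken from the package:

```js
// minimal sketch, not part of the diff: set-value mutates the target in place,
// creating intermediate objects along the path, just like lodash.set did
const set = require('set-value')

const config = {}
set(config, 'profiler.defaultMode', 'standard') // illustrative path, not a documented option
// config is now { profiler: { defaultMode: 'standard' } }
```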
@@ -1,99 +1,104 @@
- /*!
-  * Copyright(c) 2018 Jan Blaha
-  *
-  * DocumentStore data layer provider using just memory.
-  */
-
- const extend = require('node.extend.without.arrays')
- const { nanoid } = require('nanoid')
- const omit = require('lodash.omit')
- const mingo = require('@jsreport/mingo')
- const Transaction = require('./transaction')
- const Queue = require('./queue')
-
- module.exports = () => {
-   return {
-     load (model) {
-       this.model = model
-       this.transaction = Transaction({ queue: Queue() })
-
-       return this.transaction.operation(async (documents) => {
-         Object.keys(model.entitySets).forEach((e) => (documents[e] = []))
-       })
-     },
-
-     beginTransaction () {
-       return this.transaction.begin()
-     },
-
-     async commitTransaction (tran) {
-       await this.transaction.commit(tran)
-     },
-
-     async rollbackTransaction (tran) {
-       return this.transaction.rollback(tran)
-     },
-
-     find (entitySet, query, fields, opts = {}) {
-       const documents = this.transaction.getCurrentDocuments(opts)
-       const cursor = mingo.find(documents[entitySet], query, fields)
-
-       // the queue is not used here because reads are supposed to not block
-       cursor.toArray = () => cursor.all().map((e) => extend(true, {}, omit(e, '$$etag')))
-
-       return cursor
-     },
-
-     insert (entitySet, doc, opts = {}) {
-       return this.transaction.operation(opts, async (documents) => {
-         doc._id = doc._id || nanoid(16)
-         const newDoc = extend(true, {}, doc)
-         newDoc.$$etag = Date.now()
-         documents[entitySet].push(newDoc)
-         return doc
-       })
-     },
-
-     async update (entitySet, q, u, opts = {}) {
-       let count
-
-       const res = await this.transaction.operation(opts, async (documents) => {
-         const toUpdate = mingo.find(documents[entitySet], q).all()
-
-         count = toUpdate.length
-
-         // need to get of queue first before calling insert, otherwise we get a deathlock
-         if (toUpdate.length === 0 && opts.upsert) {
-           return 'insert'
-         }
-
-         for (const doc of toUpdate) {
-           Object.assign(doc, u.$set || {})
-           doc.$$etag = Date.now()
-         }
-       })
-
-       if (res === 'insert') {
-         await this.insert(entitySet, u.$set, opts)
-         return 1
-       }
-
-       return count
-     },
-
-     remove (entitySet, q, opts = {}) {
-       return this.transaction.operation(opts, async (documents) => {
-         const toRemove = mingo.find(documents[entitySet], q).all()
-         documents[entitySet] = documents[entitySet].filter(d => !toRemove.includes(d))
-       })
-     },
-
-     drop (opts = {}) {
-       return this.transaction.operation(opts, async (documents) => {
-         for (const [entitySetName] of Object.entries(documents)) {
-           documents[entitySetName] = []
-         }
-       })
-     }
-   }
- }
+ /*!
+  * Copyright(c) 2018 Jan Blaha
+  *
+  * DocumentStore data layer provider using just memory.
+  */
+
+ const extend = require('node.extend.without.arrays')
+ const { nanoid } = require('nanoid')
+ const omit = require('lodash.omit')
+ const mingo = require('@jsreport/mingo')
+ const Transaction = require('./transaction')
+ const Queue = require('./queue')
+
+ module.exports = () => {
+   return {
+     load (model) {
+       this.model = model
+       this.transaction = Transaction({ queue: Queue() })
+
+       return this.transaction.operation(async (documents) => {
+         Object.keys(model.entitySets).forEach((e) => (documents[e] = []))
+       })
+     },
+
+     beginTransaction () {
+       return this.transaction.begin()
+     },
+
+     async commitTransaction (tran) {
+       await this.transaction.commit(tran)
+     },
+
+     async rollbackTransaction (tran) {
+       return this.transaction.rollback(tran)
+     },
+
+     find (entitySet, query, fields, opts = {}) {
+       const documents = this.transaction.getCurrentDocuments(opts)
+       const cursor = mingo.find(documents[entitySet], query, fields)
+
+       // the queue is not used here because reads are supposed to not block
+       cursor.toArray = () => cursor.all().map((e) => extend(true, {}, omit(e, '$$etag')))
+
+       return cursor
+     },
+
+     insert (entitySet, doc, opts = {}) {
+       doc._id = doc._id || nanoid(16)
+       const clonnedDoc = extend(true, {}, doc)
+       clonnedDoc.$$etag = Date.now()
+
+       return this.transaction.operation(opts, async (documents) => {
+         documents[entitySet].push(clonnedDoc)
+         return doc
+       })
+     },
+
+     async update (entitySet, q, u, opts = {}) {
+       let count
+       const qClone = extend(true, {}, q)
+       const setClone = extend(true, {}, u.$set)
+
+       const res = await this.transaction.operation(opts, async (documents) => {
+         const toUpdate = mingo.find(documents[entitySet], qClone).all()
+
+         count = toUpdate.length
+
+         // need to get of queue first before calling insert, otherwise we get a deathlock
+         if (toUpdate.length === 0 && opts.upsert) {
+           return 'insert'
+         }
+
+         for (const doc of toUpdate) {
+           Object.assign(doc, setClone)
+           doc.$$etag = Date.now()
+         }
+       })
+
+       if (res === 'insert') {
+         await this.insert(entitySet, setClone, opts)
+         return 1
+       }
+
+       return count
+     },
+
+     remove (entitySet, q, opts = {}) {
+       const qClone = extend(true, {}, q)
+
+       return this.transaction.operation(opts, async (documents) => {
+         const toRemove = mingo.find(documents[entitySet], qClone).all()
+         documents[entitySet] = documents[entitySet].filter(d => !toRemove.includes(d))
+       })
+     },
+
+     drop (opts = {}) {
+       return this.transaction.operation(opts, async (documents) => {
+         for (const [entitySetName] of Object.entries(documents)) {
+           documents[entitySetName] = []
+         }
+       })
+     }
+   }
+ }
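
The behavioral point of this rewrite (tracked by the 4.0.1 changelog entry above) is that `insert`, `update` and `remove` now deep-clone the document, the `$set` object and the query before the queued transaction operation runs, so later mutations by the caller no longer leak into the stored entity. A minimal usage sketch against the public collection API shown in the README, mirroring the tests added further down:

```js
// minimal sketch, not part of the diff: caller-side mutation after the call
// no longer changes what the memory provider stores
const doc = { name: 'test', engine: 'none', recipe: 'html', folder: { shortid: 'f1' } }
await jsreport.documentStore.collection('templates').insert(doc)

doc.folder.shortid = 'changed' // mutate the original object afterwards

const stored = await jsreport.documentStore.collection('templates').findOne({ name: 'test' })
// stored.folder.shortid is still 'f1'
```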
@@ -192,7 +192,9 @@ module.exports = (reporter) => {
      executionAsyncResultsMap.set(executionId, asyncResultMap)
      executionFnParsedParamsMap.get(req.context.id).get(executionFnParsedParamsKey).resolve({ require, console, topLevelFunctions, context })
 
-     const key = `template:${content}:${engine.name}`
+     const key = engine.buildTemplateCacheKey
+       ? engine.buildTemplateCacheKey({ content }, req)
+       : `template:${content}:${engine.name}`
 
      if (!templatesCache.has(key)) {
        try {
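
This hunk implements the changelog entry "support using different cache key per template engine if it supports it": when an engine exposes `buildTemplateCacheKey({ content }, req)`, its return value replaces the default `template:${content}:${engine.name}` key. A minimal sketch of how an engine object could use the hook; the engine name and the request flag are hypothetical, and the rest of the engine contract is omitted:

```js
// minimal sketch, not part of the diff: an engine opting into its own cache key,
// e.g. to avoid cache collisions when compilation depends on request state
const myEngine = {
  name: 'my-engine',
  // ...compile/execute and the other engine properties omitted...
  buildTemplateCacheKey ({ content }, req) {
    // hypothetical flag; anything that influences compilation should be part of the key
    return `template:${content}:my-engine:${req.context.rtlLayout ? 'rtl' : 'ltr'}`
  }
}
```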
@@ -3,7 +3,6 @@
   *
   * Orchestration of the rendering process
   */
- const { Readable } = require('stream')
  const extend = require('node.extend.without.arrays')
  const ExecuteEngine = require('./executeEngine')
  const Request = require('../../shared/request')
@@ -101,10 +100,6 @@ module.exports = (reporter) => {
 
    async function afterRender (reporter, request, response) {
      await reporter.afterRenderListeners.fire(request, response)
-
-     response.stream = Readable.from(response.content)
-     response.result = response.stream
-
      return response
    }
 
@@ -1,7 +1,7 @@
  const extend = require('node.extend.without.arrays')
  const groupBy = require('lodash.groupby')
  const get = require('lodash.get')
- const set = require('lodash.set')
+ const set = require('set-value')
  const hasOwn = require('has-own-deep')
  const unsetValue = require('unset-value')
 
@@ -420,7 +420,7 @@ function sortPropertiesByLevel (a, b) {
  }
 
  function omitProp (context, prop) {
-   // if property has value, then set it to undefined first,
+   // if property has value, then set it to some value first,
    // unsetValue expects that property has some non empty value to remove the property
    // so we set to "true" to ensure it works for all cases,
    // we use unsetValue instead of lodash.omit because
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@jsreport/jsreport-core",
-   "version": "4.0.0",
+   "version": "4.1.0",
    "description": "javascript based business reporting",
    "keywords": [
      "report",
@@ -36,15 +36,15 @@
      "test:watch": "mocha --watch --recursive test"
    },
    "dependencies": {
-     "@babel/code-frame": "7.12.13",
-     "@babel/parser": "7.14.4",
-     "@babel/traverse": "7.12.9",
+     "@babel/code-frame": "7.23.5",
+     "@babel/parser": "7.23.5",
+     "@babel/traverse": "7.23.5",
      "@colors/colors": "1.5.0",
-     "@jsreport/advanced-workers": "2.0.0",
+     "@jsreport/advanced-workers": "2.0.1",
      "@jsreport/mingo": "2.4.1",
      "@jsreport/reap": "0.1.0",
      "@jsreport/serializator": "1.0.0",
-     "@jsreport/ses": "1.0.0",
+     "@jsreport/ses": "1.0.1",
      "ajv": "6.12.6",
      "app-root-path": "3.0.0",
      "bytes": "3.1.2",
@@ -61,13 +61,13 @@
      "lodash.get": "4.4.2",
      "lodash.groupby": "4.6.0",
      "lodash.omit": "4.5.0",
-     "lodash.set": "4.3.2",
      "lru-cache": "4.1.1",
      "ms": "2.1.3",
      "nanoid": "3.2.0",
      "nconf": "0.12.0",
      "node.extend.without.arrays": "1.1.6",
      "semver": "7.5.4",
+     "set-value": "4.1.0",
      "stack-trace": "0.0.10",
      "triple-beam": "1.3.0",
      "unset-value": "2.0.1",
@@ -141,6 +141,34 @@ function collectionTests (store, isInternal, runTransactions) {
      res[0].phantom.header.should.be.eql('original')
    })
 
+   it('update should use clones', async () => {
+     const colName = !isInternal ? 'templates' : 'internalTemplates'
+
+     await store().collection('folders').insert({
+       name: 'f1',
+       shortid: 'f1'
+     })
+
+     await getCollection(colName).insert({
+       name: 'test',
+       engine: 'none',
+       recipe: 'html',
+       content: 'original'
+     })
+
+     const set = {
+       folder: {
+         shortid: 'f1'
+       }
+     }
+
+     await getCollection(colName).update({ name: 'test' }, { $set: set })
+     set.folder.shortid = 'changing'
+
+     const res = await getCollection(colName).findOne({})
+     res.folder.shortid.should.be.eql('f1')
+   })
+
    it('skip and limit', async () => {
      const colName = !isInternal ? 'templates' : 'internalTemplates'
 
@@ -443,6 +471,33 @@ function collectionTests (store, isInternal, runTransactions) {
      should(found != null).be.True()
    })
 
+   it('insert with transaction should use clones', async () => {
+     const colName = !isInternal ? 'templates' : 'internalTemplates'
+     const req = Request({})
+
+     await store().beginTransaction(req)
+
+     try {
+       const t1 = {
+         name: 't1',
+         engine: 'none',
+         recipe: 'html'
+       }
+
+       const newT1 = await getCollection(colName).insert(t1, req)
+
+       newT1.name = 'fake-t1'
+
+       await store().commitTransaction(req)
+     } catch (e) {
+       await store().rollbackTransaction(req)
+       throw e
+     }
+
+     const found = await getCollection(colName).findOne({ name: 't1' })
+     should(found).not.be.null()
+   })
+
    it('should be able to rollback (insert)', async () => {
      const colName = !isInternal ? 'templates' : 'internalTemplates'
      const req = Request({})