@jsreport/jsreport-core 4.1.0 → 4.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,97 @@
+const bytes = require('bytes')
+const { Readable } = require('stream')
+/*
+  This adds jsreport.templatingEngines.createStream to the helpers proxy, allowing helpers to write giant texts to the output
+  which would otherwise hit the Node.js max string size limit.
+
+  Example usage
+  ===================
+  async function myEach(items, options) {
+    const stream = await jsreport.templatingEngines.createStream()
+    for (let i = 0; i < items.length; i++) {
+      await stream.write(options.fn())
+    }
+    return await stream.toResult()
+  }
+*/
+
+module.exports = (reporter) => {
+  reporter.afterTemplatingEnginesExecutedListeners.add('streamedEach', async (req, res) => {
+    if (req.context.engineStreamEnabled !== true) {
+      return
+    }
+
+    const content = (await res.output.getBuffer()).toString()
+
+    const matches = [...content.matchAll(/{#stream ([^{}]{0,500})}/g)]
+
+    async function * transform () {
+      if (matches.length) {
+        yield content.substring(0, matches[0].index)
+
+        for (let i = 0; i < matches.length; i++) {
+          const { stream } = reporter.readTempFileStream(matches[i][1])
+
+          for await (const content of stream) {
+            yield content
+          }
+
+          if (i < matches.length - 1) {
+            yield content.substring(matches[i].index + matches[i][0].length, matches[i + 1].index)
+          } else {
+            yield content.substring(matches[i].index + matches[i][0].length)
+          }
+        }
+      } else {
+        yield content
+      }
+    }
+
+    await res.output.update(Readable.from(transform()))
+  })
+
+  reporter.extendProxy((proxy, req, {
+    runInSandbox,
+    context,
+    getTopLevelFunctions
+  }) => {
+    if (proxy.templatingEngines) {
+      proxy.templatingEngines.createStream = async (opts = {}) => {
+        // limit the number of temp files to avoid breaking the server; there is no reason why more than 1000 calls per req should be a valid use case
+        const counter = reporter.reqStorage.get('engine-stream-counter', req) || 0
+        if (counter > 1000) {
+          throw reporter.createError('Reached maximum limit of templatingEngine.createStream calls', {
+            weak: true,
+            statusCode: 400
+          })
+        }
+        reporter.reqStorage.set('engine-stream-counter', counter + 1, req)
+
+        req.context.engineStreamEnabled = true
+
+        const bufferSize = bytes(opts.bufferSize || '10mb')
+        let buf = ''
+
+        const { fileHandle, filename } = await reporter.openTempFile((uuid) => `${uuid}.stream`, 'a')
+        proxy.templatingEngines.addFinishListener(() => fileHandle.close().catch((e) => reporter.logger.error('Failed to close temp file handle', e, req)))
+
+        return {
+          write: async (text) => {
+            const realText = await proxy.templatingEngines.waitForAsyncHelper(text)
+
+            buf += realText
+
+            if (buf.length > bufferSize) {
+              await fileHandle.appendFile(buf)
+              buf = ''
+            }
+          },
+          toResult: async () => {
+            await fileHandle.appendFile(buf)
+            return `{#stream ${filename}}`
+          }
+        }
+      }
+    }
+  })
+}
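
As a hedged illustration of how the new API from engineStream.js would be consumed from template helpers: the helper name, the data shape and the '20mb' buffer size below are illustrative assumptions, while createStream, write, toResult and the bufferSize option all come from the code above.

// hypothetical helpers of a handlebars template using the new streaming API (sketch only)
async function streamedRows (items, options) {
  // bufferSize controls how much text stays in memory before it is appended
  // to the temp file (the default in the code above is '10mb')
  const stream = await jsreport.templatingEngines.createStream({ bufferSize: '20mb' })

  for (const item of items) {
    // options.fn(item) renders the block body for a single item (handlebars block helper contract)
    await stream.write(options.fn(item))
  }

  // returns the `{#stream <temp file>}` placeholder that the afterTemplatingEnginesExecuted
  // listener above later replaces with the streamed temp file contents
  return stream.toResult()
}

In a handlebars template such a helper would then be invoked as a block helper, for example {{#streamedRows rows}}<tr>...</tr>{{/streamedRows}}.
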
@@ -16,6 +16,7 @@ module.exports = (reporter) => {
 
   const executionFnParsedParamsMap = new Map()
   const executionAsyncResultsMap = new Map()
+  const executionFinishListenersMap = new Map()
 
   const templatingEnginesEvaluate = async (mainCall, { engine, content, helpers, data }, { entity, entitySet }, req) => {
     const engineImpl = reporter.extensionsManager.engines.find((e) => e.name === engine)
@@ -47,6 +48,7 @@ module.exports = (reporter) => {
       }
 
       executionAsyncResultsMap.delete(executionId)
+      executionFinishListenersMap.delete(executionId)
     }
   }
 
@@ -94,6 +96,11 @@ module.exports = (reporter) => {
         return Promise.all([...asyncResultMap.keys()].map((k) => asyncResultMap.get(k)))
       }
     },
+    addFinishListener: (fn) => {
+      if (executionFinishListenersMap.has(context.__executionId)) {
+        executionFinishListenersMap.get(context.__executionId).add('finish', fn)
+      }
+    },
     createAsyncHelperResult: (v) => {
       const asyncResultMap = executionAsyncResultsMap.get(context.__executionId)
       const asyncResultId = nanoid(7)
@@ -190,6 +197,7 @@ module.exports = (reporter) => {
     context.__executionId = executionId
 
     executionAsyncResultsMap.set(executionId, asyncResultMap)
+    executionFinishListenersMap.set(executionId, reporter.createListenerCollection())
     executionFnParsedParamsMap.get(req.context.id).get(executionFnParsedParamsKey).resolve({ require, console, topLevelFunctions, context })
 
     const key = engine.buildTemplateCacheKey
@@ -210,7 +218,7 @@ module.exports = (reporter) => {
 
     for (const h of Object.keys(topLevelFunctions)) {
       // extra wrapping for enhance the error with the helper name
-      wrappedTopLevelFunctions[h] = wrapHelperForHelperNameWhenError(topLevelFunctions[h], h)
+      wrappedTopLevelFunctions[h] = wrapHelperForHelperNameWhenError(topLevelFunctions[h], h, () => executionFnParsedParamsMap.has(req.context.id))
 
       if (engine.getWrappingHelpersEnabled && engine.getWrappingHelpersEnabled(req) === false) {
         wrappedTopLevelFunctions[h] = engine.wrapHelper(wrappedTopLevelFunctions[h], { context })
@@ -223,19 +231,33 @@ module.exports = (reporter) => {
 
     const resolvedResultsMap = new Map()
 
-    while (asyncResultMap.size > 0) {
-      await Promise.all([...asyncResultMap.keys()].map(async (k) => {
-        resolvedResultsMap.set(k, `${await asyncResultMap.get(k)}`)
-        asyncResultMap.delete(k)
+    // we need to use a cloned map, because there can be a waitForAsyncHelper pending that needs the asyncResultMap values
+    const clonedMap = new Map(asyncResultMap)
+    while (clonedMap.size > 0) {
+      await Promise.all([...clonedMap.keys()].map(async (k) => {
+        resolvedResultsMap.set(k, `${await clonedMap.get(k)}`)
+        clonedMap.delete(k)
       }))
     }
+    asyncResultMap.clear()
 
     while (contentResult.includes('{#asyncHelperResult')) {
      contentResult = contentResult.replace(/{#asyncHelperResult ([^{}]+)}/g, (str, p1) => {
        const asyncResultId = p1
+        // this can happen if a child jsreport.templatingEngines.evaluate receives an async value from the outer scope,
+        // because every evaluate uses a unique map of async results
+        // (an example is a component that receives an async value);
+        // instead of returning "undefined" we let the outer eval do the replace
+        if (!resolvedResultsMap.has(asyncResultId)) {
+          // returning asyncUnresolvedHelperResult just to avoid an endless loop; after the replace we put it back to asyncHelperResult
+          return `{#asyncUnresolvedHelperResult ${asyncResultId}}`
+        }
        return `${resolvedResultsMap.get(asyncResultId)}`
      })
    }
+    contentResult = contentResult.replace(/asyncUnresolvedHelperResult/g, 'asyncHelperResult')
+
+    await executionFinishListenersMap.get(context.__executionId).fire()
 
    return {
      // handlebars escapes single brackets before execution to prevent errors on {#asset}
@@ -357,7 +379,7 @@ module.exports = (reporter) => {
     }
   }
 
-  function wrapHelperForHelperNameWhenError (fn, helperName) {
+  function wrapHelperForHelperNameWhenError (fn, helperName, isMainEvalStillRunningFn) {
     return function (...args) {
       let fnResult
 
@@ -375,6 +397,10 @@ module.exports = (reporter) => {
       }
 
       return fnResult.catch((asyncError) => {
+        if (!isMainEvalStillRunningFn()) {
+          // the main exec already finished on some error, so we just ignore errors of the hanging async calls
+          return
+        }
        throw getEnhancedHelperError(asyncError)
      })
    }
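
The new addFinishListener hook registered above is what engineStream.js relies on to close its temp file handles once the main evaluation and its pending async helper results have settled. A minimal sketch of another proxy extension using the same hook; the extension and the openScratchFile method are hypothetical, while extendProxy, openTempFile and addFinishListener come from this diff:

// hypothetical worker extension; mirrors the registration pattern of engineStream.js above
module.exports = (reporter) => {
  reporter.extendProxy((proxy, req) => {
    if (!proxy.templatingEngines) {
      return
    }

    // hypothetical helper exposed to sandboxed template code
    proxy.templatingEngines.openScratchFile = async () => {
      const { fileHandle, filename } = await reporter.openTempFile((uuid) => `${uuid}.scratch`, 'a')

      // the callback fires after the engine execution resolved its async helper results
      // (see executionFinishListenersMap above)
      proxy.templatingEngines.addFinishListener(() => fileHandle.close())

      return { fileHandle, filename }
    }
  })
}
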
@@ -72,7 +72,15 @@ class Profiler {
     }
 
     if (m.doDiffs !== false && req.context.profiling.mode === 'full' && (m.type === 'operationStart' || m.type === 'operationEnd')) {
-      let content = res.content
+      let originalResContent = res.content
+
+      // if content is empty assume null to keep old logic working without major changes
+      // (here and in studio)
+      if (originalResContent != null && originalResContent.length === 0) {
+        originalResContent = null
+      }
+
+      let content = originalResContent
 
       if (content != null) {
         if (content.length > this.reporter.options.profiler.maxDiffSize) {
@@ -82,12 +90,12 @@ class Profiler {
       } else {
         if (isbinaryfile(content)) {
           content = {
-            content: res.content.toString('base64'),
+            content: originalResContent.toString('base64'),
             encoding: 'base64'
           }
         } else {
           content = {
-            content: createPatch('res', req.context.profiling.resLastVal ? req.context.profiling.resLastVal.toString() : '', res.content.toString(), 0),
+            content: createPatch('res', req.context.profiling.resLastVal ? req.context.profiling.resLastVal.toString() : '', originalResContent.toString(), 0),
             encoding: 'diff'
           }
         }
@@ -107,7 +115,7 @@ class Profiler {
         m.req.diff = createPatch('req', req.context.profiling.reqLastVal || '', stringifiedReq, 0)
       }
 
-      req.context.profiling.resLastVal = (res.content == null || isbinaryfile(res.content) || content.tooLarge) ? null : res.content.toString()
+      req.context.profiling.resLastVal = (originalResContent == null || isbinaryfile(originalResContent) || content.tooLarge) ? null : originalResContent.toString()
       req.context.profiling.resMetaLastVal = stringifiedResMeta
       req.context.profiling.reqLastVal = stringifiedReq
     }
@@ -6,7 +6,7 @@
 const extend = require('node.extend.without.arrays')
 const ExecuteEngine = require('./executeEngine')
 const Request = require('../../shared/request')
-const generateRequestId = require('../../shared/generateRequestId')
+const Response = require('../../shared/response')
 const resolveReferences = require('./resolveReferences.js')
 const moduleHelper = require('./moduleHelper')
 
@@ -59,8 +59,7 @@ module.exports = (reporter) => {
     reporter.logger.debug(`Rendering engine ${engine.name}`, request)
 
     const engineRes = await executeEngine(engine, request)
-
-    response.content = Buffer.from(engineRes.content != null ? engineRes.content : '')
+    await response.output.update(Buffer.from(engineRes.content != null ? engineRes.content : ''))
 
     reporter.profiler.emit({
       type: 'operationEnd',
@@ -92,6 +91,7 @@ module.exports = (reporter) => {
     reporter.logger.debug('Executing recipe ' + request.template.recipe, request)
 
     await recipe.execute(request, response)
+
     reporter.profiler.emit({
       type: 'operationEnd',
       operationId: recipeProfilerEvent.operationId
@@ -105,13 +105,19 @@ module.exports = (reporter) => {
 
   return async (req, parentReq) => {
     const request = Request(req, parentReq)
-    const response = { meta: {} }
+
+    if (request.context.id == null) {
+      request.context.id = reporter.generateRequestId()
+    }
+    if (parentReq == null) {
+      reporter.reqStorage.registerReq(request)
+    }
+
+    const response = Response(reporter, request.context.id)
+
     let renderStartProfilerEvent
-    try {
-      if (request.context.id == null) {
-        request.context.id = generateRequestId()
-      }
 
+    try {
       renderStartProfilerEvent = await reporter.profiler.renderStart(request, parentReq, response)
       request.data = resolveReferences(request.data) || {}
 
@@ -189,6 +195,7 @@ module.exports = (reporter) => {
     } finally {
       if (parentReq == null) {
        reporter.requestModulesCache.delete(request.context.rootId)
+        reporter.reqStorage.unregisterReq(request)
      }
    }
  }
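
With the response now built by Response(reporter, request.context.id), extension code reads and rewrites rendered output through res.output instead of assigning res.content directly. A minimal sketch, following the same listener pattern engineStream.js uses above (the listener name and appended text are illustrative; getBuffer and update are the res.output methods used elsewhere in this diff):

// hypothetical extension listener rewriting the engine output via the new res.output API
module.exports = (reporter) => {
  reporter.afterTemplatingEnginesExecutedListeners.add('append-footer-note', async (req, res) => {
    const content = (await res.output.getBuffer()).toString()

    // update accepts a Buffer here; it also accepts a Readable stream,
    // as engineStream.js does with Readable.from(transform())
    await res.output.update(Buffer.from(content + '\n<!-- rendered by jsreport -->'))
  })
}
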
@@ -10,6 +10,7 @@ const Reporter = require('../shared/reporter')
 const BlobStorage = require('./blobStorage.js')
 const Render = require('./render/render')
 const Profiler = require('./render/profiler.js')
+const engineStream = require('./render/engineStream.js')
 
 class WorkerReporter extends Reporter {
   constructor (workerData, executeMain) {
@@ -79,6 +80,8 @@ class WorkerReporter extends Reporter {
       execute: htmlRecipe
     })
 
+    engineStream(this)
+
     await this.initializeListeners.fire()
 
     if (!this._lockedDown && this.options.trustUserCode === false) {
@@ -169,8 +172,8 @@ class WorkerReporter extends Reporter {
     return proxyInstance
   }
 
-  render (req, parentReq) {
-    return this._render(req, parentReq)
+  async render (req, parentReq) {
+    return await this._render(req, parentReq)
   }
 
   async executeMainAction (actionName, data, req) {
@@ -221,17 +224,8 @@ class WorkerReporter extends Reporter {
 
   _registerRenderAction () {
     this.registerWorkerAction('render', async (data, req) => {
-      const res = await this.render(req)
-
-      const sharedBuf = new SharedArrayBuffer(res.content.byteLength)
-      const buf = Buffer.from(sharedBuf)
-
-      res.content.copy(buf)
-
-      return {
-        meta: res.meta,
-        content: buf
-      }
+      const response = await this._render(req)
+      return response.serialize()
    })
  }
 
@@ -1,17 +1,18 @@
 const Module = require('module')
 const path = require('path')
 const fs = require('fs')
+const resolveFilename = require('./resolveFilename')
 
-const REQUIRE_RESOLVE_CACHE = new Map()
-const REQUIRE_SCRIPT_CACHE = new Map()
-const PACKAGE_JSON_CACHE = new Map()
+const ISOLATED_REQUIRE_RESOLVE_CACHE = new Map()
+const ISOLATED_REQUIRE_SCRIPT_CACHE = new Map()
+const ISOLATED_PACKAGE_JSON_CACHE = new Map()
 
 // The isolated require is a function that replicates the node.js require but that does not
 // cache the modules with the standard node.js cache, instead its uses its own cache in order
 // to bring isolated modules across renders and without memory leaks.
 // most of the code is copied from node.js source code and adapted a bit
 // (you will see in some parts specific links to node.js source code counterpart for reference)
-function isolatedRequire (_moduleId, requireFromRootDirectory, isolatedModulesMeta) {
+function isolatedRequire (_moduleId, modulesMeta, requireFromRootDirectory) {
   const parentModule = typeof _moduleId !== 'string' ? _moduleId.parent : null
   const moduleId = parentModule ? _moduleId.moduleId : _moduleId
 
@@ -30,19 +31,26 @@ function isolatedRequire (_moduleId, requireFromRootDirectory, isolatedModulesMe
     return require(moduleId)
   }
 
-  const { modulesCache, requireExtensions } = isolatedModulesMeta
-  const fullModulePath = resolveFilename(requireFromRootDirectory, moduleId, { parentModulePath: parentModule?.path })
+  const { rootModule, modulesCache, requireExtensions } = modulesMeta
+
+  const fullModulePath = resolveFilename(ISOLATED_REQUIRE_RESOLVE_CACHE, requireFromRootDirectory.resolve, moduleId, { parentModulePath: parentModule?.path })
 
   if (modulesCache[fullModulePath]) {
     return modulesCache[fullModulePath].exports
   }
 
-  const mod = new IsolatedModule(fullModulePath, parentModule)
+  let targetParentModule = parentModule
+
+  if (targetParentModule == null) {
+    targetParentModule = rootModule
+  }
+
+  const mod = new IsolatedModule(fullModulePath, targetParentModule)
 
   // https://github.com/nodejs/node/blob/v18.14.2/lib/internal/modules/cjs/loader.js#L1133
   // we can not add this to the IsolatedModule.prototype because we need access to other variables
   mod.require = function (id) {
-    return isolatedRequire({ parent: this, moduleId: id }, requireFromRootDirectory, isolatedModulesMeta)
+    return isolatedRequire({ parent: this, moduleId: id }, modulesMeta, requireFromRootDirectory)
   }
 
   modulesCache[fullModulePath] = mod
@@ -88,8 +96,8 @@ function setDefaultRequireExtensions (currentExtensions, requireFromRootDirector
 
   let compiledScript
 
-  if (REQUIRE_SCRIPT_CACHE.has(filename)) {
-    compiledScript = REQUIRE_SCRIPT_CACHE.get(filename)
+  if (ISOLATED_REQUIRE_SCRIPT_CACHE.has(filename)) {
+    compiledScript = ISOLATED_REQUIRE_SCRIPT_CACHE.get(filename)
   } else {
     let moduleContent = fs.readFileSync(filename, 'utf8')
 
@@ -99,7 +107,7 @@ function setDefaultRequireExtensions (currentExtensions, requireFromRootDirector
 
     compiledScript = compileScript(moduleWrappedContent, filename, false)
 
-    REQUIRE_SCRIPT_CACHE.set(filename, compiledScript)
+    ISOLATED_REQUIRE_SCRIPT_CACHE.set(filename, compiledScript)
   }
 
   // we run module in same context than main context because we want to reproduce the same behavior
@@ -163,7 +171,7 @@ function IsolatedModule (id = '', parent) {
   // something here, however if the need appears we can check what we can do about it
   // we should be aware of the expected values it carries according to the node.js docs
   // https://nodejs.org/api/modules.html#moduleparent
-  this.parent = undefined
+  this.parent = parent
 
   // this is always false for our case, because our modules we never run during the
   // Node.js preload phase
@@ -184,7 +192,7 @@ function makeRequireFunction (mod, requireFromRootDirectory, currentExtensions)
       options
     }
 
-    return resolveFilename(requireFromRootDirectory, request, extra)
+    return resolveFilename(ISOLATED_REQUIRE_RESOLVE_CACHE, requireFromRootDirectory.resolve, request, extra)
   }
 
   requireFn.resolve = resolve
@@ -208,36 +216,6 @@ function makeRequireFunction (mod, requireFromRootDirectory, currentExtensions)
   return requireFn
 }
 
-function resolveFilename (requireFromRootDirectory, moduleId, extra) {
-  const { parentModulePath, options } = extra
-  const useCache = options == null
-  const resolveCacheKey = parentModulePath ? `${parentModulePath}::${moduleId}` : moduleId
-  let fullModulePath
-
-  if (useCache && REQUIRE_RESOLVE_CACHE.has(resolveCacheKey)) {
-    fullModulePath = REQUIRE_RESOLVE_CACHE.get(resolveCacheKey)
-  } else {
-    if (parentModulePath) {
-      const optionsToUse = { ...options }
-
-      // search from the parent module path by default if not explicit .paths has been passed
-      if (optionsToUse.paths == null) {
-        optionsToUse.paths = [parentModulePath]
-      }
-
-      fullModulePath = requireFromRootDirectory.resolve(moduleId, optionsToUse)
-    } else {
-      fullModulePath = requireFromRootDirectory.resolve(moduleId)
-    }
-
-    if (useCache) {
-      REQUIRE_RESOLVE_CACHE.set(resolveCacheKey, fullModulePath)
-    }
-  }
-
-  return fullModulePath
-}
-
 // https://github.com/nodejs/node/blob/v18.14.2/lib/internal/modules/cjs/loader.js#L496
 // Find the longest (possibly multi-dot) extension registered in extensions
 function findLongestRegisteredExtension (fullPath, extensions) {
@@ -295,7 +273,7 @@ function readPackageScope (checkPath) {
 function readPackage (requestPath) {
   const jsonPath = path.resolve(requestPath, 'package.json')
 
-  const existing = PACKAGE_JSON_CACHE.get(jsonPath)
+  const existing = ISOLATED_PACKAGE_JSON_CACHE.get(jsonPath)
 
   if (existing !== undefined) {
     return existing
@@ -308,7 +286,7 @@ function readPackage (requestPath) {
   } catch (error) {}
 
   if (json === undefined) {
-    PACKAGE_JSON_CACHE.set(jsonPath, false)
+    ISOLATED_PACKAGE_JSON_CACHE.set(jsonPath, false)
     return false
   }
 
@@ -321,7 +299,7 @@ function readPackage (requestPath) {
       'type'
     ])
 
-    PACKAGE_JSON_CACHE.set(jsonPath, filtered)
+    ISOLATED_PACKAGE_JSON_CACHE.set(jsonPath, filtered)
     return filtered
   } catch (e) {
     e.path = jsonPath
@@ -439,4 +417,5 @@ function validateString (value, name) {
 }
 
 module.exports = isolatedRequire
+module.exports.IsolatedModule = IsolatedModule
 module.exports.setDefaultRequireExtensions = setDefaultRequireExtensions
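
The inline resolveFilename helper removed above now lives in a shared './resolveFilename' module that is not included in this diff. Judging from the removed code and the new call sites resolveFilename(cacheMap, resolveModule, moduleId, extra), a plausible shape of that module is the following sketch (the actual published file may differ):

// ./resolveFilename.js — hedged reconstruction based on the removed inline helper above
module.exports = function resolveFilename (cache, resolveModule, moduleId, extra = {}) {
  const { parentModulePath, options } = extra
  // explicit resolve options bypass the shared cache
  const useCache = options == null
  const resolveCacheKey = parentModulePath ? `${parentModulePath}::${moduleId}` : moduleId

  if (useCache && cache.has(resolveCacheKey)) {
    return cache.get(resolveCacheKey)
  }

  let fullModulePath

  if (parentModulePath) {
    const optionsToUse = { ...options }

    // search from the parent module path by default when no explicit .paths was passed
    if (optionsToUse.paths == null) {
      optionsToUse.paths = [parentModulePath]
    }

    fullModulePath = resolveModule(moduleId, optionsToUse)
  } else {
    fullModulePath = resolveModule(moduleId)
  }

  if (useCache) {
    cache.set(resolveCacheKey, fullModulePath)
  }

  return fullModulePath
}
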
@@ -1,8 +1,11 @@
 const Module = require('module')
 const os = require('os')
 const path = require('path')
+const resolveFilename = require('./resolveFilename')
 const isolatedRequire = require('./isolatedRequire')
 
+const REQUIRE_RESOLVE_CACHE = new Map()
+
 module.exports = function createSandboxRequire (safeExecution, isolateModules, modulesCache, {
   rootDirectory,
   requirePaths,
@@ -15,20 +18,28 @@ module.exports = function createSandboxRequire (safeExecution, isolateModules, m
     throw new Error(`rootDirectory must be an absolute path, path: ${rootDirectory}`)
   }
 
-  // we pass directory with trailing slash to ensure node recognize the path as directory
-  const requireFromRootDirectory = Module.createRequire(ensureTrailingSlash(rootDirectory))
+  const rootProxyPath = path.join(rootDirectory, '___sandbox___')
 
-  let isolatedModulesMeta
+  let modulesMeta
 
   if (isolateModules) {
-    const requireExtensions = Object.create(null)
+    const rootModule = new isolatedRequire.IsolatedModule(rootProxyPath, null)
+    rootModule.filename = rootProxyPath
+    rootModule.paths = Module._nodeModulePaths(rootProxyPath)
+    rootModule.loaded = true
+
+    modulesMeta = {
+      rootModule,
+      modulesCache
+    }
+  }
 
-    isolatedRequire.setDefaultRequireExtensions(requireExtensions, modulesCache, compileScript)
+  const requireFromRootDirectory = Module.createRequire(rootProxyPath)
 
-    isolatedModulesMeta = {
-      modulesCache: modulesCache,
-      requireExtensions
-    }
+  if (isolateModules) {
+    const requireExtensions = Object.create(null)
+    isolatedRequire.setDefaultRequireExtensions(requireExtensions, modulesCache, compileScript)
+    modulesMeta.requireExtensions = requireExtensions
   }
 
   return function sandboxRequire (moduleId, { context, useMap = true, allowAllModules = false } = {}) {
@@ -41,13 +52,13 @@ module.exports = function createSandboxRequire (safeExecution, isolateModules, m
     }
 
     if (!safeExecution || allowAllModules || allowedModules === '*') {
-      return doRequire(moduleId, requireFromRootDirectory, requirePaths, isolatedModulesMeta)
+      return doRequire(moduleId, requireFromRootDirectory, requirePaths, modulesMeta)
     }
 
     const m = allowedModules.find(mod => (mod.id || mod) === moduleId)
 
     if (m) {
-      return doRequire(m.path || moduleId, requireFromRootDirectory, requirePaths, isolatedModulesMeta)
+      return doRequire(m.path || moduleId, requireFromRootDirectory, requirePaths, modulesMeta)
    }
 
    const error = new Error(
@@ -62,25 +73,27 @@ module.exports = function createSandboxRequire (safeExecution, isolateModules, m
   }
 }
 
-function doRequire (moduleId, requireFromRootDirectory, _requirePaths, isolatedModulesMeta) {
-  const isolateModules = isolatedModulesMeta != null
+function doRequire (moduleId, requireFromRootDirectory, _requirePaths, modulesMeta) {
+  const isolateModules = modulesMeta != null
   const searchedPaths = []
   const requirePaths = _requirePaths || []
   const _require = isolateModules ? isolatedRequire : requireFromRootDirectory
   const extraRequireParams = []
 
   if (isolateModules) {
-    extraRequireParams.push(requireFromRootDirectory, isolatedModulesMeta)
+    extraRequireParams.push(modulesMeta, requireFromRootDirectory)
  }
 
-  let result = executeRequire(_require, moduleId, searchedPaths, ...extraRequireParams)
+  const resolveModule = requireFromRootDirectory.resolve
+
+  let result = executeRequire(_require, resolveModule, moduleId, searchedPaths, ...extraRequireParams)
 
   if (!result) {
     let pathsSearched = 0
 
     while (!result && pathsSearched < requirePaths.length) {
       const newModuleId = path.join(requirePaths[pathsSearched], moduleId)
-      result = executeRequire(_require, newModuleId, searchedPaths, ...extraRequireParams)
+      result = executeRequire(_require, resolveModule, newModuleId, searchedPaths, ...extraRequireParams)
      pathsSearched++
    }
  }
@@ -92,10 +105,46 @@ function doRequire (moduleId, requireFromRootDirectory, _requirePaths, isolatedM
   return result
 }
 
-function executeRequire (_require, moduleId, searchedPaths, ...restOfParams) {
+function executeRequire (_require, resolveModule, moduleId, searchedPaths, ...restOfParams) {
+  const isolateModules = restOfParams.length > 0
+  const shouldHandleModuleResolveFilenameOptimization = !isolateModules
+
+  const originalModuleResolveFilename = Module._resolveFilename
+
   try {
-    return _require(moduleId, ...restOfParams)
+    if (shouldHandleModuleResolveFilenameOptimization) {
+      // when isolate modules is disabled we add an extra cache here to optimize require resolution,
+      // basically we want to avoid the overhead that node require resolution
+      // adds when trying to resolve the filename/path of a module, because even if the module
+      // is cached in require.cache module filename/path resolution still happens and has a cost
+      const customResolveFilename = (...args) => {
+        const customResolveModule = (...resolveArgs) => {
+          Module._resolveFilename = originalModuleResolveFilename
+          try {
+            return resolveModule(...resolveArgs)
+          } finally {
+            Module._resolveFilename = customResolveFilename
+          }
+        }
+
+        return optimizedResolveFilename(customResolveModule, ...args)
+      }
+
+      Module._resolveFilename = customResolveFilename
+    }
+
+    const result = _require(moduleId, ...restOfParams)
+
+    if (shouldHandleModuleResolveFilenameOptimization) {
+      Module._resolveFilename = originalModuleResolveFilename
+    }
+
+    return result
  } catch (e) {
+    if (shouldHandleModuleResolveFilenameOptimization) {
+      Module._resolveFilename = originalModuleResolveFilename
+    }
+
    if (e.code && e.code === 'MODULE_NOT_FOUND') {
      if (!searchedPaths.includes(moduleId)) {
        searchedPaths.push(moduleId)
@@ -108,10 +157,6 @@ function executeRequire (_require, moduleId, searchedPaths, ...restOfParams) {
   }
 }
 
-function ensureTrailingSlash (fullPath) {
-  if (fullPath.endsWith(path.sep)) {
-    return fullPath
-  }
-
-  return fullPath + path.sep
+function optimizedResolveFilename (resolveModule, request, parent, isMain, options) {
+  return resolveFilename(REQUIRE_RESOLVE_CACHE, resolveModule, request, { parentModulePath: parent?.path, options })
 }
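
The executeRequire change above is essentially a temporary monkey-patch of Node's internal Module._resolveFilename: a cached resolver is installed for the duration of one require call, and the original is restored afterwards even when the require throws. A standalone sketch of that pattern, with illustrative names and a simplified cache (not the package's actual implementation):

// sketch of the swap/restore pattern used in executeRequire above
const Module = require('module')

function withCachedResolveFilename (cache, fn) {
  const originalResolveFilename = Module._resolveFilename

  Module._resolveFilename = function (request, parent, isMain, options) {
    const key = parent?.path ? `${parent.path}::${request}` : request

    // explicit options bypass the cache, mirroring the resolveFilename helper above
    if (options == null && cache.has(key)) {
      return cache.get(key)
    }

    const resolved = originalResolveFilename.call(this, request, parent, isMain, options)

    if (options == null) {
      cache.set(key, resolved)
    }

    return resolved
  }

  try {
    return fn()
  } finally {
    // always restore the original resolver, as executeRequire does in its catch path
    Module._resolveFilename = originalResolveFilename
  }
}

// usage: withCachedResolveFilename(new Map(), () => require('some-module'))
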