@eagleoutice/flowr 2.9.12 → 2.9.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. package/README.md +27 -27
  2. package/benchmark/slicer.d.ts +4 -2
  3. package/benchmark/slicer.js +20 -6
  4. package/benchmark/stats/print.js +12 -0
  5. package/benchmark/stats/stats.d.ts +3 -2
  6. package/benchmark/stats/stats.js +1 -1
  7. package/benchmark/summarizer/data.d.ts +1 -0
  8. package/benchmark/summarizer/second-phase/process.js +5 -0
  9. package/cli/benchmark-app.d.ts +1 -0
  10. package/cli/benchmark-app.js +1 -0
  11. package/cli/benchmark-helper-app.d.ts +2 -1
  12. package/cli/benchmark-helper-app.js +6 -3
  13. package/cli/common/options.d.ts +8 -0
  14. package/cli/common/options.js +3 -1
  15. package/cli/common/scripts-info.d.ts +8 -0
  16. package/cli/export-quads-app.js +1 -1
  17. package/cli/flowr.js +3 -3
  18. package/cli/repl/core.d.ts +3 -3
  19. package/cli/repl/server/connection.d.ts +2 -2
  20. package/cli/repl/server/server.d.ts +2 -2
  21. package/cli/script-core/statistics-core.d.ts +2 -2
  22. package/cli/script-core/statistics-helper-core.d.ts +2 -2
  23. package/cli/script-core/statistics-helper-core.js +1 -1
  24. package/cli/slicer-app.js +2 -2
  25. package/cli/statistics-app.js +1 -1
  26. package/cli/statistics-helper-app.js +1 -1
  27. package/cli/wiki.js +2 -2
  28. package/config.d.ts +65 -24
  29. package/config.js +197 -161
  30. package/control-flow/extract-cfg.js +5 -8
  31. package/core/steps/pipeline-step.d.ts +2 -2
  32. package/dataflow/eval/resolve/alias-tracking.js +12 -15
  33. package/dataflow/graph/graph.js +8 -8
  34. package/dataflow/internal/process/functions/call/built-in/built-in-eval.js +2 -2
  35. package/dataflow/internal/process/functions/call/built-in/built-in-source.d.ts +1 -1
  36. package/dataflow/internal/process/functions/call/built-in/built-in-source.js +20 -9
  37. package/documentation/doc-readme.js +2 -2
  38. package/documentation/wiki-analyzer.js +7 -5
  39. package/documentation/wiki-core.js +1 -3
  40. package/documentation/wiki-dataflow-graph.js +1 -1
  41. package/documentation/wiki-interface.js +5 -3
  42. package/documentation/wiki-linter.js +5 -5
  43. package/documentation/wiki-mk/doc-context.js +3 -4
  44. package/engines.d.ts +2 -2
  45. package/engines.js +4 -4
  46. package/linter/rules/dataframe-access-validation.js +5 -5
  47. package/linter/rules/naming-convention.d.ts +1 -1
  48. package/linter/rules/naming-convention.js +7 -3
  49. package/package.json +3 -1
  50. package/project/context/flowr-analyzer-context.d.ts +6 -6
  51. package/project/context/flowr-analyzer-context.js +2 -2
  52. package/project/context/flowr-analyzer-files-context.d.ts +2 -2
  53. package/project/context/flowr-analyzer-files-context.js +28 -8
  54. package/project/flowr-analyzer-builder.d.ts +10 -6
  55. package/project/flowr-analyzer-builder.js +12 -3
  56. package/project/flowr-analyzer.d.ts +3 -3
  57. package/queries/catalog/config-query/config-query-format.d.ts +5 -5
  58. package/queries/catalog/df-shape-query/df-shape-query-format.d.ts +2 -2
  59. package/queries/catalog/does-call-query/does-call-query-format.d.ts +2 -2
  60. package/queries/catalog/files-query/files-query-format.d.ts +3 -3
  61. package/queries/catalog/inspect-exceptions-query/inspect-exception-query-format.d.ts +2 -2
  62. package/queries/catalog/inspect-higher-order-query/inspect-higher-order-query-format.d.ts +2 -2
  63. package/queries/catalog/inspect-recursion-query/inspect-recursion-query-format.d.ts +2 -2
  64. package/queries/catalog/linter-query/linter-query-format.d.ts +3 -3
  65. package/queries/catalog/location-map-query/location-map-query-format.d.ts +2 -2
  66. package/queries/catalog/origin-query/origin-query-format.d.ts +2 -2
  67. package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +3 -3
  68. package/queries/catalog/resolve-value-query/resolve-value-query-format.d.ts +2 -2
  69. package/queries/catalog/resolve-value-query/resolve-value-query-format.js +4 -0
  70. package/queries/catalog/static-slice-query/static-slice-query-format.d.ts +2 -2
  71. package/queries/query.d.ts +18 -18
  72. package/r-bridge/lang-4.x/ast/model/model.d.ts +7 -2
  73. package/r-bridge/lang-4.x/ast/model/model.js +13 -0
  74. package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +2 -2
  75. package/r-bridge/lang-4.x/ast/parser/json/parser.js +2 -2
  76. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js +6 -2
  77. package/statistics/statistics.d.ts +2 -2
  78. package/util/objects.d.ts +12 -0
  79. package/util/objects.js +28 -0
  80. package/util/summarizer.js +1 -1
  81. package/util/version.js +1 -1
@@ -4,7 +4,7 @@
4
4
  */
5
5
  import type { MergeableRecord } from '../../util/objects';
6
6
  import type { InternalStepPrinter, IPipelineStepPrinter, StepOutputFormat } from '../print/print';
7
- import type { FlowrConfigOptions } from '../../config';
7
+ import type { FlowrConfig } from '../../config';
8
8
  /**
9
9
  * This represents the format of a step processor which retrieves two things:
10
10
  *
@@ -16,7 +16,7 @@ import type { FlowrConfigOptions } from '../../config';
16
16
  * list all steps that you require as your {@link IPipelineStepOrder#dependencies|dependencies}, even if they would be
17
17
  * already covered transitively.
18
18
  */
19
- export type StepProcessingFunction = (results: Record<string, unknown>, input: Record<string, unknown>, config: FlowrConfigOptions) => unknown;
19
+ export type StepProcessingFunction = (results: Record<string, unknown>, input: Record<string, unknown>, config: FlowrConfig) => unknown;
20
20
  /**
21
21
  * This represents the required execution frequency of a step.
22
22
  */
@@ -34,7 +34,7 @@ function getFunctionCallAlias(sourceId, dataflow, environment) {
34
34
  return undefined;
35
35
  }
36
36
  const defs = (0, resolve_by_name_1.resolveByName)(identifier, environment, identifier_1.ReferenceType.Function);
37
- if (defs === undefined || defs.length !== 1) {
37
+ if (defs?.length !== 1) {
38
38
  return undefined;
39
39
  }
40
40
  return [sourceId];
@@ -206,7 +206,7 @@ function trackAliasInEnvironments(identifier, environment, { blocked, idMap, res
206
206
  }
207
207
  }
208
208
  }
209
- if (values.size == 0) {
209
+ if (values.size === 0) {
210
210
  return r_value_1.Top;
211
211
  }
212
212
  return (0, set_constants_1.setFrom)(...values);
@@ -246,18 +246,7 @@ function trackAliasInEnvironments(identifier, environment, { blocked, idMap, res
246
246
  }
247
247
  });
248
248
  function isNestedInLoop(node, ast) {
249
- const parent = node?.info.parent;
250
- if (node === undefined || !parent) {
251
- return false;
252
- }
253
- const parentNode = ast.get(parent);
254
- if (parentNode === undefined) {
255
- return false;
256
- }
257
- if (parentNode.type === type_1.RType.WhileLoop || parentNode.type === type_1.RType.RepeatLoop || parentNode.type === type_1.RType.ForLoop) {
258
- return true;
259
- }
260
- return isNestedInLoop(parentNode, ast);
249
+ return model_1.RNode.iterateParents(node, ast).some(model_1.RLoopConstructs.is);
261
250
  }
262
251
  /**
263
252
  * Please use {@link resolveIdToValue}
@@ -311,12 +300,14 @@ function trackAliasesInGraph(id, graph, ctx, idMap) {
311
300
  }
312
301
  const isFn = t === vertex_1.VertexType.FunctionCall;
313
302
  const outgoingEdges = graph.outgoingEdges(id) ?? [];
303
+ let foundRetuns = false;
314
304
  // travel all read and defined-by edges
315
305
  for (const [targetId, { types }] of outgoingEdges) {
316
306
  if (isFn) {
317
307
  if (types === edge_1.EdgeType.Returns || types === edge_1.EdgeType.DefinedByOnCall || types === edge_1.EdgeType.DefinedBy) {
318
308
  queue.add(targetId, baseEnvironment, cleanFingerprint, false);
319
309
  }
310
+ foundRetuns ||= edge_1.DfEdge.includesType({ types }, edge_1.EdgeType.Returns);
320
311
  continue;
321
312
  }
322
313
  // currently, they have to be exact!
@@ -324,6 +315,9 @@ function trackAliasesInGraph(id, graph, ctx, idMap) {
324
315
  queue.add(targetId, baseEnvironment, cleanFingerprint, false);
325
316
  }
326
317
  }
318
+ if (isFn && !foundRetuns) {
319
+ return r_value_1.Top;
320
+ }
327
321
  }
328
322
  if (forceTop || resultIds.length === 0) {
329
323
  return r_value_1.Top;
@@ -332,10 +326,13 @@ function trackAliasesInGraph(id, graph, ctx, idMap) {
332
326
  for (const id of resultIds) {
333
327
  const node = idMap.get(id);
334
328
  if (node !== undefined) {
329
+ if (node.info.role === "param-v" /* RoleInParent.ParameterDefaultValue */ || model_1.RNode.iterateParents(node, idMap).some(p => p.info.role === "param-v" /* RoleInParent.ParameterDefaultValue */)) {
330
+ return r_value_1.Top;
331
+ }
335
332
  values.add((0, general_1.valueFromRNodeConstant)(node));
336
333
  }
337
334
  }
338
- return (0, set_constants_1.setFrom)(...values);
335
+ return values.size === 0 ? r_value_1.Top : (0, set_constants_1.setFrom)(...values);
339
336
  }
340
337
  /**
341
338
  * Please use {@link resolveIdToValue}
@@ -402,17 +402,17 @@ class DataflowGraph {
402
402
  to = node_id_1.NodeId.normalize(to);
403
403
  const vertex = this.getVertex(from);
404
404
  (0, assert_1.guard)(vertex !== undefined, () => `node must be defined for ${from} to add control dependency`);
405
- vertex.cds ??= [];
406
- let hasControlDependency = false;
407
- for (const { id, when: cond } of vertex.cds) {
408
- if (id === to && when !== cond) {
409
- hasControlDependency = true;
410
- break;
405
+ if (vertex.cds) {
406
+ for (const { id, when: cond } of vertex.cds) {
407
+ if (id === to && when !== cond) {
408
+ return this;
409
+ }
411
410
  }
412
411
  }
413
- if (!hasControlDependency) {
414
- vertex.cds.push({ id: to, when });
412
+ else {
413
+ vertex.cds = [];
415
414
  }
415
+ vertex.cds.push({ id: to, when });
416
416
  return this;
417
417
  }
418
418
  /** Marks the given node as having unknown side effects */
@@ -45,13 +45,13 @@ function processEvalCall(name, args, rootId, data, config) {
45
45
  const idGenerator = (0, decorate_1.sourcedDeterministicCountingIdGenerator)(name.lexeme + '::' + rootId, name.location);
46
46
  data = {
47
47
  ...data,
48
- cds: [...(data.cds ?? []), { id: rootId, when: true }]
48
+ cds: code.length > 1 ? [...(data.cds ?? []), { id: rootId, when: true }] : data.cds
49
49
  };
50
50
  const originalInfo = { ...information };
51
51
  const result = [];
52
52
  for (const c of code) {
53
53
  const codeRequest = (0, retriever_1.requestFromInput)(c);
54
- const r = (0, built_in_source_1.sourceRequest)(rootId, codeRequest, data, originalInfo, idGenerator);
54
+ const r = (0, built_in_source_1.sourceRequest)(rootId, codeRequest, data, originalInfo, code.length > 1, idGenerator);
55
55
  result.push(r);
56
56
  // add a returns edge from the eval to the result
57
57
  for (const e of r.exitPoints) {
@@ -45,7 +45,7 @@ export declare function processSourceCall<OtherInfo>(name: RSymbol<OtherInfo & P
45
45
  * Processes a source request with the given dataflow processor information and existing dataflow information
46
46
  * Otherwise, this can be an {@link RProjectFile} representing a standalone source file
47
47
  */
48
- export declare function sourceRequest<OtherInfo>(rootId: NodeId, request: RParseRequest | RProjectFile<OtherInfo & ParentInformation>, data: DataflowProcessorInformation<OtherInfo & ParentInformation>, information: DataflowInformation, getId?: IdGenerator<NoInfo>): DataflowInformation;
48
+ export declare function sourceRequest<OtherInfo>(rootId: NodeId, request: RParseRequest | RProjectFile<OtherInfo & ParentInformation>, data: DataflowProcessorInformation<OtherInfo & ParentInformation>, information: DataflowInformation, makeMaybe: boolean, getId?: IdGenerator<NoInfo>): DataflowInformation;
49
49
  /**
50
50
  * Processes a standalone source file (i.e., not from a source function call)
51
51
  */
@@ -30,6 +30,7 @@ const r_value_1 = require("../../../../../eval/values/r-value");
30
30
  const unknown_side_effect_1 = require("../../../../../graph/unknown-side-effect");
31
31
  const alias_tracking_1 = require("../../../../../eval/resolve/alias-tracking");
32
32
  const built_in_1 = require("../../../../../environments/built-in");
33
+ const edge_1 = require("../../../../../graph/edge");
33
34
  /**
34
35
  * Infers working directories based on the given option and reference chain
35
36
  */
@@ -117,8 +118,8 @@ function findSource(resolveSource, seed, data) {
117
118
  const effectivePath = explore ? path_1.default.join(explore, tryPath) : tryPath;
118
119
  const context = data.ctx.files;
119
120
  const get = context.exists(effectivePath, capitalization) ?? context.exists(returnPlatformPath(effectivePath), capitalization);
120
- if (get && !found.includes(effectivePath)) {
121
- found.push(returnPlatformPath(effectivePath));
121
+ if (get && !found.includes(returnPlatformPath(get))) {
122
+ found.push(returnPlatformPath(get));
122
123
  }
123
124
  }
124
125
  }
@@ -176,7 +177,7 @@ function processSourceCall(name, args, rootId, data, config) {
176
177
  result = sourceRequest(rootId, {
177
178
  request: 'file',
178
179
  content: f
179
- }, data, result, (0, decorate_1.sourcedDeterministicCountingIdGenerator)((findCount > 0 ? findCount + '::' : '') + f, name.location));
180
+ }, data, result, true, (0, decorate_1.sourcedDeterministicCountingIdGenerator)((findCount > 0 ? findCount + '::' : '') + f, name.location));
180
181
  }
181
182
  return result;
182
183
  }
@@ -189,7 +190,7 @@ function processSourceCall(name, args, rootId, data, config) {
189
190
  * Processes a source request with the given dataflow processor information and existing dataflow information
190
191
  * Otherwise, this can be an {@link RProjectFile} representing a standalone source file
191
192
  */
192
- function sourceRequest(rootId, request, data, information, getId) {
193
+ function sourceRequest(rootId, request, data, information, makeMaybe, getId) {
193
194
  // parse, normalize and dataflow the sourced file
194
195
  let dataflow;
195
196
  let fst;
@@ -239,11 +240,21 @@ function sourceRequest(rootId, request, data, information, getId) {
239
240
  }
240
241
  // take the entry point as well as all the written references, and give them a control dependency to the source call to show that they are conditional
241
242
  if (!String(rootId).startsWith('file-')) {
242
- if (dataflow.graph.hasVertex(dataflow.entryPoint)) {
243
- dataflow.graph.addControlDependency(dataflow.entryPoint, rootId, true);
243
+ if (makeMaybe) {
244
+ if (dataflow.graph.hasVertex(dataflow.entryPoint)) {
245
+ dataflow.graph.addControlDependency(dataflow.entryPoint, rootId, true);
246
+ }
247
+ for (const out of dataflow.out) {
248
+ dataflow.graph.addControlDependency(out.nodeId, rootId, true);
249
+ }
244
250
  }
245
- for (const out of dataflow.out) {
246
- dataflow.graph.addControlDependency(out.nodeId, rootId, true);
251
+ else {
252
+ if (dataflow.graph.hasVertex(dataflow.entryPoint)) {
253
+ dataflow.graph.addEdge(dataflow.entryPoint, rootId, edge_1.EdgeType.Reads);
254
+ }
255
+ for (const out of dataflow.out) {
256
+ dataflow.graph.addEdge(out.nodeId, rootId, edge_1.EdgeType.Reads);
257
+ }
247
258
  }
248
259
  }
249
260
  data.ctx.files.addConsideredFile(filePath ?? '<inline>');
@@ -272,6 +283,6 @@ function standaloneSourceFile(idx, file, data, information) {
272
283
  ...data,
273
284
  environment: information.environment,
274
285
  referenceChain: [...data.referenceChain, file.filePath]
275
- }, information);
286
+ }, information, false);
276
287
  }
277
288
  //# sourceMappingURL=built-in-source.js.map
@@ -21,7 +21,7 @@ const PublicationsMain = [
21
21
  header: 'Statically Analyzing the Dataflow of R Programs (OOPSLA \'25)',
22
22
  description: 'Please cite this paper if you are using flowR in your research.',
23
23
  doi: 'https://doi.org/10.1145/3763087',
24
- bibtex: `@article{10.1145/3763087,
24
+ bibtex: String.raw `@article{10.1145/3763087,
25
25
  author = {Sihler, Florian and Tichy, Matthias},
26
26
  title = {Statically Analyzing the Dataflow of R Programs},
27
27
  year = {2025},
@@ -32,7 +32,7 @@ const PublicationsMain = [
32
32
  number = {OOPSLA2},
33
33
  url = {https://doi.org/10.1145/3763087},
34
34
  doi = {10.1145/3763087},
35
- abstract = {The R programming language is primarily designed for statistical computing and mostly used by researchers without a background in computer science. R provides a wide range of dynamic features and peculiarities that are difficult to analyze statically like dynamic scoping and lazy evaluation with dynamic side effects. At the same time, the R ecosystem lacks sophisticated analysis tools that support researchers in understanding and improving their code. In this paper, we present a novel static dataflow analysis framework for the R programming language that is capable of handling the dynamic nature of R programs and produces the dataflow graph of given R programs. This graph can be essential in a range of analyses, including program slicing, which we implement as a proof of concept. The core analysis works as a stateful fold over a normalized version of the abstract syntax tree of the R program, which tracks (re-)definitions, values, function calls, side effects, external files, and a dynamic control flow to produce one dataflow graph per program. We evaluate the correctness of our analysis using output equivalence testing on a manually curated dataset of 779 sensible slicing points from executable real-world R scripts. Additionally, we use a set of systematic test cases based on the capabilities of the R language and the implementation of the R interpreter and measure the runtimes well as the memory consumption on a set of 4,230 real-world R scripts and 20,815 packages available on R’s package manager CRAN. Furthermore, we evaluate the recall of our program slicer, its accuracy using shrinking, and its improvement over the state of the art. We correctly analyze almost all programs in our equivalence test suite, preserving the identical output for 99.7\\% of the manually curated slicing points. On average, we require 576ms to analyze the dataflow and around 213kB to store the graph of a research script. 
This shows that our analysis is capable of analyzing real-world sources quickly and correctly. Our slicer achieves an average reduction of 84.8\\% of tokens indicating its potential to improve program comprehension.},
35
+ abstract = {The R programming language is primarily designed for statistical computing and mostly used by researchers without a background in computer science. R provides a wide range of dynamic features and peculiarities that are difficult to analyze statically like dynamic scoping and lazy evaluation with dynamic side effects. At the same time, the R ecosystem lacks sophisticated analysis tools that support researchers in understanding and improving their code. In this paper, we present a novel static dataflow analysis framework for the R programming language that is capable of handling the dynamic nature of R programs and produces the dataflow graph of given R programs. This graph can be essential in a range of analyses, including program slicing, which we implement as a proof of concept. The core analysis works as a stateful fold over a normalized version of the abstract syntax tree of the R program, which tracks (re-)definitions, values, function calls, side effects, external files, and a dynamic control flow to produce one dataflow graph per program. We evaluate the correctness of our analysis using output equivalence testing on a manually curated dataset of 779 sensible slicing points from executable real-world R scripts. Additionally, we use a set of systematic test cases based on the capabilities of the R language and the implementation of the R interpreter and measure the runtimes well as the memory consumption on a set of 4,230 real-world R scripts and 20,815 packages available on R’s package manager CRAN. Furthermore, we evaluate the recall of our program slicer, its accuracy using shrinking, and its improvement over the state of the art. We correctly analyze almost all programs in our equivalence test suite, preserving the identical output for 99.7\% of the manually curated slicing points. On average, we require 576ms to analyze the dataflow and around 213kB to store the graph of a research script. 
This shows that our analysis is capable of analyzing real-world sources quickly and correctly. Our slicer achieves an average reduction of 84.8\% of tokens indicating its potential to improve program comprehension.},
36
36
  journal = {Proc. ACM Program. Lang.},
37
37
  month = oct,
38
38
  articleno = {309},
@@ -29,6 +29,7 @@ const flowr_analyzer_plugin_1 = require("../project/plugins/flowr-analyzer-plugi
29
29
  const flowr_analyzer_environment_context_1 = require("../project/context/flowr-analyzer-environment-context");
30
30
  const flowr_analyzer_functions_context_1 = require("../project/context/flowr-analyzer-functions-context");
31
31
  const flowr_analyzer_meta_context_1 = require("../project/context/flowr-analyzer-meta-context");
32
+ const config_1 = require("../config");
32
33
  async function analyzerQuickExample() {
33
34
  const analyzer = await new flowr_analyzer_builder_1.FlowrAnalyzerBuilder()
34
35
  .setEngine('tree-sitter')
@@ -172,23 +173,24 @@ The following sections highlight some of the most important configuration option
172
173
  ${(0, doc_structure_1.section)('Configuring flowR', 3)}
173
174
 
174
175
  You can fundamentally change the behavior of flowR using the [config file](${doc_files_1.FlowrWikiBaseRef}/Interface#configuring-flowr),
175
- embedded in the interface ${ctx.link('FlowrConfigOptions')}.
176
+ embedded in the interface ${ctx.link(config_1.FlowrConfig)}.
176
177
  With the builder you can either provide a complete configuration or amend the default configuration using:
177
178
 
178
179
  * ${ctx.linkM(flowr_analyzer_builder_1.FlowrAnalyzerBuilder, 'setConfig')} to set a complete configuration
180
+ * ${ctx.linkM(flowr_analyzer_builder_1.FlowrAnalyzerBuilder, 'configure')} to set the value of a specific key in the config
179
181
  * ${ctx.linkM(flowr_analyzer_builder_1.FlowrAnalyzerBuilder, 'amendConfig')} to amend the default configuration
180
182
 
181
- By default, the builder uses flowR's standard configuration obtained with ${ctx.link('defaultConfigOptions')}.
183
+ By default, the builder uses flowR's standard configuration obtained with ${ctx.linkO(config_1.FlowrConfig, 'default')}.
182
184
 
183
185
  ${(0, doc_structure_1.block)({
184
186
  type: 'NOTE',
185
- content: `During the analysis with the ${ctx.link(flowr_analyzer_1.FlowrAnalyzer.name)}, you can also access the configuration with
187
+ content: `During the analysis with the ${ctx.link(flowr_analyzer_1.FlowrAnalyzer)}, you can also access the configuration with
186
188
  the ${ctx.link(flowr_analyzer_context_1.FlowrAnalyzerContext)}.`
187
189
  })}
188
190
 
189
191
  ${(0, doc_structure_1.section)('Configuring the Engine', 3)}
190
192
 
191
- FlowR supports multiple [engines](${doc_files_1.FlowrWikiBaseRef}/Engines) for parsing and analyzing R code.
193
+ FlowR supports multiple ${ctx.linkPage('wiki/Engines', 'engines')} for parsing and analyzing R code.
192
194
  With the builder, you can select the engine to use with:
193
195
 
194
196
  * ${ctx.linkM(flowr_analyzer_builder_1.FlowrAnalyzerBuilder, 'setEngine')} to set the desired engine.
@@ -292,7 +294,7 @@ ${(0, doc_structure_1.section)('File Loading', 4)}
292
294
 
293
295
  These plugins register for every file encountered by the [files context](#Files_Context) and determine whether and _how_ they can process the file.
294
296
  They are responsible for transforming the raw file content into a representation that flowR can work with during the analysis.
295
- For example, the ${ctx.link(flowr_analyzer_description_file_plugin_1.FlowrAnalyzerDescriptionFilePlugin.name)} adds support for R \`DESCRIPTION\` files by parsing their content into key-value pairs.
297
+ For example, the ${ctx.link(flowr_analyzer_description_file_plugin_1.FlowrAnalyzerDescriptionFilePlugin)} adds support for R \`DESCRIPTION\` files by parsing their content into key-value pairs.
296
298
  These can then be used by other plugins, e.g. the ${ctx.link(flowr_analyzer_package_versions_description_file_plugin_1.FlowrAnalyzerPackageVersionsDescriptionFilePlugin)} that extracts package version information from these files.
297
299
 
298
300
  If multiple file plugins could apply (${ctx.link('DefaultFlowrAnalyzerFilePlugin::' + flowr_analyzer_file_plugin_1.FlowrAnalyzerFilePlugin.defaultPlugin().applies.name)}) to the same file,
@@ -47,9 +47,7 @@ const log_1 = require("../../test/functionality/_helper/log");
47
47
  const log_2 = require("../util/log");
48
48
  async function makeAnalyzerExample() {
49
49
  const analyzer = await new flowr_analyzer_builder_1.FlowrAnalyzerBuilder()
50
- .amendConfig(c => {
51
- c.ignoreSourceCalls = true;
52
- })
50
+ .configure('ignoreSourceCalls', true)
53
51
  .setEngine('tree-sitter')
54
52
  .build();
55
53
  analyzer.addRequest('x <- 1; y <- x; print(y);');
@@ -891,7 +891,7 @@ ${await getVertexExplanations(treeSitter, ctx)}
891
891
 
892
892
  ${(0, doc_structure_1.section)('Edges', 2, 'edges')}
893
893
 
894
- 1. ${(0, doc_data_dfg_util_1.getAllEdges)().map(([k, v], index) => `[\`${k}\` (${v})](#${index + 1}-${k.toLowerCase().replace(/\s/g, '-')}-edge)`).join('\n1. ')}
894
+ 1. ${(0, doc_data_dfg_util_1.getAllEdges)().map(([k, v], index) => `[\`${k}\` (${v})](#${index + 1}-${k.toLowerCase().replaceAll(/\s/g, '-')}-edge)`).join('\n1. ')}
895
895
 
896
896
  ${await getEdgesExplanations(treeSitter, ctx)}
897
897
 
@@ -200,7 +200,7 @@ ${await (0, doc_repl_1.documentReplSession)(parser, [{
200
200
  For more information on the available queries, please check out the ${ctx.linkPage('wiki/Query API', 'Query API')}.
201
201
  `;
202
202
  }
203
- function explainConfigFile() {
203
+ function explainConfigFile(ctx) {
204
204
  return `
205
205
 
206
206
  When running _flowR_, you may want to specify some behaviors with a dedicated configuration file.
@@ -211,6 +211,8 @@ Within the REPL this works by running the following:
211
211
 
212
212
  ${(0, doc_code_1.codeBlock)('shell', ':query @config')}
213
213
 
214
+ To work with the ${ctx.link(config_1.FlowrConfig)} you can use the provided helper objects alongside its methods like
215
+ ${ctx.linkO(config_1.FlowrConfig, 'amend')}.
214
216
  The following summarizes the configuration options:
215
217
 
216
218
  - \`ignoreSourceCalls\`: If set to \`true\`, _flowR_ will ignore source calls when analyzing the code, i.e., ignoring the inclusion of other files.
@@ -298,7 +300,7 @@ ${(0, doc_code_1.codeBlock)('json', JSON.stringify({
298
300
 
299
301
  <summary style='color:gray'>Full Configuration-File Schema</summary>
300
302
 
301
- ${(0, schema_1.describeSchema)(config_1.flowrConfigFileSchema, ansi_1.markdownFormatter)}
303
+ ${(0, schema_1.describeSchema)(config_1.FlowrConfig.Schema, ansi_1.markdownFormatter)}
302
304
 
303
305
  </details>
304
306
 
@@ -331,7 +333,7 @@ ${await explainRepl(treeSitter, ctx)}
331
333
  <a id='configuring-flowr'></a>
332
334
  ## ⚙️ Configuring FlowR
333
335
 
334
- ${explainConfigFile()}
336
+ ${explainConfigFile(ctx)}
335
337
 
336
338
  <a id='writing-code'></a>
337
339
  ## ⚒️ Writing Code
@@ -46,12 +46,12 @@ function prettyPrintExpectedOutput(expected) {
46
46
  //
47
47
  lines = expected.trim().replace(/^\s*\[+\s*{*/m, '').replace(/\s*}*\s*]+\s*$/, '').split('\n').filter(l => l.trim() !== '');
48
48
  /* take the indentation of the last line and remove it from all but the first: */
49
- const indentation = lines[lines.length - 1].match(/^\s*/)?.[0] ?? '';
49
+ const indentation = lines.at(-1)?.match(/^\s*/)?.[0] ?? '';
50
50
  return lines.map((line, i) => {
51
51
  if (i === 0) {
52
52
  return line;
53
53
  }
54
- return line.replace(new RegExp('^' + indentation, 'g'), '');
54
+ return line.replaceAll(new RegExp('^' + indentation, 'g'), '');
55
55
  }).join('\n');
56
56
  }
57
57
  function buildSamplesFromLinterTestCases(_parser, testFile) {
@@ -236,7 +236,7 @@ We use tags to categorize linting rules for users. The following tags are availa
236
236
  | Tag/Badge&emsp;&emsp; | Description |
237
237
  | --- | :-- |
238
238
  ${Object.entries(linter_tags_1.LintingRuleTag).map(([name, tag]) => {
239
- return `| <a id="${tag}"></a> ${(makeTagBadge(tag, tagTypes.info))} | ${(0, doc_types_1.getDocumentationForType)('LintingRuleTag::' + name, tagTypes.info).replaceAll(/\n/g, ' ')} (rule${getAllLintingRulesWithTag(tag).length === 1 ? '' : 's'}: ${(0, strings_1.joinWithLast)(getAllLintingRulesWithTag(tag).map(l => linkToRule(l))) || '_none_'}) | `;
239
+ return `| <a id="${tag}"></a> ${(makeTagBadge(tag, tagTypes.info))} | ${(0, doc_types_1.getDocumentationForType)('LintingRuleTag::' + name, tagTypes.info).replaceAll('\n', ' ')} (rule${getAllLintingRulesWithTag(tag).length === 1 ? '' : 's'}: ${(0, strings_1.joinWithLast)(getAllLintingRulesWithTag(tag).map(l => linkToRule(l))) || '_none_'}) | `;
240
240
  }).join('\n')}
241
241
 
242
242
  ${(0, doc_structure_1.section)('Certainty', 2, 'certainty')}
@@ -248,14 +248,14 @@ ${(0, doc_structure_1.section)('Rule Certainty', 3, 'rule-certainty')}
248
248
  | Rule Certainty | Description |
249
249
  | -------------- | :---------- |
250
250
  ${Object.entries(linter_format_1.LintingRuleCertainty).map(([name, certainty]) => {
251
- return `| <a id="${certainty}"></a> \`${certainty}\` | ${(0, doc_types_1.getDocumentationForType)('LintingRuleCertainty::' + name, tagTypes.info).replaceAll(/\n/g, ' ')} (rule${getAllLintingRulesWitCertainty(certainty).length === 1 ? '' : 's'}: ${(0, strings_1.joinWithLast)(getAllLintingRulesWitCertainty(certainty).map(l => linkToRule(l))) || '_none_'}) |`;
251
+ return `| <a id="${certainty}"></a> \`${certainty}\` | ${(0, doc_types_1.getDocumentationForType)('LintingRuleCertainty::' + name, tagTypes.info).replaceAll('\n', ' ')} (rule${getAllLintingRulesWitCertainty(certainty).length === 1 ? '' : 's'}: ${(0, strings_1.joinWithLast)(getAllLintingRulesWitCertainty(certainty).map(l => linkToRule(l))) || '_none_'}) |`;
252
252
  }).join('\n')}
253
253
 
254
254
  ${(0, doc_structure_1.section)('Result Certainty', 3, 'result-certainty')}
255
255
 
256
256
  | Result Certainty | Description |
257
257
  | ---------------- | :---------- |
258
- ${Object.entries(linter_format_1.LintingResultCertainty).map(([name, certainty]) => `| <a id="${certainty}"></a> \`${certainty}\` | ${(0, doc_types_1.getDocumentationForType)('LintingResultCertainty::' + name, tagTypes.info).replaceAll(/\n/g, ' ')} |`).join('\n')}
258
+ ${Object.entries(linter_format_1.LintingResultCertainty).map(([name, certainty]) => `| <a id="${certainty}"></a> \`${certainty}\` | ${(0, doc_types_1.getDocumentationForType)('LintingResultCertainty::' + name, tagTypes.info).replaceAll('\n', ' ')} |`).join('\n')}
259
259
 
260
260
  `.trim();
261
261
  }
@@ -49,8 +49,7 @@ exports.ConstantWikiLinkInfo = {
49
49
  */
50
50
  function makeDocContextForTypes(shell, ...rootFolders) {
51
51
  if (rootFolders.length === 0) {
52
- rootFolders.push(path_1.default.resolve(__dirname, '../../../src'));
53
- rootFolders.push(path_1.default.resolve(__dirname, '../../../test/functionality'));
52
+ rootFolders.push(path_1.default.resolve(__dirname, '../../../src'), path_1.default.resolve(__dirname, '../../../test/functionality'));
54
53
  }
55
54
  const { info, program } = (0, doc_types_1.getTypesFromFolder)({ rootFolder: rootFolders, typeNameForMermaid: undefined });
56
55
  return {
@@ -108,10 +107,10 @@ function makeDocContextForTypes(shell, ...rootFolders) {
108
107
  text ??= i.name;
109
108
  }
110
109
  else {
111
- link = `${doc_files_1.FlowrWikiBaseRef}/${pageName.toLowerCase().replace(/ /g, '-')}`;
110
+ link = `${doc_files_1.FlowrGithubRef}/${pageName.toLowerCase().replaceAll(' ', '-')}`;
112
111
  }
113
112
  text ??= pageName.split('/').pop() ?? pageName;
114
- return `[${text}](${link}${segment ? `#${segment}` : ''})`;
113
+ return `[${text}](${link}${segment ? '#' + segment : ''})`;
115
114
  },
116
115
  linkCode(path, lineNumber) {
117
116
  const lnk = lineNumber ? `${path.toString()}#L${lineNumber}` : path.toString();
package/engines.d.ts CHANGED
@@ -1,9 +1,9 @@
1
- import { type FlowrConfigOptions, type KnownEngines } from './config';
1
+ import { FlowrConfig, type KnownEngines } from './config';
2
2
  /**
3
3
  * Retrieve all requested engine instance.
4
4
  * Please make sure that if this includes the R engine, that you properly shut it down again!
5
5
  */
6
- export declare function retrieveEngineInstances(config: FlowrConfigOptions, defaultOnly?: boolean): Promise<{
6
+ export declare function retrieveEngineInstances(config: FlowrConfig, defaultOnly?: boolean): Promise<{
7
7
  engines: KnownEngines;
8
8
  default: keyof KnownEngines;
9
9
  }>;
package/engines.js CHANGED
@@ -12,9 +12,9 @@ const log_1 = require("./util/log");
12
12
  */
13
13
  async function retrieveEngineInstances(config, defaultOnly = false) {
14
14
  const engines = {};
15
- if ((0, config_1.getEngineConfig)(config, 'r-shell') && (!defaultOnly || config.defaultEngine === 'r-shell')) {
15
+ if (config_1.FlowrConfig.getForEngine(config, 'r-shell') && (!defaultOnly || config.defaultEngine === 'r-shell')) {
16
16
  // we keep an active shell session to allow other parse investigations :)
17
- engines['r-shell'] = new shell_1.RShell((0, config_1.getEngineConfig)(config, 'r-shell'), {
17
+ engines['r-shell'] = new shell_1.RShell(config_1.FlowrConfig.getForEngine(config, 'r-shell'), {
18
18
  revive: 2 /* RShellReviveOptions.Always */,
19
19
  onRevive: (code, signal) => {
20
20
  const signalText = signal == null ? '' : ` and signal ${signal}`;
@@ -23,8 +23,8 @@ async function retrieveEngineInstances(config, defaultOnly = false) {
23
23
  }
24
24
  });
25
25
  }
26
- if ((0, config_1.getEngineConfig)(config, 'tree-sitter') && (!defaultOnly || config.defaultEngine === 'tree-sitter')) {
27
- await tree_sitter_executor_1.TreeSitterExecutor.initTreeSitter((0, config_1.getEngineConfig)(config, 'tree-sitter'));
26
+ if (config_1.FlowrConfig.getForEngine(config, 'tree-sitter') && (!defaultOnly || config.defaultEngine === 'tree-sitter')) {
27
+ await tree_sitter_executor_1.TreeSitterExecutor.initTreeSitter(config_1.FlowrConfig.getForEngine(config, 'tree-sitter'));
28
28
  engines['tree-sitter'] = new tree_sitter_executor_1.TreeSitterExecutor();
29
29
  }
30
30
  let defaultEngine = config.defaultEngine;
@@ -3,7 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.DATA_FRAME_ACCESS_VALIDATION = void 0;
4
4
  const shape_inference_1 = require("../../abstract-interpretation/data-frame/shape-inference");
5
5
  const satisfiable_domain_1 = require("../../abstract-interpretation/domains/satisfiable-domain");
6
- const config_1 = require("../../config");
7
6
  const cfg_kind_1 = require("../../project/cfg-kind");
8
7
  const type_1 = require("../../r-bridge/lang-4.x/ast/model/type");
9
8
  const flowr_search_builder_1 = require("../../search/flowr-search-builder");
@@ -13,13 +12,14 @@ const range_1 = require("../../util/range");
13
12
  const linter_format_1 = require("../linter-format");
14
13
  const linter_tags_1 = require("../linter-tags");
15
14
  const identifier_1 = require("../../dataflow/environments/identifier");
15
+ const config_1 = require("../../config");
16
16
  exports.DATA_FRAME_ACCESS_VALIDATION = {
17
17
  createSearch: () => flowr_search_builder_1.Q.all().with(search_enrichers_1.Enrichment.CallTargets, { onlyBuiltin: true }),
18
18
  processSearchResult: async (elements, config, data) => {
19
19
  let ctx = data.analyzer.inspectContext();
20
20
  ctx = {
21
21
  ...ctx,
22
- config: (0, config_1.amendConfig)(data.analyzer.flowrConfig, flowrConfig => {
22
+ config: config_1.FlowrConfig.amend(data.analyzer.flowrConfig, flowrConfig => {
23
23
  if (config.readLoadedData !== undefined) {
24
24
  flowrConfig.abstractInterpretation.dataFrame.readLoadedData.readExternalFiles = config.readLoadedData;
25
25
  }
@@ -61,7 +61,7 @@ exports.DATA_FRAME_ACCESS_VALIDATION = {
61
61
  .map(({ nodeId, operand, ...accessed }) => ({
62
62
  ...accessed,
63
63
  node: data.normalize.idMap.get(nodeId),
64
- operand: operand !== undefined ? data.normalize.idMap.get(operand) : undefined,
64
+ operand: operand === undefined ? undefined : data.normalize.idMap.get(operand),
65
65
  }))
66
66
  .map(({ node, operand, ...accessed }) => ({
67
67
  ...accessed,
@@ -76,10 +76,10 @@ exports.DATA_FRAME_ACCESS_VALIDATION = {
76
76
  prettyPrint: {
77
77
  [linter_format_1.LintingPrettyPrintContext.Query]: result => `Access of ${result.type} ` +
78
78
  (typeof result.accessed === 'string' ? `"${result.accessed}"` : result.accessed) + ' ' +
79
- (result.operand !== undefined ? `of \`${result.operand}\`` : `at \`${result.access}\``) + ` at ${range_1.SourceRange.format(result.range)}`,
79
+ (result.operand === undefined ? `at \`${result.access}\`` : `of \`${result.operand}\``) + ` at ${range_1.SourceRange.format(result.range)}`,
80
80
  [linter_format_1.LintingPrettyPrintContext.Full]: result => `Accessed ${result.type} ` +
81
81
  (typeof result.accessed === 'string' ? `"${result.accessed}"` : result.accessed) + ' does not exist ' +
82
- (result.operand !== undefined ? `in \`${result.operand}\`` : `at \`${result.access}\``) + ` at ${range_1.SourceRange.format(result.range)}`
82
+ (result.operand === undefined ? `at \`${result.access}\`` : `in \`${result.operand}\``) + ` at ${range_1.SourceRange.format(result.range)}`
83
83
  },
84
84
  info: {
85
85
  name: 'Dataframe Access Validation',
@@ -53,7 +53,7 @@ export declare function getMostUsedCasing(symbols: {
53
53
  /**
54
54
  * Attempts to fix the casing of the given identifier to match the provided convention.
55
55
  */
56
- export declare function fixCasing(identifier: string, convention: CasingConvention): string | undefined;
56
+ export declare function fixCasing(identifier: string, convention: CasingConvention, ignorePrefix?: string): string | undefined;
57
57
  /**
58
58
  * Creates quick fixes for renaming all references to the given node to match the provided replacement.
59
59
  */
@@ -91,11 +91,15 @@ function getMostUsedCasing(symbols) {
91
91
  /**
92
92
  * Attempts to fix the casing of the given identifier to match the provided convention.
93
93
  */
94
- function fixCasing(identifier, convention) {
94
+ function fixCasing(identifier, convention, ignorePrefix) {
95
95
  if (!containsAlpha(identifier)) {
96
96
  return undefined;
97
97
  }
98
- const tokens = identifier.split(/(?=[A-Z])|_/).map(s => s.toLowerCase());
98
+ if (ignorePrefix) {
99
+ identifier = identifier.replace(new RegExp(`^(${ignorePrefix})`), '');
100
+ }
101
+ const splitOn = identifier.includes('_') ? /_/ : /(?=[A-Z])/;
102
+ const tokens = identifier.split(splitOn).map(s => s.toLowerCase());
99
103
  const firstUp = (s) => {
100
104
  if (s.length < 1) {
101
105
  return s.toUpperCase();
@@ -167,7 +171,7 @@ exports.NAMING_CONVENTION = {
167
171
  const results = symbols
168
172
  .filter(m => (m.detectedCasing !== casing) && (!config.ignoreNonAlpha || containsAlpha(m.name)))
169
173
  .map(({ id, ...m }) => {
170
- const fix = fixCasing(m.name, casing);
174
+ const fix = fixCasing(m.name, casing, config.ignorePrefix);
171
175
  return {
172
176
  ...m,
173
177
  involvedId: id,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@eagleoutice/flowr",
3
- "version": "2.9.12",
3
+ "version": "2.9.13",
4
4
  "description": "Static Dataflow Analyzer and Program Slicer for the R Programming Language",
5
5
  "types": "dist/src/index.d.ts",
6
6
  "repository": {
@@ -178,6 +178,7 @@
178
178
  "@types/n-readlines": "^1.0.6",
179
179
  "@types/n3": "^1.26.0",
180
180
  "@types/object-hash": "^3.0.6",
181
+ "@types/object-path": "^0.11.4",
181
182
  "@types/seedrandom": "^3.0.8",
182
183
  "@types/semver": "^7.7.0",
183
184
  "@types/tmp": "^0.2.6",
@@ -212,6 +213,7 @@
212
213
  "n-readlines": "^1.0.3",
213
214
  "n3": "^1.26.0",
214
215
  "object-hash": "^3.0.0",
216
+ "object-path": "^0.11.8",
215
217
  "object-sizeof": "^2.6.5",
216
218
  "rotating-file-stream": "^3.2.8",
217
219
  "seedrandom": "^3.0.5",
@@ -2,7 +2,7 @@ import { FlowrAnalyzerFilesContext, type RAnalysisRequest, type ReadOnlyFlowrAna
2
2
  import { FlowrAnalyzerDependenciesContext, type ReadOnlyFlowrAnalyzerDependenciesContext } from './flowr-analyzer-dependencies-context';
3
3
  import { type FlowrAnalyzerPlugin, PluginType } from '../plugins/flowr-analyzer-plugin';
4
4
  import type { fileProtocol, RParseRequestFromFile, RParseRequests } from '../../r-bridge/retriever';
5
- import type { FlowrConfigOptions } from '../../config';
5
+ import { FlowrConfig } from '../../config';
6
6
  import type { FlowrFileProvider } from './flowr-file';
7
7
  import type { ReadOnlyFlowrAnalyzerEnvironmentContext } from './flowr-analyzer-environment-context';
8
8
  import { FlowrAnalyzerEnvironmentContext } from './flowr-analyzer-environment-context';
@@ -34,7 +34,7 @@ export interface ReadOnlyFlowrAnalyzerContext {
34
34
  /**
35
35
  * The configuration options used by the analyzer.
36
36
  */
37
- readonly config: FlowrConfigOptions;
37
+ readonly config: FlowrConfig;
38
38
  /**
39
39
  * Run all resolution steps that can be done before the main analysis run.
40
40
  */
@@ -58,8 +58,8 @@ export declare class FlowrAnalyzerContext implements ReadOnlyFlowrAnalyzerContex
58
58
  readonly deps: FlowrAnalyzerDependenciesContext;
59
59
  readonly env: FlowrAnalyzerEnvironmentContext;
60
60
  private _analyzer;
61
- readonly config: FlowrConfigOptions;
62
- constructor(config: FlowrConfigOptions, plugins: ReadonlyMap<PluginType, readonly FlowrAnalyzerPlugin[]>);
61
+ readonly config: FlowrConfig;
62
+ constructor(config: FlowrConfig, plugins: ReadonlyMap<PluginType, readonly FlowrAnalyzerPlugin[]>);
63
63
  /**
64
64
  * Provides the analyzer associated with this context, if any.
65
65
  * This is usually set when the context is used within an analyzer instance.
@@ -91,7 +91,7 @@ export declare class FlowrAnalyzerContext implements ReadOnlyFlowrAnalyzerContex
91
91
  * @see {@link requestFromInput} - for details on how inputs are processed into requests.
92
92
  * @see {@link contextFromSources} - to create a context from source code strings directly.
93
93
  */
94
- export declare function contextFromInput(input: `${typeof fileProtocol}${string}` | string | readonly string[] | RParseRequests, config?: FlowrConfigOptions, plugins?: FlowrAnalyzerPlugin[]): FlowrAnalyzerContext;
94
+ export declare function contextFromInput(input: `${typeof fileProtocol}${string}` | string | readonly string[] | RParseRequests, config?: FlowrConfig, plugins?: FlowrAnalyzerPlugin[]): FlowrAnalyzerContext;
95
95
  /**
96
96
  * Create a {@link FlowrAnalyzerContext} from a set of source code strings.
97
97
  * @param sources - A record mapping file paths to their source code content.
@@ -100,4 +100,4 @@ export declare function contextFromInput(input: `${typeof fileProtocol}${string}
100
100
  * @see {@link contextFromInput} - to create a context from input requests.
101
101
  * @see {@link FlowrInlineTextFile} - to create inline text files for the sources.
102
102
  */
103
- export declare function contextFromSources(sources: Record<string, string>, config?: FlowrConfigOptions, plugins?: FlowrAnalyzerPlugin[]): FlowrAnalyzerContext;
103
+ export declare function contextFromSources(sources: Record<string, string>, config?: FlowrConfig, plugins?: FlowrAnalyzerPlugin[]): FlowrAnalyzerContext;
@@ -93,7 +93,7 @@ exports.FlowrAnalyzerContext = FlowrAnalyzerContext;
93
93
  * @see {@link requestFromInput} - for details on how inputs are processed into requests.
94
94
  * @see {@link contextFromSources} - to create a context from source code strings directly.
95
95
  */
96
- function contextFromInput(input, config = config_1.defaultConfigOptions, plugins) {
96
+ function contextFromInput(input, config = config_1.FlowrConfig.default(), plugins) {
97
97
  const context = new FlowrAnalyzerContext(config, (0, arrays_1.arraysGroupBy)(plugins ?? [], (p) => p.type));
98
98
  if (typeof input === 'string' || Array.isArray(input) && input.every(i => typeof i === 'string')) {
99
99
  const requests = (0, retriever_1.requestFromInput)(input);
@@ -113,7 +113,7 @@ function contextFromInput(input, config = config_1.defaultConfigOptions, plugins
113
113
  * @see {@link contextFromInput} - to create a context from input requests.
114
114
  * @see {@link FlowrInlineTextFile} - to create inline text files for the sources.
115
115
  */
116
- function contextFromSources(sources, config = config_1.defaultConfigOptions, plugins) {
116
+ function contextFromSources(sources, config = config_1.FlowrConfig.default(), plugins) {
117
117
  const context = new FlowrAnalyzerContext(config, (0, arrays_1.arraysGroupBy)(plugins ?? [], (p) => p.type));
118
118
  for (const [p, c] of Object.entries(sources)) {
119
119
  context.addFile(new flowr_file_1.FlowrInlineTextFile(p, c));