@eagleoutice/flowr 2.9.11 → 2.9.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95) hide show
  1. package/README.md +31 -31
  2. package/benchmark/slicer.d.ts +4 -2
  3. package/benchmark/slicer.js +20 -6
  4. package/benchmark/stats/print.js +12 -0
  5. package/benchmark/stats/stats.d.ts +3 -2
  6. package/benchmark/stats/stats.js +1 -1
  7. package/benchmark/summarizer/data.d.ts +1 -0
  8. package/benchmark/summarizer/second-phase/process.js +5 -0
  9. package/cli/benchmark-app.d.ts +1 -0
  10. package/cli/benchmark-app.js +1 -0
  11. package/cli/benchmark-helper-app.d.ts +2 -1
  12. package/cli/benchmark-helper-app.js +6 -3
  13. package/cli/common/options.d.ts +8 -0
  14. package/cli/common/options.js +3 -1
  15. package/cli/common/scripts-info.d.ts +8 -0
  16. package/cli/export-quads-app.js +1 -1
  17. package/cli/flowr.js +3 -3
  18. package/cli/repl/core.d.ts +3 -3
  19. package/cli/repl/server/connection.d.ts +2 -2
  20. package/cli/repl/server/server.d.ts +2 -2
  21. package/cli/script-core/statistics-core.d.ts +2 -2
  22. package/cli/script-core/statistics-helper-core.d.ts +2 -2
  23. package/cli/script-core/statistics-helper-core.js +1 -1
  24. package/cli/slicer-app.js +2 -2
  25. package/cli/statistics-app.js +1 -1
  26. package/cli/statistics-helper-app.js +1 -1
  27. package/cli/wiki.js +2 -2
  28. package/config.d.ts +65 -24
  29. package/config.js +197 -161
  30. package/control-flow/extract-cfg.js +5 -8
  31. package/core/steps/pipeline-step.d.ts +2 -2
  32. package/dataflow/cluster.js +12 -8
  33. package/dataflow/eval/resolve/alias-tracking.js +12 -15
  34. package/dataflow/graph/graph.js +8 -8
  35. package/dataflow/graph/quads.js +4 -7
  36. package/dataflow/internal/linker.js +5 -5
  37. package/dataflow/internal/process/functions/call/built-in/built-in-eval.js +2 -2
  38. package/dataflow/internal/process/functions/call/built-in/built-in-source.d.ts +1 -1
  39. package/dataflow/internal/process/functions/call/built-in/built-in-source.js +20 -9
  40. package/documentation/doc-readme.js +2 -2
  41. package/documentation/wiki-analyzer.js +7 -5
  42. package/documentation/wiki-core.js +1 -3
  43. package/documentation/wiki-dataflow-graph.js +87 -32
  44. package/documentation/wiki-engine.js +18 -0
  45. package/documentation/wiki-interface.js +5 -3
  46. package/documentation/wiki-linter.js +5 -5
  47. package/documentation/wiki-mk/doc-context.d.ts +44 -11
  48. package/documentation/wiki-mk/doc-context.js +19 -17
  49. package/engines.d.ts +2 -2
  50. package/engines.js +4 -4
  51. package/linter/rules/dataframe-access-validation.js +5 -5
  52. package/linter/rules/naming-convention.d.ts +1 -1
  53. package/linter/rules/naming-convention.js +7 -3
  54. package/package.json +3 -1
  55. package/project/context/flowr-analyzer-context.d.ts +6 -6
  56. package/project/context/flowr-analyzer-context.js +2 -2
  57. package/project/context/flowr-analyzer-files-context.d.ts +2 -2
  58. package/project/context/flowr-analyzer-files-context.js +28 -8
  59. package/project/flowr-analyzer-builder.d.ts +10 -6
  60. package/project/flowr-analyzer-builder.js +12 -3
  61. package/project/flowr-analyzer.d.ts +3 -3
  62. package/queries/catalog/config-query/config-query-format.d.ts +5 -5
  63. package/queries/catalog/dependencies-query/function-info/library-functions.js +2 -1
  64. package/queries/catalog/dependencies-query/function-info/read-functions.js +1 -1
  65. package/queries/catalog/dependencies-query/function-info/visualize-functions.js +9 -1
  66. package/queries/catalog/dependencies-query/function-info/write-functions.js +1 -0
  67. package/queries/catalog/df-shape-query/df-shape-query-format.d.ts +2 -2
  68. package/queries/catalog/does-call-query/does-call-query-format.d.ts +2 -2
  69. package/queries/catalog/files-query/files-query-format.d.ts +3 -3
  70. package/queries/catalog/inspect-exceptions-query/inspect-exception-query-format.d.ts +2 -2
  71. package/queries/catalog/inspect-higher-order-query/inspect-higher-order-query-format.d.ts +2 -2
  72. package/queries/catalog/inspect-recursion-query/inspect-recursion-query-format.d.ts +2 -2
  73. package/queries/catalog/linter-query/linter-query-format.d.ts +3 -3
  74. package/queries/catalog/location-map-query/location-map-query-format.d.ts +2 -2
  75. package/queries/catalog/origin-query/origin-query-format.d.ts +2 -2
  76. package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +3 -3
  77. package/queries/catalog/resolve-value-query/resolve-value-query-format.d.ts +2 -2
  78. package/queries/catalog/resolve-value-query/resolve-value-query-format.js +4 -0
  79. package/queries/catalog/static-slice-query/static-slice-query-format.d.ts +2 -2
  80. package/queries/query.d.ts +18 -18
  81. package/r-bridge/lang-4.x/ast/model/model.d.ts +7 -2
  82. package/r-bridge/lang-4.x/ast/model/model.js +13 -0
  83. package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +2 -2
  84. package/r-bridge/lang-4.x/ast/parser/json/parser.js +2 -2
  85. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js +6 -2
  86. package/statistics/statistics.d.ts +2 -2
  87. package/util/mermaid/dfg.d.ts +8 -0
  88. package/util/mermaid/dfg.js +4 -0
  89. package/util/objects.d.ts +12 -0
  90. package/util/objects.js +28 -0
  91. package/util/quads.js +14 -6
  92. package/util/range.d.ts +3 -0
  93. package/util/range.js +3 -0
  94. package/util/summarizer.js +1 -1
  95. package/util/version.js +1 -1
@@ -292,14 +292,11 @@ function cfgFor(forLoop, variable, vector, body) {
292
292
  for (const breakPoint of body.breaks) {
293
293
  graph.addEdge(control_flow_graph_1.CfgVertex.toExitId(forLoopId), breakPoint, control_flow_graph_1.CfgEdge.makeFd());
294
294
  }
295
- const isNotEndless = body.exitPoints.length > 0 || body.breaks.length > 0;
296
- if (isNotEndless) {
297
- graph.addVertex(control_flow_graph_1.CfgVertex.makeExitMarker(forLoopId));
298
- for (const e of variable.exitPoints) {
299
- graph.addEdge(control_flow_graph_1.CfgVertex.toExitId(forLoopId), e, control_flow_graph_1.CfgEdge.makeCdFalse(forLoopId));
300
- }
295
+ graph.addVertex(control_flow_graph_1.CfgVertex.makeExitMarker(forLoopId));
296
+ for (const e of variable.exitPoints) {
297
+ graph.addEdge(control_flow_graph_1.CfgVertex.toExitId(forLoopId), e, control_flow_graph_1.CfgEdge.makeCdFalse(forLoopId));
301
298
  }
302
- return { graph, breaks: [], nexts: [], returns: body.returns, exitPoints: isNotEndless ? [control_flow_graph_1.CfgVertex.toExitId(forLoopId)] : [], entryPoints: [forLoopId] };
299
+ return { graph, breaks: [], nexts: [], returns: body.returns, exitPoints: [control_flow_graph_1.CfgVertex.toExitId(forLoopId)], entryPoints: [forLoopId] };
303
300
  }
304
301
  function cfgFunctionDefinition(fn, params, body) {
305
302
  const fnId = fn.info.id;
@@ -336,7 +333,7 @@ function cfgFunctionDefinition(fn, params, body) {
336
333
  return { graph: graph, breaks: [], nexts: [], returns: [], exitPoints: [fnId], entryPoints: [fnId] };
337
334
  }
338
335
  function cfgFunctionCall(call, name, args, down) {
339
- if (call.named && call.functionName.content === 'ifelse') {
336
+ if (call.named && call.functionName.content === 'ifelse' && args.length > 1) {
340
337
  // special built-in handling for ifelse as it is an expression that does not short-circuit
341
338
  return cfgIfThenElse(call, args[0] === r_function_call_1.EmptyArgument ? (0, control_flow_graph_1.emptyControlFlowInformation)() : args[0], args[1] === r_function_call_1.EmptyArgument ? (0, control_flow_graph_1.emptyControlFlowInformation)() : args[1], args[2] === r_function_call_1.EmptyArgument ? (0, control_flow_graph_1.emptyControlFlowInformation)() : args[2]);
342
339
  }
@@ -4,7 +4,7 @@
4
4
  */
5
5
  import type { MergeableRecord } from '../../util/objects';
6
6
  import type { InternalStepPrinter, IPipelineStepPrinter, StepOutputFormat } from '../print/print';
7
- import type { FlowrConfigOptions } from '../../config';
7
+ import type { FlowrConfig } from '../../config';
8
8
  /**
9
9
  * This represents the format of a step processor which retrieves two things:
10
10
  *
@@ -16,7 +16,7 @@ import type { FlowrConfigOptions } from '../../config';
16
16
  * list all steps that you require as your {@link IPipelineStepOrder#dependencies|dependencies}, even if they would be
17
17
  * already covered transitively.
18
18
  */
19
- export type StepProcessingFunction = (results: Record<string, unknown>, input: Record<string, unknown>, config: FlowrConfigOptions) => unknown;
19
+ export type StepProcessingFunction = (results: Record<string, unknown>, input: Record<string, unknown>, config: FlowrConfig) => unknown;
20
20
  /**
21
21
  * This represents the required execution frequency of a step.
22
22
  */
@@ -30,18 +30,22 @@ function makeCluster(graph, from, notReached) {
30
30
  if (info.tag === vertex_1.VertexType.FunctionDefinition) {
31
31
  for (const { nodeId } of info.exitPoints) {
32
32
  if (notReached.delete(nodeId)) {
33
- makeCluster(graph, nodeId, notReached).forEach(n => nodes.add(n));
33
+ for (const m of makeCluster(graph, nodeId, notReached)) {
34
+ nodes.add(m);
35
+ }
34
36
  }
35
37
  }
36
38
  }
37
39
  // cluster adjacent edges
38
- for (const [dest, e] of [...graph.outgoingEdges(from) ?? [], ...graph.ingoingEdges(from) ?? []]) {
39
- // don't cluster for function content if it isn't returned
40
- if (edge_1.DfEdge.doesNotIncludeType(e, edge_1.EdgeType.Returns) && info.onlyBuiltin && info.name === '{') {
41
- continue;
42
- }
43
- if (notReached.delete(dest)) {
44
- makeCluster(graph, dest, notReached).forEach(n => nodes.add(n));
40
+ for (const edges of [graph.outgoingEdges(from), graph.ingoingEdges(from)]) {
41
+ for (const [dest, e] of edges ?? []) {
42
+ // don't cluster for function content if it isn't returned
43
+ if (edge_1.DfEdge.doesNotIncludeType(e, edge_1.EdgeType.Returns) && info.onlyBuiltin && info.name === '{') {
44
+ continue;
45
+ }
46
+ if (notReached.delete(dest)) {
47
+ makeCluster(graph, dest, notReached).forEach(n => nodes.add(n));
48
+ }
45
49
  }
46
50
  }
47
51
  return nodes;
@@ -34,7 +34,7 @@ function getFunctionCallAlias(sourceId, dataflow, environment) {
34
34
  return undefined;
35
35
  }
36
36
  const defs = (0, resolve_by_name_1.resolveByName)(identifier, environment, identifier_1.ReferenceType.Function);
37
- if (defs === undefined || defs.length !== 1) {
37
+ if (defs?.length !== 1) {
38
38
  return undefined;
39
39
  }
40
40
  return [sourceId];
@@ -206,7 +206,7 @@ function trackAliasInEnvironments(identifier, environment, { blocked, idMap, res
206
206
  }
207
207
  }
208
208
  }
209
- if (values.size == 0) {
209
+ if (values.size === 0) {
210
210
  return r_value_1.Top;
211
211
  }
212
212
  return (0, set_constants_1.setFrom)(...values);
@@ -246,18 +246,7 @@ function trackAliasInEnvironments(identifier, environment, { blocked, idMap, res
246
246
  }
247
247
  });
248
248
  function isNestedInLoop(node, ast) {
249
- const parent = node?.info.parent;
250
- if (node === undefined || !parent) {
251
- return false;
252
- }
253
- const parentNode = ast.get(parent);
254
- if (parentNode === undefined) {
255
- return false;
256
- }
257
- if (parentNode.type === type_1.RType.WhileLoop || parentNode.type === type_1.RType.RepeatLoop || parentNode.type === type_1.RType.ForLoop) {
258
- return true;
259
- }
260
- return isNestedInLoop(parentNode, ast);
249
+ return model_1.RNode.iterateParents(node, ast).some(model_1.RLoopConstructs.is);
261
250
  }
262
251
  /**
263
252
  * Please use {@link resolveIdToValue}
@@ -311,12 +300,14 @@ function trackAliasesInGraph(id, graph, ctx, idMap) {
311
300
  }
312
301
  const isFn = t === vertex_1.VertexType.FunctionCall;
313
302
  const outgoingEdges = graph.outgoingEdges(id) ?? [];
303
+ let foundRetuns = false;
314
304
  // travel all read and defined-by edges
315
305
  for (const [targetId, { types }] of outgoingEdges) {
316
306
  if (isFn) {
317
307
  if (types === edge_1.EdgeType.Returns || types === edge_1.EdgeType.DefinedByOnCall || types === edge_1.EdgeType.DefinedBy) {
318
308
  queue.add(targetId, baseEnvironment, cleanFingerprint, false);
319
309
  }
310
+ foundRetuns ||= edge_1.DfEdge.includesType({ types }, edge_1.EdgeType.Returns);
320
311
  continue;
321
312
  }
322
313
  // currently, they have to be exact!
@@ -324,6 +315,9 @@ function trackAliasesInGraph(id, graph, ctx, idMap) {
324
315
  queue.add(targetId, baseEnvironment, cleanFingerprint, false);
325
316
  }
326
317
  }
318
+ if (isFn && !foundRetuns) {
319
+ return r_value_1.Top;
320
+ }
327
321
  }
328
322
  if (forceTop || resultIds.length === 0) {
329
323
  return r_value_1.Top;
@@ -332,10 +326,13 @@ function trackAliasesInGraph(id, graph, ctx, idMap) {
332
326
  for (const id of resultIds) {
333
327
  const node = idMap.get(id);
334
328
  if (node !== undefined) {
329
+ if (node.info.role === "param-v" /* RoleInParent.ParameterDefaultValue */ || model_1.RNode.iterateParents(node, idMap).some(p => p.info.role === "param-v" /* RoleInParent.ParameterDefaultValue */)) {
330
+ return r_value_1.Top;
331
+ }
335
332
  values.add((0, general_1.valueFromRNodeConstant)(node));
336
333
  }
337
334
  }
338
- return (0, set_constants_1.setFrom)(...values);
335
+ return values.size === 0 ? r_value_1.Top : (0, set_constants_1.setFrom)(...values);
339
336
  }
340
337
  /**
341
338
  * Please use {@link resolveIdToValue}
@@ -402,17 +402,17 @@ class DataflowGraph {
402
402
  to = node_id_1.NodeId.normalize(to);
403
403
  const vertex = this.getVertex(from);
404
404
  (0, assert_1.guard)(vertex !== undefined, () => `node must be defined for ${from} to add control dependency`);
405
- vertex.cds ??= [];
406
- let hasControlDependency = false;
407
- for (const { id, when: cond } of vertex.cds) {
408
- if (id === to && when !== cond) {
409
- hasControlDependency = true;
410
- break;
405
+ if (vertex.cds) {
406
+ for (const { id, when: cond } of vertex.cds) {
407
+ if (id === to && when !== cond) {
408
+ return this;
409
+ }
411
410
  }
412
411
  }
413
- if (!hasControlDependency) {
414
- vertex.cds.push({ id: to, when });
412
+ else {
413
+ vertex.cds = [];
415
414
  }
415
+ vertex.cds.push({ id: to, when });
416
416
  return this;
417
417
  }
418
418
  /** Marks the given node as having unknown side effects */
@@ -10,13 +10,10 @@ const edge_1 = require("./edge");
10
10
  */
11
11
  function df2quads(graph, config) {
12
12
  return (0, quads_1.graph2quads)({
13
- rootIds: [...graph.rootIds()],
14
- vertices: graph.vertices(true)
15
- .map(([id, v]) => ({
16
- ...v,
17
- id
18
- })).toArray(),
19
- edges: graph.edges().flatMap(([fromId, targets]) => [...targets].map(([toId, info]) => ({
13
+ rootIds: Array.from(graph.rootIds()),
14
+ vertices: Array.from(graph.vertices(true)
15
+ .map(([, v]) => v)),
16
+ edges: graph.edges().flatMap(([fromId, targets]) => Array.from(targets).map(([toId, info]) => ({
20
17
  from: fromId,
21
18
  to: toId,
22
19
  type: Array.from(edge_1.DfEdge.typesToNames(info)),
@@ -395,18 +395,18 @@ function getAllLinkedFunctionDefinitions(functionDefinitionReadIds, dataflowGrap
395
395
  builtIns.add(cid);
396
396
  continue;
397
397
  }
398
- const currentInfo = dataflowGraph.get(cid, true);
399
- if (currentInfo === undefined) {
398
+ const vertex = dataflowGraph.getVertex(cid);
399
+ if (vertex === undefined) {
400
400
  continue;
401
401
  }
402
- const [vertex, edges] = currentInfo;
403
402
  // Found a function definition
404
403
  if (vertex.subflow !== undefined) {
405
404
  result.add(vertex);
406
405
  continue;
407
406
  }
408
407
  let hasReturnEdge = false;
409
- for (const [target, e] of edges) {
408
+ const outgoing = dataflowGraph.outgoingEdges(cid) ?? [];
409
+ for (const [target, e] of outgoing) {
410
410
  if (edge_1.DfEdge.includesType(e, edge_1.EdgeType.Returns)) {
411
411
  hasReturnEdge = true;
412
412
  if (!visited.has(target)) {
@@ -417,7 +417,7 @@ function getAllLinkedFunctionDefinitions(functionDefinitionReadIds, dataflowGrap
417
417
  if (vertex.tag === vertex_1.VertexType.FunctionCall || hasReturnEdge || (vertex.tag === vertex_1.VertexType.VariableDefinition && vertex.par)) {
418
418
  continue;
419
419
  }
420
- for (const [target, e] of edges) {
420
+ for (const [target, e] of outgoing) {
421
421
  if (edge_1.DfEdge.includesType(e, LinkedFnFollowBits) && !visited.has(target)) {
422
422
  potential.push(target);
423
423
  }
@@ -45,13 +45,13 @@ function processEvalCall(name, args, rootId, data, config) {
45
45
  const idGenerator = (0, decorate_1.sourcedDeterministicCountingIdGenerator)(name.lexeme + '::' + rootId, name.location);
46
46
  data = {
47
47
  ...data,
48
- cds: [...(data.cds ?? []), { id: rootId, when: true }]
48
+ cds: code.length > 1 ? [...(data.cds ?? []), { id: rootId, when: true }] : data.cds
49
49
  };
50
50
  const originalInfo = { ...information };
51
51
  const result = [];
52
52
  for (const c of code) {
53
53
  const codeRequest = (0, retriever_1.requestFromInput)(c);
54
- const r = (0, built_in_source_1.sourceRequest)(rootId, codeRequest, data, originalInfo, idGenerator);
54
+ const r = (0, built_in_source_1.sourceRequest)(rootId, codeRequest, data, originalInfo, code.length > 1, idGenerator);
55
55
  result.push(r);
56
56
  // add a returns edge from the eval to the result
57
57
  for (const e of r.exitPoints) {
@@ -45,7 +45,7 @@ export declare function processSourceCall<OtherInfo>(name: RSymbol<OtherInfo & P
45
45
  * Processes a source request with the given dataflow processor information and existing dataflow information
46
46
  * Otherwise, this can be an {@link RProjectFile} representing a standalone source file
47
47
  */
48
- export declare function sourceRequest<OtherInfo>(rootId: NodeId, request: RParseRequest | RProjectFile<OtherInfo & ParentInformation>, data: DataflowProcessorInformation<OtherInfo & ParentInformation>, information: DataflowInformation, getId?: IdGenerator<NoInfo>): DataflowInformation;
48
+ export declare function sourceRequest<OtherInfo>(rootId: NodeId, request: RParseRequest | RProjectFile<OtherInfo & ParentInformation>, data: DataflowProcessorInformation<OtherInfo & ParentInformation>, information: DataflowInformation, makeMaybe: boolean, getId?: IdGenerator<NoInfo>): DataflowInformation;
49
49
  /**
50
50
  * Processes a standalone source file (i.e., not from a source function call)
51
51
  */
@@ -30,6 +30,7 @@ const r_value_1 = require("../../../../../eval/values/r-value");
30
30
  const unknown_side_effect_1 = require("../../../../../graph/unknown-side-effect");
31
31
  const alias_tracking_1 = require("../../../../../eval/resolve/alias-tracking");
32
32
  const built_in_1 = require("../../../../../environments/built-in");
33
+ const edge_1 = require("../../../../../graph/edge");
33
34
  /**
34
35
  * Infers working directories based on the given option and reference chain
35
36
  */
@@ -117,8 +118,8 @@ function findSource(resolveSource, seed, data) {
117
118
  const effectivePath = explore ? path_1.default.join(explore, tryPath) : tryPath;
118
119
  const context = data.ctx.files;
119
120
  const get = context.exists(effectivePath, capitalization) ?? context.exists(returnPlatformPath(effectivePath), capitalization);
120
- if (get && !found.includes(effectivePath)) {
121
- found.push(returnPlatformPath(effectivePath));
121
+ if (get && !found.includes(returnPlatformPath(get))) {
122
+ found.push(returnPlatformPath(get));
122
123
  }
123
124
  }
124
125
  }
@@ -176,7 +177,7 @@ function processSourceCall(name, args, rootId, data, config) {
176
177
  result = sourceRequest(rootId, {
177
178
  request: 'file',
178
179
  content: f
179
- }, data, result, (0, decorate_1.sourcedDeterministicCountingIdGenerator)((findCount > 0 ? findCount + '::' : '') + f, name.location));
180
+ }, data, result, true, (0, decorate_1.sourcedDeterministicCountingIdGenerator)((findCount > 0 ? findCount + '::' : '') + f, name.location));
180
181
  }
181
182
  return result;
182
183
  }
@@ -189,7 +190,7 @@ function processSourceCall(name, args, rootId, data, config) {
189
190
  * Processes a source request with the given dataflow processor information and existing dataflow information
190
191
  * Otherwise, this can be an {@link RProjectFile} representing a standalone source file
191
192
  */
192
- function sourceRequest(rootId, request, data, information, getId) {
193
+ function sourceRequest(rootId, request, data, information, makeMaybe, getId) {
193
194
  // parse, normalize and dataflow the sourced file
194
195
  let dataflow;
195
196
  let fst;
@@ -239,11 +240,21 @@ function sourceRequest(rootId, request, data, information, getId) {
239
240
  }
240
241
  // take the entry point as well as all the written references, and give them a control dependency to the source call to show that they are conditional
241
242
  if (!String(rootId).startsWith('file-')) {
242
- if (dataflow.graph.hasVertex(dataflow.entryPoint)) {
243
- dataflow.graph.addControlDependency(dataflow.entryPoint, rootId, true);
243
+ if (makeMaybe) {
244
+ if (dataflow.graph.hasVertex(dataflow.entryPoint)) {
245
+ dataflow.graph.addControlDependency(dataflow.entryPoint, rootId, true);
246
+ }
247
+ for (const out of dataflow.out) {
248
+ dataflow.graph.addControlDependency(out.nodeId, rootId, true);
249
+ }
244
250
  }
245
- for (const out of dataflow.out) {
246
- dataflow.graph.addControlDependency(out.nodeId, rootId, true);
251
+ else {
252
+ if (dataflow.graph.hasVertex(dataflow.entryPoint)) {
253
+ dataflow.graph.addEdge(dataflow.entryPoint, rootId, edge_1.EdgeType.Reads);
254
+ }
255
+ for (const out of dataflow.out) {
256
+ dataflow.graph.addEdge(out.nodeId, rootId, edge_1.EdgeType.Reads);
257
+ }
247
258
  }
248
259
  }
249
260
  data.ctx.files.addConsideredFile(filePath ?? '<inline>');
@@ -272,6 +283,6 @@ function standaloneSourceFile(idx, file, data, information) {
272
283
  ...data,
273
284
  environment: information.environment,
274
285
  referenceChain: [...data.referenceChain, file.filePath]
275
- }, information);
286
+ }, information, false);
276
287
  }
277
288
  //# sourceMappingURL=built-in-source.js.map
@@ -21,7 +21,7 @@ const PublicationsMain = [
21
21
  header: 'Statically Analyzing the Dataflow of R Programs (OOPSLA \'25)',
22
22
  description: 'Please cite this paper if you are using flowR in your research.',
23
23
  doi: 'https://doi.org/10.1145/3763087',
24
- bibtex: `@article{10.1145/3763087,
24
+ bibtex: String.raw `@article{10.1145/3763087,
25
25
  author = {Sihler, Florian and Tichy, Matthias},
26
26
  title = {Statically Analyzing the Dataflow of R Programs},
27
27
  year = {2025},
@@ -32,7 +32,7 @@ const PublicationsMain = [
32
32
  number = {OOPSLA2},
33
33
  url = {https://doi.org/10.1145/3763087},
34
34
  doi = {10.1145/3763087},
35
- abstract = {The R programming language is primarily designed for statistical computing and mostly used by researchers without a background in computer science. R provides a wide range of dynamic features and peculiarities that are difficult to analyze statically like dynamic scoping and lazy evaluation with dynamic side effects. At the same time, the R ecosystem lacks sophisticated analysis tools that support researchers in understanding and improving their code. In this paper, we present a novel static dataflow analysis framework for the R programming language that is capable of handling the dynamic nature of R programs and produces the dataflow graph of given R programs. This graph can be essential in a range of analyses, including program slicing, which we implement as a proof of concept. The core analysis works as a stateful fold over a normalized version of the abstract syntax tree of the R program, which tracks (re-)definitions, values, function calls, side effects, external files, and a dynamic control flow to produce one dataflow graph per program. We evaluate the correctness of our analysis using output equivalence testing on a manually curated dataset of 779 sensible slicing points from executable real-world R scripts. Additionally, we use a set of systematic test cases based on the capabilities of the R language and the implementation of the R interpreter and measure the runtimes well as the memory consumption on a set of 4,230 real-world R scripts and 20,815 packages available on R’s package manager CRAN. Furthermore, we evaluate the recall of our program slicer, its accuracy using shrinking, and its improvement over the state of the art. We correctly analyze almost all programs in our equivalence test suite, preserving the identical output for 99.7\\% of the manually curated slicing points. On average, we require 576ms to analyze the dataflow and around 213kB to store the graph of a research script. 
This shows that our analysis is capable of analyzing real-world sources quickly and correctly. Our slicer achieves an average reduction of 84.8\\% of tokens indicating its potential to improve program comprehension.},
35
+ abstract = {The R programming language is primarily designed for statistical computing and mostly used by researchers without a background in computer science. R provides a wide range of dynamic features and peculiarities that are difficult to analyze statically like dynamic scoping and lazy evaluation with dynamic side effects. At the same time, the R ecosystem lacks sophisticated analysis tools that support researchers in understanding and improving their code. In this paper, we present a novel static dataflow analysis framework for the R programming language that is capable of handling the dynamic nature of R programs and produces the dataflow graph of given R programs. This graph can be essential in a range of analyses, including program slicing, which we implement as a proof of concept. The core analysis works as a stateful fold over a normalized version of the abstract syntax tree of the R program, which tracks (re-)definitions, values, function calls, side effects, external files, and a dynamic control flow to produce one dataflow graph per program. We evaluate the correctness of our analysis using output equivalence testing on a manually curated dataset of 779 sensible slicing points from executable real-world R scripts. Additionally, we use a set of systematic test cases based on the capabilities of the R language and the implementation of the R interpreter and measure the runtimes well as the memory consumption on a set of 4,230 real-world R scripts and 20,815 packages available on R’s package manager CRAN. Furthermore, we evaluate the recall of our program slicer, its accuracy using shrinking, and its improvement over the state of the art. We correctly analyze almost all programs in our equivalence test suite, preserving the identical output for 99.7\% of the manually curated slicing points. On average, we require 576ms to analyze the dataflow and around 213kB to store the graph of a research script. 
This shows that our analysis is capable of analyzing real-world sources quickly and correctly. Our slicer achieves an average reduction of 84.8\% of tokens indicating its potential to improve program comprehension.},
36
36
  journal = {Proc. ACM Program. Lang.},
37
37
  month = oct,
38
38
  articleno = {309},
@@ -29,6 +29,7 @@ const flowr_analyzer_plugin_1 = require("../project/plugins/flowr-analyzer-plugi
29
29
  const flowr_analyzer_environment_context_1 = require("../project/context/flowr-analyzer-environment-context");
30
30
  const flowr_analyzer_functions_context_1 = require("../project/context/flowr-analyzer-functions-context");
31
31
  const flowr_analyzer_meta_context_1 = require("../project/context/flowr-analyzer-meta-context");
32
+ const config_1 = require("../config");
32
33
  async function analyzerQuickExample() {
33
34
  const analyzer = await new flowr_analyzer_builder_1.FlowrAnalyzerBuilder()
34
35
  .setEngine('tree-sitter')
@@ -172,23 +173,24 @@ The following sections highlight some of the most important configuration option
172
173
  ${(0, doc_structure_1.section)('Configuring flowR', 3)}
173
174
 
174
175
  You can fundamentally change the behavior of flowR using the [config file](${doc_files_1.FlowrWikiBaseRef}/Interface#configuring-flowr),
175
- embedded in the interface ${ctx.link('FlowrConfigOptions')}.
176
+ embedded in the interface ${ctx.link(config_1.FlowrConfig)}.
176
177
  With the builder you can either provide a complete configuration or amend the default configuration using:
177
178
 
178
179
  * ${ctx.linkM(flowr_analyzer_builder_1.FlowrAnalyzerBuilder, 'setConfig')} to set a complete configuration
180
+ * ${ctx.linkM(flowr_analyzer_builder_1.FlowrAnalyzerBuilder, 'configure')} to set the value of a specific key in the config
179
181
  * ${ctx.linkM(flowr_analyzer_builder_1.FlowrAnalyzerBuilder, 'amendConfig')} to amend the default configuration
180
182
 
181
- By default, the builder uses flowR's standard configuration obtained with ${ctx.link('defaultConfigOptions')}.
183
+ By default, the builder uses flowR's standard configuration obtained with ${ctx.linkO(config_1.FlowrConfig, 'default')}.
182
184
 
183
185
  ${(0, doc_structure_1.block)({
184
186
  type: 'NOTE',
185
- content: `During the analysis with the ${ctx.link(flowr_analyzer_1.FlowrAnalyzer.name)}, you can also access the configuration with
187
+ content: `During the analysis with the ${ctx.link(flowr_analyzer_1.FlowrAnalyzer)}, you can also access the configuration with
186
188
  the ${ctx.link(flowr_analyzer_context_1.FlowrAnalyzerContext)}.`
187
189
  })}
188
190
 
189
191
  ${(0, doc_structure_1.section)('Configuring the Engine', 3)}
190
192
 
191
- FlowR supports multiple [engines](${doc_files_1.FlowrWikiBaseRef}/Engines) for parsing and analyzing R code.
193
+ FlowR supports multiple ${ctx.linkPage('wiki/Engines', 'engines')} for parsing and analyzing R code.
192
194
  With the builder, you can select the engine to use with:
193
195
 
194
196
  * ${ctx.linkM(flowr_analyzer_builder_1.FlowrAnalyzerBuilder, 'setEngine')} to set the desired engine.
@@ -292,7 +294,7 @@ ${(0, doc_structure_1.section)('File Loading', 4)}
292
294
 
293
295
  These plugins register for every file encountered by the [files context](#Files_Context) and determine whether and _how_ they can process the file.
294
296
  They are responsible for transforming the raw file content into a representation that flowR can work with during the analysis.
295
- For example, the ${ctx.link(flowr_analyzer_description_file_plugin_1.FlowrAnalyzerDescriptionFilePlugin.name)} adds support for R \`DESCRIPTION\` files by parsing their content into key-value pairs.
297
+ For example, the ${ctx.link(flowr_analyzer_description_file_plugin_1.FlowrAnalyzerDescriptionFilePlugin)} adds support for R \`DESCRIPTION\` files by parsing their content into key-value pairs.
296
298
  These can then be used by other plugins, e.g. the ${ctx.link(flowr_analyzer_package_versions_description_file_plugin_1.FlowrAnalyzerPackageVersionsDescriptionFilePlugin)} that extracts package version information from these files.
297
299
 
298
300
  If multiple file plugins could apply (${ctx.link('DefaultFlowrAnalyzerFilePlugin::' + flowr_analyzer_file_plugin_1.FlowrAnalyzerFilePlugin.defaultPlugin().applies.name)}) to the same file,
@@ -47,9 +47,7 @@ const log_1 = require("../../test/functionality/_helper/log");
47
47
  const log_2 = require("../util/log");
48
48
  async function makeAnalyzerExample() {
49
49
  const analyzer = await new flowr_analyzer_builder_1.FlowrAnalyzerBuilder()
50
- .amendConfig(c => {
51
- c.ignoreSourceCalls = true;
52
- })
50
+ .configure('ignoreSourceCalls', true)
53
51
  .setEngine('tree-sitter')
54
52
  .build();
55
53
  analyzer.addRequest('x <- 1; y <- x; print(y);');
@@ -39,6 +39,10 @@ const flowr_analyzer_context_1 = require("../project/context/flowr-analyzer-cont
39
39
  const doc_maker_1 = require("./wiki-mk/doc-maker");
40
40
  const flowr_analyzer_1 = require("../project/flowr-analyzer");
41
41
  const built_in_1 = require("../dataflow/environments/built-in");
42
+ const dfg_1 = require("../util/mermaid/dfg");
43
+ const r_number_1 = require("../r-bridge/lang-4.x/ast/model/nodes/r-number");
44
+ const model_1 = require("../r-bridge/lang-4.x/ast/model/model");
45
+ const range_1 = require("../util/range");
42
46
  async function subExplanation(parser, { description, code, expectedSubgraph }) {
43
47
  expectedSubgraph = await (0, doc_dfg_1.verifyExpectedSubgraph)(parser, code, expectedSubgraph);
44
48
  const marks = [];
@@ -741,12 +745,14 @@ class WikiDataflowGraph extends doc_maker_1.DocMaker {
741
745
  super('wiki/Dataflow Graph.md', module.filename, 'dataflow graph');
742
746
  }
743
747
  async text({ ctx, treeSitter }) {
748
+ const introExampleCode = 'x <- 3\ny <- x + 1\ny';
744
749
  return `
745
- This page briefly summarizes flowR's dataflow graph, represented by the ${ctx.link(graph_1.DataflowGraph)} class within the code.
746
- In case you want to manually build such a graph (e.g., for testing), you can use the ${ctx.link(dataflowgraph_builder_1.DataflowGraphBuilder)}.
750
+ This page briefly summarizes flowR's dataflow graph (${ctx.link(graph_1.DataflowGraph)}).
747
751
  If you are interested in which features we support and which features are still to be worked on, please refer to our ${ctx.linkPage('wiki/Capabilities')} page.
748
- In summary, we discuss the following topics:
752
+ In case you want to manually build such a graph (e.g., for testing), you can use the ${ctx.link(dataflowgraph_builder_1.DataflowGraphBuilder)}.
753
+ In summary, we discuss the following topics in this wiki page:
749
754
 
755
+ - [Reading the Visualization](#reading-the-visualization)
750
756
  - [Vertices](#vertices)
751
757
  - [Edges](#edges)
752
758
  - [Control Dependencies](#control-dependencies)
@@ -756,37 +762,35 @@ In summary, we discuss the following topics:
756
762
  - [Call Graph Perspective](#perspectives-cg)
757
763
  - [Working with the Dataflow Graph](#dfg-working)
758
764
 
759
- Please be aware that the accompanied [dataflow information](#dataflow-information) (${ctx.link('DataflowInformation')}) returned by _flowR_ contains things besides the graph,
760
- like the entry and exit points of the subgraphs, and currently active references (see [below](#dataflow-information)).
761
- Additionally, you may be interested in the set of [Unknown Side Effects](#unknown-side-effects), marking calls which _flowR_ is unable to handle correctly.
762
-
763
- Potentially, you are interested in another perspective that flowR provides, the [control flow graph](${doc_files_1.FlowrWikiBaseRef}/Control%20Flow%20Graph), so please check the correpsonding
764
- wiki page if you are unsure.
765
+ Please be aware that the accompanied [dataflow information](#dataflow-information) (${ctx.link('DataflowInformation')}) returned by _flowR_
766
+ contains things besides the graph, like the entry and exit points of the subgraphs, and currently active references (see [below](#dataflow-information)).
767
+ Additionally, you may be interested in the [Unknown Side Effects](#unknown-side-effects), marking calls which _flowR_ is unable to handle correctly.
765
768
 
766
769
  > [!TIP]
767
- > If you want to investigate the dataflow graph,
768
- > you can either use the [Visual Studio Code extension](${doc_files_1.FlowrVsCode}) or the ${ctx.replCmd('dataflow*')}
769
- > command in the REPL (see the ${ctx.linkPage('wiki/Interface', 'Interface wiki page')} for more information).
770
- > There is also a simplified perspective available with ${ctx.replCmd('dataflowsimple*')} that does not show everything but is easier to read.
770
+ > To investigate the dataflow graph,
771
+ > you can either use the ${ctx.linkPage('flowr:vscode')} or the ${ctx.replCmd('dataflow*')}
772
+ > command in the REPL (see the ${ctx.linkPage('wiki/Interface', 'Interface wiki page')}).
773
+ > There is also a simplified version available with ${ctx.replCmd('dataflowsimple*')} that does not show everything but is easier to read.
771
774
  > For small graphs, you can also use ${ctx.replCmd('dataflowascii')} to print the graph as ASCII art.
772
- >
773
- > When using _flowR_ as a library, you may use the functions in ${(0, doc_files_1.getFilePathMd)('../util/mermaid/dfg.ts')}.
774
775
  >
775
- > If you receive a dataflow graph in its serialized form (e.g., by talking to a [_flowR_ server](${doc_files_1.FlowrWikiBaseRef}/Interface)), you can use ${ctx.linkM(graph_1.DataflowGraph, 'fromJson', { realNameWrapper: 'i', codeFont: true })} to retrieve the graph from the JSON representation.
776
+ > If you receive a dataflow graph in its serialized form (e.g., by talking to a [_flowR_ server](${doc_files_1.FlowrWikiBaseRef}/Interface)), you can use ${ctx.linkM(graph_1.DataflowGraph, 'fromJson', { realNameWrapper: 'i', codeFont: true })} to recover the graph object.
776
777
  >
777
778
  > Also, check out the [${doc_files_1.FlowrGithubGroupName}/sample-analyzer-df-diff](${doc_files_1.FlowrGithubBaseRef}/sample-analyzer-df-diff) repository for a complete example project creating and comparing dataflow graphs.
778
779
 
779
- ${await (0, doc_dfg_1.printDfGraphForCode)(treeSitter, 'x <- 3\ny <- x + 1\ny')}
780
+ To get started, let's look at the graph for the following code snippet:
781
+ ${(0, doc_code_1.codeBlock)('r', introExampleCode)}
780
782
 
783
+ With this code, the corresponding dataflow graph looks like this:
781
784
 
782
- The above dataflow graph showcases the general gist. We define a dataflow graph as a directed graph G = (V, E), differentiating between ${(0, doc_data_dfg_util_1.getAllVertices)().length} types of vertices V and
783
- ${(0, doc_data_dfg_util_1.getAllEdges)().length} types of edges E allowing each vertex to have a single, and each edge to have multiple distinct types.
785
+ ${await (0, doc_dfg_1.printDfGraphForCode)(treeSitter, introExampleCode, { showCode: false })}
786
+
787
+ The above dataflow graph showcases the general gist. We define a dataflow graph as a directed graph G&nbsp;=&nbsp;(V,&nbsp;E),
788
+ differentiating between ${(0, doc_data_dfg_util_1.getAllVertices)().length} types of vertices&nbsp;V and
789
+ ${(0, doc_data_dfg_util_1.getAllEdges)().length} types of edges&nbsp;E allowing each vertex to have a single, and each edge to have multiple distinct types.
784
790
  Additionally, every node may have links to its [control dependencies](#control-dependencies) (which you may view as a ${(0, text_1.nth)((0, doc_data_dfg_util_1.getAllEdges)().length + 1)} edge type,
785
791
  although they are explicitly not a data dependency and relate to the ${ctx.linkPage('wiki/Control Flow Graph')}).
786
792
 
787
- <details open>
788
-
789
- <summary>Vertex Types</summary>
793
+ ${(0, doc_structure_1.details)('Simplified Version of the graph', await (0, doc_dfg_1.printDfGraphForCode)(treeSitter, 'x <- 3\ny <- x + 1\ny', { simplified: true, showCode: false }))}
790
794
 
791
795
  The following vertex types exist:
792
796
 
@@ -794,20 +798,13 @@ The following vertices types exist:
794
798
 
795
799
  ${(0, doc_structure_1.details)('Class Diagram', 'All boxes should link to their respective implementation:\n' + (0, doc_code_1.codeBlock)('mermaid', ctx.mermaid('DataflowGraphVertexInfo', { inlineTypes: ['MergeableRecord'] })))}
796
800
 
797
- </details>
798
-
799
- <details open>
800
-
801
- <summary>Edge Types</summary>
802
-
803
- The following edges types exist, internally we use bitmasks to represent multiple types in a compact form:
801
 + The following edge types exist; internally, we use bitmasks to represent multiple types in a compact form, so you
802
+ should use the ${ctx.link('DfEdge', { codeFont: false, realNameWrapper: 'i' }, { type: 'variable' })} object and its methods to work with them:
804
803
 
805
804
  1. ${(0, doc_data_dfg_util_1.getAllEdges)().map(([k, v], index) => `[\`${k}\` (${v})](#${index + 1}-${k.toLowerCase().replace(/\s/g, '-')}-edge)`).join('\n1. ')}
806
805
 
807
806
  ${(0, doc_structure_1.details)('Class Diagram', 'All boxes should link to their respective implementation:\n' + (0, doc_code_1.codeBlock)('mermaid', ctx.mermaid('EdgeType', { inlineTypes: ['MergeableRecord'] })))}
808
807
 
809
- </details>
810
-
811
808
 
812
809
  From an implementation perspective all of these types are represented by respective interfaces, see ${(0, doc_files_1.getFilePathMd)('../dataflow/graph/vertex.ts')} and ${(0, doc_files_1.getFilePathMd)('../dataflow/graph/edge.ts')}.
813
810
 
@@ -828,6 +825,64 @@ ${(0, doc_general_1.prefixLines)((0, doc_code_1.codeBlock)('ts', `const name = $
828
825
  > For argument wrappers you can access the dataflow information for their value. For dead code, however, flowR currently contains
829
826
  > some core heuristics that remove it which cannot be reversed easily. So please open [an issue](${doc_issue_1.NewIssueUrl}) if you encounter such a case and require the node to be present in the dataflow graph.
830
827
 
828
 + ${(0, doc_structure_1.section)('Reading the Visualization', 2, 'reading-the-visualization')}
829
+
830
+ Before we dive into the details of the different vertices and edges, let's briefly talk about how to read the visualizations.
831
+ For this, let's have a look at a very simple graph, created for the number \`42\`:
832
+
833
+ ${await (0, doc_dfg_1.printDfGraphForCode)(treeSitter, '42', { showCode: false })}
834
+
835
+ ${(0, doc_structure_1.section)('Vertex Shape', 3, 'vtx-shape')}
836
+
837
+ The _shape_ of the vertex tells you the type of the vertex in the dataflow graph using the following scheme (the types are
838
+ explained in more detail in the following sections):
839
+
840
+ ${(0, doc_code_1.codeBlock)('mermaid', 'flowchart TD\n' +
841
+ // use mermaidNodeBrackets to get open and closing bracket
842
+ Object.entries(vertex_1.VertexType)
843
+ .map(([k, v]) => {
844
+ const { open, close } = (0, dfg_1.mermaidNodeBrackets)(v);
845
+ return ` ${v}${open}${k}${close}`;
846
+ }).join('\n') +
847
+ // we add a subflow for the function definition
848
+ '\n subgraph fbox ["function body"]\n body((...))\n end\n fdef-->fbox')}
849
+
850
+ ${(0, doc_structure_1.section)('Syntactic Types', 3, 'vtx-synt-type')}
851
+
852
+ Within the shape, in square brackets, you can find the syntactic type of the vertex
853
+ which is linked to the node in the ${ctx.linkPage('wiki/Normalized AST')}.
854
+ For more information on valid types and what to do with them, please refer to the ${ctx.linkPage('wiki/Normalized AST', 'normalized AST wiki page')}
855
+ and the corresponding helper objects (e.g., ${ctx.link(r_number_1.RNumber, undefined, { type: 'variable' })}).
856
+
857
+ ${(0, doc_structure_1.section)('Lexeme', 3, 'vtx-lexeme')}
858
+
859
+ Also in the first line, next to the [syntactic type](#vtx-synt-type), you can find the lexeme of the vertex (if it has one, e.g., for a variable definition or use).
860
+ This usually represents the textual source string of the respective vertex, and is also linked to the ${ctx.linkPage('wiki/Normalized AST')}.
861
 + You can also access the lexeme with ${ctx.linkO(model_1.RNode, 'lexeme')}.
862
+
863
+ ${(0, doc_structure_1.section)('Vertex Id', 3, 'vtx-id')}
864
+
865
+ In the second line, you will usually find the id (in the form of a ${ctx.link(node_id_1.NodeId, undefined, { type: 'variable' })}) of the vertex,
866
+ alongside its [control dependencies](#control-dependencies) if it has any. This id links the vertex to the respective node in the ${ctx.linkPage('wiki/Normalized AST')} (and all other perspectives created by flowR).
867
+ To give you an example, have a look at the following graph:
868
+
869
+ ${await (0, doc_dfg_1.printDfGraphForCode)(treeSitter, 'if(u) a', { showCode: false, mark: new Set(['1']) })}
870
+ With the _may_ prefix you can see that \`a\` has a [control dependency](#control-dependencies)
871
+ on the \`if\`, which only triggers when the condition is \`true\` (as indicated by the \`+\` suffix).
872
+
873
+ ${(0, doc_structure_1.section)('Location', 3, 'vtx-location')}
874
+
875
+ The third line indicates the compressed ${ctx.link(range_1.SourceRange)} of the vertex in the format \`startLine.startCharacter - endLine.endCharacter\`. If the range reads \`1.7\`,
876
+ this is short for \`1.7-1.7\`, likewise, \`1.7-9\` is short for \`1.7-1.9\`. So, \`1.7-9\` describes something starting
877
+ in the first line at the seventh character and ending in the first line at the ninth character.
878
+
879
+ ${(0, doc_structure_1.section)('Arguments and Additional Information', 3, 'vtx-additional-info')}
880
+
881
+ Some vertices (e.g., [function calls](#function-call-vertex)) have additional information, like the arguments of the call.
882
+ As you can see with the \`if\` example above alongside the [vertex id](#vtx-id),
883
+ these vertices also have an additional line which lists the ids of the arguments in order to clear any ambiguity in case, for example,
884
 + the mermaid graph layout fumbles the order.
885
+
831
886
  ${(0, doc_structure_1.section)('Vertices', 2, 'vertices')}
832
887
 
833
888
  1. ${(0, doc_data_dfg_util_1.getAllVertices)().map(([k, v]) => `[\`${k}\`](#${v.toLowerCase().replaceAll(/\s/g, '-')}-vertex)`).join('\n1. ')}
@@ -836,7 +891,7 @@ ${await getVertexExplanations(treeSitter, ctx)}
836
891
 
837
892
  ${(0, doc_structure_1.section)('Edges', 2, 'edges')}
838
893
 
839
- 1. ${(0, doc_data_dfg_util_1.getAllEdges)().map(([k, v], index) => `[\`${k}\` (${v})](#${index + 1}-${k.toLowerCase().replace(/\s/g, '-')}-edge)`).join('\n1. ')}
894
+ 1. ${(0, doc_data_dfg_util_1.getAllEdges)().map(([k, v], index) => `[\`${k}\` (${v})](#${index + 1}-${k.toLowerCase().replaceAll(/\s/g, '-')}-edge)`).join('\n1. ')}
840
895
 
841
896
  ${await getEdgesExplanations(treeSitter, ctx)}
842
897