@eagleoutice/flowr 2.1.12 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93) hide show
  1. package/README.md +0 -1
  2. package/benchmark/slicer.d.ts +5 -12
  3. package/benchmark/slicer.js +46 -28
  4. package/cli/benchmark-app.d.ts +2 -0
  5. package/cli/benchmark-app.js +2 -1
  6. package/cli/benchmark-helper-app.d.ts +2 -0
  7. package/cli/benchmark-helper-app.js +2 -2
  8. package/cli/common/options.js +3 -1
  9. package/cli/flowr-main-options.js +36 -2
  10. package/cli/flowr.d.ts +6 -0
  11. package/cli/flowr.js +51 -24
  12. package/cli/repl/commands/repl-cfg.js +2 -4
  13. package/cli/repl/commands/repl-dataflow.js +2 -4
  14. package/cli/repl/commands/repl-execute.d.ts +2 -2
  15. package/cli/repl/commands/repl-execute.js +15 -5
  16. package/cli/repl/commands/repl-lineage.js +2 -4
  17. package/cli/repl/commands/repl-main.d.ts +2 -2
  18. package/cli/repl/commands/repl-normalize.js +2 -4
  19. package/cli/repl/commands/repl-parse.js +2 -4
  20. package/cli/repl/commands/repl-query.js +6 -8
  21. package/cli/repl/commands/repl-version.d.ts +5 -4
  22. package/cli/repl/commands/repl-version.js +10 -9
  23. package/cli/repl/core.d.ts +5 -5
  24. package/cli/repl/core.js +8 -12
  25. package/cli/repl/print-version.d.ts +2 -2
  26. package/cli/repl/print-version.js +3 -3
  27. package/cli/repl/server/connection.d.ts +3 -3
  28. package/cli/repl/server/connection.js +5 -7
  29. package/cli/repl/server/messages/message-hello.js +2 -1
  30. package/cli/repl/server/server.d.ts +4 -3
  31. package/cli/repl/server/server.js +7 -5
  32. package/cli/slicer-app.js +1 -1
  33. package/config.d.ts +36 -4
  34. package/config.js +30 -1
  35. package/core/pipeline-executor.d.ts +1 -1
  36. package/core/pipeline-executor.js +1 -1
  37. package/core/steps/all/core/00-parse.d.ts +4 -18
  38. package/core/steps/all/core/00-parse.js +2 -11
  39. package/core/steps/all/core/01-parse-tree-sitter.d.ts +23 -0
  40. package/core/steps/all/core/01-parse-tree-sitter.js +19 -0
  41. package/core/steps/all/core/10-normalize.d.ts +3 -2
  42. package/core/steps/all/core/10-normalize.js +1 -0
  43. package/core/steps/all/core/11-normalize-tree-sitter.d.ts +25 -0
  44. package/core/steps/all/core/11-normalize-tree-sitter.js +27 -0
  45. package/core/steps/all/core/20-dataflow.d.ts +2 -0
  46. package/core/steps/all/core/20-dataflow.js +1 -1
  47. package/core/steps/pipeline/default-pipelines.d.ts +368 -23
  48. package/core/steps/pipeline/default-pipelines.js +42 -4
  49. package/dataflow/extractor.d.ts +2 -1
  50. package/dataflow/extractor.js +2 -1
  51. package/dataflow/internal/process/functions/call/built-in/built-in-source.js +6 -5
  52. package/dataflow/processor.d.ts +2 -0
  53. package/documentation/doc-util/doc-auto-gen.js +2 -1
  54. package/documentation/doc-util/doc-cfg.js +1 -1
  55. package/documentation/doc-util/doc-dfg.js +2 -2
  56. package/documentation/doc-util/doc-files.d.ts +1 -0
  57. package/documentation/doc-util/doc-files.js +4 -0
  58. package/documentation/doc-util/doc-normalized-ast.js +2 -3
  59. package/documentation/doc-util/doc-query.js +1 -1
  60. package/documentation/doc-util/doc-search.js +1 -1
  61. package/documentation/doc-util/doc-types.js +2 -2
  62. package/documentation/print-dataflow-graph-wiki.js +15 -15
  63. package/documentation/print-engines-wiki.d.ts +1 -0
  64. package/documentation/print-engines-wiki.js +82 -0
  65. package/documentation/print-interface-wiki.js +6 -7
  66. package/documentation/print-normalized-ast-wiki.js +1 -1
  67. package/package.json +9 -5
  68. package/queries/catalog/cluster-query/cluster-query-format.d.ts +5 -4
  69. package/queries/catalog/dataflow-query/dataflow-query-format.d.ts +5 -4
  70. package/queries/catalog/dependencies-query/dependencies-query-format.d.ts +5 -4
  71. package/queries/catalog/id-map-query/id-map-query-format.d.ts +5 -4
  72. package/queries/catalog/lineage-query/lineage-query-format.d.ts +5 -4
  73. package/queries/catalog/normalized-ast-query/normalized-ast-query-format.d.ts +5 -4
  74. package/queries/catalog/search-query/search-query-format.d.ts +5 -4
  75. package/queries/catalog/static-slice-query/static-slice-query-format.d.ts +5 -4
  76. package/queries/query.d.ts +40 -32
  77. package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +4 -2
  78. package/r-bridge/lang-4.x/ast/parser/json/parser.js +5 -0
  79. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-executor.d.ts +18 -0
  80. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-executor.js +57 -0
  81. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.d.ts +3 -0
  82. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js +541 -0
  83. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-types.d.ts +35 -0
  84. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-types.js +40 -0
  85. package/r-bridge/parser.d.ts +32 -0
  86. package/r-bridge/parser.js +14 -0
  87. package/r-bridge/shell-executor.d.ts +37 -1
  88. package/r-bridge/shell-executor.js +39 -0
  89. package/r-bridge/shell.d.ts +12 -6
  90. package/r-bridge/shell.js +15 -6
  91. package/search/search-optimizer/search-optimizer.js +1 -1
  92. package/statistics/statistics.js +1 -1
  93. package/util/version.js +1 -1
@@ -1,25 +1,63 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.DEFAULT_PARSE_PIPELINE = exports.DEFAULT_NORMALIZE_PIPELINE = exports.DEFAULT_DATAFLOW_PIPELINE = exports.DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICING_PIPELINE = void 0;
4
- /**
5
- * Contains the default pipeline for working with flowr
6
- */
3
+ exports.TREE_SITTER_PARSE_PIPELINE = exports.DEFAULT_PARSE_PIPELINE = exports.TREE_SITTER_NORMALIZE_PIPELINE = exports.DEFAULT_NORMALIZE_PIPELINE = exports.TREE_SITTER_DATAFLOW_PIPELINE = exports.DEFAULT_DATAFLOW_PIPELINE = exports.TREE_SITTER_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = exports.TREE_SITTER_SLICE_AND_RECONSTRUCT_PIPELINE = exports.TREE_SITTER_SLICING_PIPELINE = exports.DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICING_PIPELINE = void 0;
4
+ exports.createParsePipeline = createParsePipeline;
5
+ exports.createSlicePipeline = createSlicePipeline;
6
+ exports.createNormalizePipeline = createNormalizePipeline;
7
+ exports.createDataflowPipeline = createDataflowPipeline;
7
8
  const pipeline_1 = require("./pipeline");
8
9
  const _00_parse_1 = require("../all/core/00-parse");
9
10
  const _10_normalize_1 = require("../all/core/10-normalize");
10
11
  const _20_dataflow_1 = require("../all/core/20-dataflow");
11
12
  const _00_slice_1 = require("../all/static-slicing/00-slice");
12
13
  const _10_reconstruct_1 = require("../all/static-slicing/10-reconstruct");
14
+ const _01_parse_tree_sitter_1 = require("../all/core/01-parse-tree-sitter");
15
+ const _11_normalize_tree_sitter_1 = require("../all/core/11-normalize-tree-sitter");
16
+ const pipeline_executor_1 = require("../../pipeline-executor");
13
17
  exports.DEFAULT_SLICING_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE, _10_reconstruct_1.NAIVE_RECONSTRUCT);
14
18
  exports.DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICING_PIPELINE;
15
19
  exports.DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE);
20
+ exports.TREE_SITTER_SLICING_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_tree_sitter_1.PARSE_WITH_TREE_SITTER_STEP, _11_normalize_tree_sitter_1.NORMALIZE_TREE_SITTER, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE, _10_reconstruct_1.NAIVE_RECONSTRUCT);
21
+ exports.TREE_SITTER_SLICE_AND_RECONSTRUCT_PIPELINE = exports.TREE_SITTER_SLICING_PIPELINE;
22
+ exports.TREE_SITTER_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_tree_sitter_1.PARSE_WITH_TREE_SITTER_STEP, _11_normalize_tree_sitter_1.NORMALIZE_TREE_SITTER, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE);
16
23
  /**
17
24
  * The default pipeline for working with flowr, including the dataflow step,
18
25
  * see the {@link DEFAULT_NORMALIZE_PIPELINE} for the pipeline without the dataflow step,
19
26
  * and the {@link DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE} for the pipeline with slicing and reconstructing steps
20
27
  */
21
28
  exports.DEFAULT_DATAFLOW_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW);
29
+ exports.TREE_SITTER_DATAFLOW_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_tree_sitter_1.PARSE_WITH_TREE_SITTER_STEP, _11_normalize_tree_sitter_1.NORMALIZE_TREE_SITTER, _20_dataflow_1.STATIC_DATAFLOW);
22
30
  /** The pipeline to use when you want to parse and normalize your R file, see {@link DEFAULT_DATAFLOW_PIPELINE} for the additional `dataflow` step */
23
31
  exports.DEFAULT_NORMALIZE_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE);
32
+ exports.TREE_SITTER_NORMALIZE_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_tree_sitter_1.PARSE_WITH_TREE_SITTER_STEP, _11_normalize_tree_sitter_1.NORMALIZE_TREE_SITTER);
24
33
  exports.DEFAULT_PARSE_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP);
34
+ exports.TREE_SITTER_PARSE_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_tree_sitter_1.PARSE_WITH_TREE_SITTER_STEP);
35
+ function createParsePipeline(parser, inputs) {
36
+ const base = parser.name === 'tree-sitter' ? exports.TREE_SITTER_PARSE_PIPELINE : exports.DEFAULT_PARSE_PIPELINE;
37
+ return new pipeline_executor_1.PipelineExecutor(base, {
38
+ parser: parser,
39
+ ...inputs
40
+ });
41
+ }
42
+ function createSlicePipeline(parser, inputs) {
43
+ const base = parser.name === 'tree-sitter' ? exports.TREE_SITTER_SLICING_PIPELINE : exports.DEFAULT_SLICING_PIPELINE;
44
+ return new pipeline_executor_1.PipelineExecutor(base, {
45
+ parser: parser,
46
+ ...inputs
47
+ });
48
+ }
49
+ function createNormalizePipeline(parser, inputs) {
50
+ const base = parser.name === 'tree-sitter' ? exports.TREE_SITTER_NORMALIZE_PIPELINE : exports.DEFAULT_NORMALIZE_PIPELINE;
51
+ return new pipeline_executor_1.PipelineExecutor(base, {
52
+ parser: parser,
53
+ ...inputs
54
+ });
55
+ }
56
+ function createDataflowPipeline(parser, inputs) {
57
+ const base = parser.name === 'tree-sitter' ? exports.TREE_SITTER_DATAFLOW_PIPELINE : exports.DEFAULT_DATAFLOW_PIPELINE;
58
+ return new pipeline_executor_1.PipelineExecutor(base, {
59
+ parser: parser,
60
+ ...inputs
61
+ });
62
+ }
25
63
  //# sourceMappingURL=default-pipelines.js.map
@@ -2,5 +2,6 @@ import type { DataflowInformation } from './info';
2
2
  import type { DataflowProcessors } from './processor';
3
3
  import type { NormalizedAst, ParentInformation } from '../r-bridge/lang-4.x/ast/model/processing/decorate';
4
4
  import type { RParseRequests } from '../r-bridge/retriever';
5
+ import type { KnownParserType, Parser } from '../r-bridge/parser';
5
6
  export declare const processors: DataflowProcessors<ParentInformation>;
6
- export declare function produceDataFlowGraph<OtherInfo>(request: RParseRequests, ast: NormalizedAst<OtherInfo & ParentInformation>): DataflowInformation;
7
+ export declare function produceDataFlowGraph<OtherInfo>(parser: Parser<KnownParserType>, request: RParseRequests, ast: NormalizedAst<OtherInfo & ParentInformation>): DataflowInformation;
@@ -68,7 +68,7 @@ function resolveLinkToSideEffects(ast, graph) {
68
68
  }
69
69
  }
70
70
  }
71
- function produceDataFlowGraph(request, ast) {
71
+ function produceDataFlowGraph(parser, request, ast) {
72
72
  const multifile = Array.isArray(request);
73
73
  let firstRequest;
74
74
  if (multifile) {
@@ -78,6 +78,7 @@ function produceDataFlowGraph(request, ast) {
78
78
  firstRequest = request;
79
79
  }
80
80
  const dfData = {
81
+ parser: parser,
81
82
  completeAst: ast,
82
83
  environment: (0, environment_1.initializeCleanEnvironments)(),
83
84
  processors: exports.processors,
@@ -7,11 +7,9 @@ exports.setSourceProvider = setSourceProvider;
7
7
  exports.processSourceCall = processSourceCall;
8
8
  exports.sourceRequest = sourceRequest;
9
9
  exports.standaloneSourceFile = standaloneSourceFile;
10
- const shell_executor_1 = require("../../../../../../r-bridge/shell-executor");
11
10
  const processor_1 = require("../../../../../processor");
12
11
  const info_1 = require("../../../../../info");
13
12
  const config_1 = require("../../../../../../config");
14
- const parser_1 = require("../../../../../../r-bridge/lang-4.x/ast/parser/json/parser");
15
13
  const known_call_handling_1 = require("../known-call-handling");
16
14
  const retriever_1 = require("../../../../../../r-bridge/retriever");
17
15
  const decorate_1 = require("../../../../../../r-bridge/lang-4.x/ast/model/processing/decorate");
@@ -21,6 +19,8 @@ const type_1 = require("../../../../../../r-bridge/lang-4.x/ast/model/type");
21
19
  const overwrite_1 = require("../../../../../environments/overwrite");
22
20
  const log_1 = require("../../../../../../util/log");
23
21
  const fs_1 = __importDefault(require("fs"));
22
+ const parser_1 = require("../../../../../../r-bridge/lang-4.x/ast/parser/json/parser");
23
+ const shell_executor_1 = require("../../../../../../r-bridge/shell-executor");
24
24
  let sourceProvider = (0, retriever_1.requestProviderFromFile)();
25
25
  function setSourceProvider(provider) {
26
26
  sourceProvider = provider;
@@ -61,13 +61,14 @@ function sourceRequest(rootId, request, data, information, getId) {
61
61
  return information;
62
62
  }
63
63
  }
64
- const executor = new shell_executor_1.RShellExecutor();
65
64
  // parse, normalize and dataflow the sourced file
66
65
  let normalized;
67
66
  let dataflow;
68
67
  try {
69
- const parsed = (0, retriever_1.retrieveParseDataFromRCode)(request, executor);
70
- normalized = (0, parser_1.normalize)({ parsed }, getId, request.request === 'file' ? request.content : undefined);
68
+ const file = request.request === 'file' ? request.content : undefined;
69
+ const parsed = (!data.parser.async ? data.parser : new shell_executor_1.RShellExecutor()).parse(request);
70
+ normalized = (typeof parsed !== 'string' ?
71
+ (0, parser_1.normalizeTreeSitter)({ parsed }, getId, file) : (0, parser_1.normalize)({ parsed }, getId, file));
71
72
  dataflow = (0, processor_1.processDataflowFor)(normalized.ast, {
72
73
  ...data,
73
74
  currentRequest: request,
@@ -6,7 +6,9 @@ import type { NormalizedAst, ParentInformation, RNodeWithParent } from '../r-bri
6
6
  import type { REnvironmentInformation } from './environments/environment';
7
7
  import type { RParseRequest } from '../r-bridge/retriever';
8
8
  import type { RNode } from '../r-bridge/lang-4.x/ast/model/model';
9
+ import type { KnownParserType, Parser } from '../r-bridge/parser';
9
10
  export interface DataflowProcessorInformation<OtherInfo> {
11
+ readonly parser: Parser<KnownParserType>;
10
12
  /**
11
13
  * Initial and frozen ast-information
12
14
  */
@@ -2,8 +2,9 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.autoGenHeader = autoGenHeader;
4
4
  const version_1 = require("../../util/version");
5
+ const doc_files_1 = require("./doc-files");
5
6
  function autoGenHeader({ rVersion, filename, purpose, currentDateAndTime = new Date().toISOString().replace('T', ', ').replace(/\.\d+Z$/, ' UTC') }) {
6
7
  const shortenFilename = filename.replace(/^.*src\//, 'src/');
7
- return `_This document was generated from '${shortenFilename}' on ${currentDateAndTime} presenting an overview of flowR's ${purpose} (v${(0, version_1.flowrVersion)().format()}${rVersion ? ', using R v' + rVersion : ''})._`;
8
+ return `_This document was generated from '${(0, doc_files_1.linkFlowRSourceFile)(shortenFilename)}' on ${currentDateAndTime} presenting an overview of flowR's ${purpose} (v${(0, version_1.flowrVersion)().format()}${rVersion ? ', using R v' + rVersion : ''}). Please do not edit this file/wiki page directly._`;
8
9
  }
9
10
  //# sourceMappingURL=doc-auto-gen.js.map
@@ -7,7 +7,7 @@ const default_pipelines_1 = require("../../core/steps/pipeline/default-pipelines
7
7
  const retriever_1 = require("../../r-bridge/retriever");
8
8
  async function getCfg(shell, code) {
9
9
  const steps = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_NORMALIZE_PIPELINE, {
10
- shell,
10
+ parser: shell,
11
11
  request: (0, retriever_1.requestFromInput)(code)
12
12
  }).allRemainingSteps();
13
13
  return {
@@ -35,7 +35,7 @@ function formatSideEffect(ef) {
35
35
  async function printDfGraphForCode(shell, code, { mark, showCode = true, codeOpen = false, exposeResult, switchCodeAndGraph = false } = {}) {
36
36
  const now = performance.now();
37
37
  const result = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
38
- shell,
38
+ parser: shell,
39
39
  request: (0, retriever_1.requestFromInput)(code)
40
40
  }).allRemainingSteps();
41
41
  const duration = performance.now() - now;
@@ -74,7 +74,7 @@ ${switchCodeAndGraph ? dfGraph : codeText}
74
74
  async function verifyExpectedSubgraph(shell, code, expectedSubgraph) {
75
75
  /* we verify that we get what we want first! */
76
76
  const info = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
77
- shell,
77
+ parser: shell,
78
78
  request: (0, retriever_1.requestFromInput)(code),
79
79
  getId: (0, decorate_1.deterministicCountingIdGenerator)(0)
80
80
  }).allRemainingSteps();
@@ -7,3 +7,4 @@ export declare const FlowrDockerRef = "https://hub.docker.com/r/eagleoutice/flow
7
7
  export declare const FlowrCodecovRef = "https://app.codecov.io/gh/flowr-analysis/flowr";
8
8
  export declare function getFilePathMd(path: string): string;
9
9
  export declare function getFileContentFromRoot(path: string): string;
10
+ export declare function linkFlowRSourceFile(path: string): string;
@@ -6,6 +6,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
6
6
  exports.FlowrCodecovRef = exports.FlowrDockerRef = exports.FlowrNpmRef = exports.FlowrWikiBaseRef = exports.RemoteFlowrFilePathBaseRef = exports.FlowrSiteBaseRef = exports.FlowrGithubBaseRef = void 0;
7
7
  exports.getFilePathMd = getFilePathMd;
8
8
  exports.getFileContentFromRoot = getFileContentFromRoot;
9
+ exports.linkFlowRSourceFile = linkFlowRSourceFile;
9
10
  const fs_1 = __importDefault(require("fs"));
10
11
  exports.FlowrGithubBaseRef = 'https://github.com/flowr-analysis';
11
12
  exports.FlowrSiteBaseRef = 'https://flowr-analysis.github.io/flowr';
@@ -27,4 +28,7 @@ function getFileContentFromRoot(path) {
27
28
  const fullpath = require.resolve('../../../' + path);
28
29
  return fs_1.default.readFileSync(fullpath, 'utf-8');
29
30
  }
31
+ function linkFlowRSourceFile(path) {
32
+ return `[${path}](${exports.RemoteFlowrFilePathBaseRef}/${path})`;
33
+ }
30
34
  //# sourceMappingURL=doc-files.js.map
@@ -22,7 +22,7 @@ ${(0, ast_1.normalizedAstToMermaid)(ast, prefix)}
22
22
  async function printNormalizedAstForCode(shell, code, { showCode = true, prefix = 'flowchart TD\n' } = {}) {
23
23
  const now = performance.now();
24
24
  const result = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_NORMALIZE_PIPELINE, {
25
- shell,
25
+ parser: shell,
26
26
  request: (0, retriever_1.requestFromInput)(code)
27
27
  }).allRemainingSteps();
28
28
  const duration = performance.now() - now;
@@ -55,8 +55,7 @@ ${(0, ast_1.normalizedAstToMermaid)(result.normalize.ast, prefix)}
55
55
  /** returns resolved expected df graph */
56
56
  async function verifyExpectedSubgraph(shell, code, expectedSubgraph) {
57
57
  /* we verify that we get what we want first! */
58
- const info = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
59
- shell,
58
+ const info = await (0, default_pipelines_1.createDataflowPipeline)(shell, {
60
59
  request: (0, retriever_1.requestFromInput)(code),
61
60
  getId: (0, decorate_1.deterministicCountingIdGenerator)(0)
62
61
  }).allRemainingSteps();
@@ -20,7 +20,7 @@ const query_print_1 = require("../../queries/query-print");
20
20
  async function showQuery(shell, code, queries, { showCode, collapseResult, collapseQuery } = {}) {
21
21
  const now = performance.now();
22
22
  const analysis = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
23
- shell,
23
+ parser: shell,
24
24
  request: (0, retriever_1.requestFromInput)(code)
25
25
  }).allRemainingSteps();
26
26
  const results = (0, query_1.executeQueries)({ dataflow: analysis.dataflow, ast: analysis.normalize }, queries);
@@ -20,7 +20,7 @@ const dfg_1 = require("../../util/mermaid/dfg");
20
20
  async function showSearch(shell, code, search, { collapseResult = true } = {}) {
21
21
  const now = performance.now();
22
22
  const analysis = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
23
- shell,
23
+ parser: shell,
24
24
  request: (0, retriever_1.requestFromInput)(code)
25
25
  }).allRemainingSteps();
26
26
  const result = (0, flowr_search_executor_1.runSearch)(search, analysis);
@@ -368,9 +368,9 @@ function shortLink(name, hierarchy, codeStyle = true) {
368
368
  return '';
369
369
  }
370
370
  const [pkg, mainName, node] = res;
371
- const comments = node.comments?.join('\n').replace(/\\?\n|```[a-zA-Z]*|\s\s*/g, ' ').replace(/<\/?code>|/g, '') ?? '';
371
+ const comments = node.comments?.join('\n').replace(/\\?\n|```[a-zA-Z]*|\s\s*/g, ' ').replace(/<\/?code>|`/g, '').replace(/"/g, '\'') ?? '';
372
372
  return `[${codeStyle ? '<code>' : ''}${(node.comments?.length ?? 0) > 0 ?
373
- (0, html_hover_over_1.textWithTooltip)(pkg ? `${pkg}::<b>${mainName}</b>` : mainName, (0, mermaid_1.escapeMarkdown)(comments.length > 400 ? comments.slice(0, 400) + '...' : comments)) : node.name}${codeStyle ? '</code>' : ''}](${getTypePathLink(node)})`;
373
+ (0, html_hover_over_1.textWithTooltip)(pkg ? `${pkg}::<b>${mainName}</b>` : mainName, comments.length > 400 ? comments.slice(0, 400) + '...' : comments) : node.name}${codeStyle ? '</code>' : ''}](${getTypePathLink(node)})`;
374
374
  }
375
375
  function getDocumentationForType(name, hierarchy) {
376
376
  const res = retrieveNode(name, hierarchy);
@@ -85,7 +85,7 @@ async function getVertexExplanations(shell, vertexType) {
85
85
  /* we use the map to ensure order easily :D */
86
86
  const vertexExplanations = new Map();
87
87
  vertexExplanations.set(vertex_1.VertexType.Value, [{
88
- shell: shell,
88
+ shell,
89
89
  name: 'Value Vertex',
90
90
  type: vertex_1.VertexType.Value,
91
91
  description: `
@@ -113,7 +113,7 @@ ${(0, doc_structure_1.details)('Example: Semantics Create a Value', `In the foll
113
113
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().constant('0')
114
114
  }, []]);
115
115
  vertexExplanations.set(vertex_1.VertexType.Use, [{
116
- shell: shell,
116
+ shell,
117
117
  name: 'Use Vertex',
118
118
  type: vertex_1.VertexType.Use,
119
119
  description: `
@@ -160,7 +160,7 @@ ${(0, doc_structure_1.details)('Example: Reads Edge Identifying Multiple Definit
160
160
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().use('1@x', 'x')
161
161
  }, []]);
162
162
  vertexExplanations.set(vertex_1.VertexType.FunctionCall, [{
163
- shell: shell,
163
+ shell,
164
164
  name: 'Function Call Vertex',
165
165
  type: vertex_1.VertexType.FunctionCall,
166
166
  description: `
@@ -348,7 +348,7 @@ ${(0, doc_structure_1.details)('Example: Function Call with a Side-Effect', awai
348
348
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().call('1@foo', 'foo', [])
349
349
  }, []]);
350
350
  vertexExplanations.set(vertex_1.VertexType.VariableDefinition, [{
351
- shell: shell,
351
+ shell,
352
352
  name: 'Variable Definition Vertex',
353
353
  type: vertex_1.VertexType.VariableDefinition,
354
354
  description: `
@@ -392,7 +392,7 @@ As you can see, _flowR_ is able to recognize that the initial definition of \`x\
392
392
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().defineVariable('1@x', 'x')
393
393
  }, []]);
394
394
  vertexExplanations.set(vertex_1.VertexType.FunctionDefinition, [{
395
- shell: shell,
395
+ shell,
396
396
  name: 'Function Definition Vertex',
397
397
  type: vertex_1.VertexType.FunctionDefinition,
398
398
  description: `
@@ -484,7 +484,7 @@ Besides this being a theoretically "shorter" way of defining a function, this be
484
484
  async function getEdgesExplanations(shell) {
485
485
  const edgeExplanations = new Map();
486
486
  edgeExplanations.set(edge_1.EdgeType.Reads, [{
487
- shell: shell,
487
+ shell,
488
488
  name: 'Reads Edge',
489
489
  type: edge_1.EdgeType.Reads,
490
490
  description: `
@@ -522,7 +522,7 @@ Please refer to the explanation of the respective vertices for more information.
522
522
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().reads('1:20', '1@x')
523
523
  }]]);
524
524
  edgeExplanations.set(edge_1.EdgeType.DefinedBy, [{
525
- shell: shell,
525
+ shell,
526
526
  name: 'DefinedBy Edge', /* concat for link generation */
527
527
  type: edge_1.EdgeType.DefinedBy,
528
528
  description: `
@@ -546,7 +546,7 @@ However, nested definitions can carry it (in the nested case, \`x\` is defined b
546
546
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().definedBy('1@x', '1:8')
547
547
  }]]);
548
548
  edgeExplanations.set(edge_1.EdgeType.Calls, [{
549
- shell: shell,
549
+ shell,
550
550
  name: 'Calls Edge',
551
551
  type: edge_1.EdgeType.Calls,
552
552
  description: 'Link the [function call](#function-call-vertex) to the [function definition](#function-definition-vertex) that is called.',
@@ -554,7 +554,7 @@ However, nested definitions can carry it (in the nested case, \`x\` is defined b
554
554
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().calls('2@foo', '1@function')
555
555
  }, []]);
556
556
  edgeExplanations.set(edge_1.EdgeType.Returns, [{
557
- shell: shell,
557
+ shell,
558
558
  name: 'Returns Edge',
559
559
  type: edge_1.EdgeType.Returns,
560
560
  description: 'Link the [function call](#function-call-vertex) to the exit points of the target definition (this may incorporate the call-context).',
@@ -568,7 +568,7 @@ f()
568
568
  `.trim();
569
569
  const dfInfo = await (0, doc_dfg_1.printDfGraphForCode)(shell, lateBindingExample, { switchCodeAndGraph: true, codeOpen: true, mark: new Set([1, '1->5', '9->5']) });
570
570
  edgeExplanations.set(edge_1.EdgeType.DefinesOnCall, [{
571
- shell: shell,
571
+ shell,
572
572
  name: 'DefinesOnCall Edge',
573
573
  type: edge_1.EdgeType.DefinesOnCall,
574
574
  description: `*This edge is usually joined with ${linkEdgeName(edge_1.EdgeType.DefinedByOnCall)}!*
@@ -592,7 +592,7 @@ ${dfInfo}
592
592
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().definesOnCall('$11', '$1').definedByOnCall('$1', '$11')
593
593
  }, []]);
594
594
  edgeExplanations.set(edge_1.EdgeType.DefinedByOnCall, [{
595
- shell: shell,
595
+ shell,
596
596
  name: 'DefinedByOnCall Edge',
597
597
  type: edge_1.EdgeType.DefinedByOnCall,
598
598
  description: `*This edge is usually joined with ${linkEdgeName(edge_1.EdgeType.DefinesOnCall)}!*
@@ -602,7 +602,7 @@ ${dfInfo}
602
602
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().definesOnCall('$11', '$1').definedByOnCall('$1', '$11')
603
603
  }, []]);
604
604
  edgeExplanations.set(edge_1.EdgeType.Argument, [{
605
- shell: shell,
605
+ shell,
606
606
  name: 'Argument Edge',
607
607
  type: edge_1.EdgeType.Argument,
608
608
  description: `Links a [function call](#function-call-vertex) to the entry point of its arguments. If we do not know the target of such a call, we automatically assume that all arguments are read by the call as well!
@@ -613,7 +613,7 @@ The exception to this is the [function definition](#function-definition-vertex)
613
613
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().argument('1@f', '1@x').reads('1@f', '1@x').argument('1@f', '1@y').reads('1@f', '1@y')
614
614
  }, []]);
615
615
  edgeExplanations.set(edge_1.EdgeType.SideEffectOnCall, [{
616
- shell: shell,
616
+ shell,
617
617
  name: 'SideEffectOnCall Edge',
618
618
  type: edge_1.EdgeType.SideEffectOnCall,
619
619
  description: 'Links a global side effect to an affected function call (e.g., a super definition within the function body)',
@@ -621,7 +621,7 @@ The exception to this is the [function definition](#function-definition-vertex)
621
621
  expectedSubgraph: (0, dataflowgraph_builder_1.emptyGraph)().sideEffectOnCall('1@x', '2@f')
622
622
  }, []]);
623
623
  edgeExplanations.set(edge_1.EdgeType.NonStandardEvaluation, [{
624
- shell: shell,
624
+ shell,
625
625
  name: 'NonStandardEvaluation Edge',
626
626
  type: edge_1.EdgeType.NonStandardEvaluation,
627
627
  description: `
@@ -668,7 +668,7 @@ ${(0, doc_structure_1.details)('Example: While-Loop Body', await (0, doc_dfg_1.p
668
668
  async function dummyDataflow() {
669
669
  const shell = new shell_1.RShell();
670
670
  const result = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
671
- shell,
671
+ parser: shell,
672
672
  request: (0, retriever_1.requestFromInput)('x <- 1\nx + 1')
673
673
  }).allRemainingSteps();
674
674
  shell.close();
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,82 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const shell_1 = require("../r-bridge/shell");
7
+ const log_1 = require("../../test/functionality/_helper/log");
8
+ const doc_auto_gen_1 = require("./doc-util/doc-auto-gen");
9
+ const doc_types_1 = require("./doc-util/doc-types");
10
+ const path_1 = __importDefault(require("path"));
11
+ const shell_executor_1 = require("../r-bridge/shell-executor");
12
+ const tree_sitter_executor_1 = require("../r-bridge/lang-4.x/tree-sitter/tree-sitter-executor");
13
+ const doc_files_1 = require("./doc-util/doc-files");
14
+ const doc_cli_option_1 = require("./doc-util/doc-cli-option");
15
+ const doc_structure_1 = require("./doc-util/doc-structure");
16
+ async function getText(shell) {
17
+ const rversion = (await shell.usedRVersion())?.format() ?? 'unknown';
18
+ const types = (0, doc_types_1.getTypesFromFolderAsMermaid)({
19
+ rootFolder: path_1.default.resolve('src/r-bridge/lang-4.x/tree-sitter/'),
20
+ files: [path_1.default.resolve('./src/config.ts'), path_1.default.resolve('./src/r-bridge/shell.ts'), path_1.default.resolve('./src/r-bridge/shell-executor.ts')],
21
+ typeName: 'FlowrConfigOptions',
22
+ inlineTypes: doc_types_1.mermaidHide
23
+ });
24
+ return `${(0, doc_auto_gen_1.autoGenHeader)({ filename: module.filename, purpose: 'engines', rVersion: rversion })}
25
+
26
+ To analyze R scripts, flowR needs to parse the R code and for that, we require a parser.
27
+ Originally, flowR shipped with a ${(0, doc_types_1.shortLink)(shell_1.RShell.name, types.info)}, an asynchronous interface to the R interpreter, still available today.
28
+ Later we extended this with the ${(0, doc_types_1.shortLink)(shell_executor_1.RShellExecutor.name, types.info)}, the synchronous counterpart to the ${(0, doc_types_1.shortLink)(shell_1.RShell.name, types.info)}.
29
+ However, these interfaces are relatively slow as they require communication with an underlying R interpreter.
30
+ Using [tree-sitter](https://tree-sitter.github.io/tree-sitter/), with its [node bindings](https://github.com/tree-sitter/node-tree-sitter)
31
+ and [R grammar](https://github.com/r-lib/tree-sitter-r), we can provide the ${(0, doc_types_1.shortLink)(tree_sitter_executor_1.TreeSitterExecutor.name, types.info)} which
32
+ is synchronous, faster, and no longer needs an R installation, but requires the appropriate bindings.
33
+ To allow users of R to freely choose their backend between the R interpreter and the tree-sitter parser,
34
+ we provide the concept of engines.
35
+
36
+ Engines can be loaded with [flowR's configuration file](${doc_files_1.FlowrWikiBaseRef}/Interface#configuring-flowr). Additionally, they
37
+ are exposed with some command line options (e.g., when using the docker image of flowR):
38
+
39
+ - ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'engine.r-shell.disabled', false)} to disable the ${(0, doc_types_1.shortLink)(shell_1.RShell.name, types.info)} engine
40
+ - ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'engine.r-shell.r-path', false)} (which is the canonical version of ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'r-path')})
41
+ - ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'engine.tree-sitter.disabled', false)} to disable the ${(0, doc_types_1.shortLink)(tree_sitter_executor_1.TreeSitterExecutor.name, types.info)} engine
42
+ - ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'engine.tree-sitter.wasm-path', false)} pass the path to the wasm of the r grammar of tree-sitter (see [below](#tree-sitter))
43
+ - ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'engine.tree-sitter.tree-sitter-wasm-path', false)} pass the path to the wasm of tree-sitter (see [below](#tree-sitter))
44
+ - ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'default-engine', false)} to set the default engine to use
45
+
46
+ <a id="tree-sitter"></a>
47
+ ## Dealing with the Tree-Sitter Engine
48
+
49
+ ${(0, doc_structure_1.block)({
50
+ type: 'WARNING',
51
+ content: 'As the tree-sitter engine is only for parsing, it cannot execute R code.'
52
+ })}
53
+
54
+ In general, there is no need for you to pass custom paths using either
55
+ ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'engine.tree-sitter.wasm-path', false)} or
56
+ ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'engine.tree-sitter.tree-sitter-wasm-path', false)}.
57
+ However, you may want to experiment with the R grammar or provide a newer
58
+ one in case that of _flowR_ is outdated.
59
+
60
+ To use a newer [R grammar](https://github.com/r-lib/tree-sitter-r),
61
+ you first must build the new wasm file. For this you have to:
62
+
63
+ 1. Install the dependencies with \`npm ci\` in the tree-sitter-r repository.
64
+ 2. Build the wasm using \`tree-sitter build --wasm .\` with the [tree sitter cli](https://github.com/tree-sitter/tree-sitter)
65
+ which should be a dev dependency.
66
+ 3. Pass the \`tree-sitter-r.wasm\` to flowR.
67
+
68
+ For tree-sitter, please rely on the [releases](https://github.com/tree-sitter/tree-sitter/releases).
69
+
70
+ `;
71
+ }
72
+ /** if we run this script, we want a Markdown representation of the capabilities */
73
+ if (require.main === module) {
74
+ (0, log_1.setMinLevelOfAllLogs)(6 /* LogLevel.Fatal */);
75
+ const shell = new shell_1.RShell();
76
+ void getText(shell).then(str => {
77
+ console.log(str);
78
+ }).finally(() => {
79
+ shell.close();
80
+ });
81
+ }
82
+ //# sourceMappingURL=print-engines-wiki.js.map
@@ -101,7 +101,7 @@ async function explainRepl(shell) {
101
101
  return `
102
102
  > [!NOTE]
103
103
  > To execute arbitrary R commands with a repl request, _flowR_ has to be started explicitly with ${(0, doc_cli_option_1.getCliLongOptionOf)('flowr', 'r-session-access')}.
104
- > Please be aware that this introduces a security risk.
104
+ > Please be aware that this introduces a security risk and note that this relies on the [\`r-shell\` engine](${doc_files_1.FlowrWikiBaseRef}/Engines).
105
105
 
106
106
 
107
107
  Although primarily meant for users to explore,
@@ -172,16 +172,16 @@ Within the REPL this works by running the following:
172
172
 
173
173
  ${(0, doc_code_1.codeBlock)('shell', ':query @config')}
174
174
 
175
-
176
175
  The following summarizes the configuration options:
177
176
 
178
-
179
177
  - \`ignoreSourceCalls\`: If set to \`true\`, _flowR_ will ignore source calls when analyzing the code, i.e., ignoring the inclusion of other files.
180
178
  - \`rPath\`: The path to the R executable. If not set, _flowR_ will try to find the R executable in the system's PATH.
181
179
  - \`semantics\`: allows to configure the way _flowR_ handles R, although we currently only support \`semantics/environment/overwriteBuiltIns\`.
182
180
  You may use this to overwrite _flowR_'s handling of built-in function and even completely clear the preset definitions shipped with flowR.
183
181
  See [Configure BuiltIn Semantics](#configure-builtin-semantics) for more information.
184
182
  - \`solver\`: allows to configure how _flowR_ resolves variables and their values (currently we support: ${Object.values(config_1.VariableResolve).map(v => `\`${v}\``).join(', ')}), as well as if pointer analysis should be active.
183
+ - \`engines\`: allows to configure the engines used by _flowR_ to interact with R code. See the [Engines wiki page](${doc_files_1.FlowrWikiBaseRef}/Engines) for more information.
184
+ - \`defaultEngine\`: allows to specify the default engine to use for interacting with R code. If not set, an arbitrary engine from the specified list will be used.
185
185
 
186
186
  So you can configure _flowR_ by adding a file like the following:
187
187
 
@@ -201,6 +201,7 @@ ${(0, doc_code_1.codeBlock)('json', JSON.stringify({
201
201
  }
202
202
  }
203
203
  },
204
+ engines: [{ type: 'r-shell' }],
204
205
  solver: {
205
206
  variables: config_1.VariableResolve.Alias,
206
207
  pointerTracking: true
@@ -246,15 +247,13 @@ function explainWritingCode(shell) {
246
247
  inlineTypes: doc_types_1.mermaidHide
247
248
  });
248
249
  return `
249
-
250
-
251
250
  _flowR_ can be used as a [module](${doc_files_1.FlowrNpmRef}) and offers several main classes and interfaces that are interesting for extension writers
252
251
  (see the [Visual Studio Code extension](${doc_files_1.FlowrGithubBaseRef}/vscode-flowr) or the [core](${doc_files_1.FlowrWikiBaseRef}/Core) wiki page for more information).
253
252
 
254
253
  ### Using the ${(0, doc_types_1.shortLink)(shell_1.RShell.name, types.info)} to Interact with R
255
254
 
256
255
  The ${(0, doc_types_1.shortLink)(shell_1.RShell.name, types.info)} class allows interfacing with the \`R\`&nbsp;ecosystem installed on the host system.
257
- For now, there are no (real) alternatives, although we plan on providing more flexible drop-in replacements.
256
+ Please have a look at [flowR's engines](${doc_files_1.FlowrWikiBaseRef}/Engines) for more information on alternatives.
258
257
 
259
258
  > [!IMPORTANT]
260
259
  > Each ${(0, doc_types_1.shortLink)(shell_1.RShell.name, types.info)} controls a new instance of the R&nbsp;interpreter, make sure to call <code>${(0, doc_types_1.shortLink)(shell_1.RShell.name, types.info, false)}::${shell.close.name}()</code> when you’re done.
@@ -279,7 +278,7 @@ In short, if you still "just want to slice" you can do it like this with the ${(
279
278
 
280
279
  ${(0, doc_code_1.codeBlock)('ts', `
281
280
  const slicer = new ${pipeline_executor_1.PipelineExecutor.name}(DEFAULT_SLICING_PIPELINE, {
282
- shell: new ${shell_1.RShell.name}(),
281
+ parser: new ${shell_1.RShell.name}(),
283
282
  request: ${retriever_1.requestFromInput.name}('x <- 1\\nx + 1'),
284
283
  criterion: ['2@x']
285
284
  })
@@ -100,7 +100,7 @@ a pipeline like the ${(0, doc_types_1.shortLink)('DEFAULT_NORMALIZE_PIPELINE', t
100
100
  ${(0, doc_code_1.codeBlock)('ts', `
101
101
  async function getAst(code: string): Promise<RNode> {
102
102
  const result = await new ${pipeline_executor_1.PipelineExecutor.name}(DEFAULT_NORMALIZE_PIPELINE, {
103
- shell: new ${shell_1.RShell.name}(),
103
+ parser: new ${shell_1.RShell.name}(),
104
104
  request: ${retriever_1.requestFromInput.name}(code.trim())
105
105
  }).allRemainingSteps();
106
106
  return result.normalize.ast;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@eagleoutice/flowr",
3
- "version": "2.1.12",
3
+ "version": "2.2.0",
4
4
  "description": "Static Dataflow Analyzer and Program Slicer for the R Programming Language",
5
5
  "types": "dist/src/index.d.ts",
6
6
  "repository": {
@@ -21,18 +21,21 @@
21
21
  "stats-helper": "ts-node src/cli/statistics-helper-app.ts",
22
22
  "slicer": "ts-node src/cli/slicer-app.ts",
23
23
  "benchmark-helper": "ts-node src/cli/benchmark-helper-app.ts",
24
- "benchmark": "npm run build && node dist/src/cli/benchmark-app.js",
24
+ "benchmark": "npm run build && npm run build:copy-wasm && node dist/src/cli/benchmark-app.js",
25
25
  "summarizer": "ts-node src/cli/summarizer-app.ts",
26
26
  "export-quads": "ts-node src/cli/export-quads-app.ts",
27
27
  "capabilities-markdown": "ts-node src/documentation/print-capabilities-markdown.ts",
28
28
  "wiki:df-graph": "ts-node src/documentation/print-dataflow-graph-wiki.ts",
29
29
  "wiki:normalized-ast": "ts-node src/documentation/print-normalized-ast-wiki.ts",
30
30
  "wiki:query-api": "ts-node src/documentation/print-query-wiki.ts",
31
+ "wiki:engines": "ts-node src/documentation/print-engines-wiki.ts",
31
32
  "wiki:search-api": "ts-node src/documentation/print-search-wiki.ts",
32
33
  "wiki:linting-and-testing": "ts-node src/documentation/print-linting-and-testing-wiki.ts",
33
34
  "wiki:interface": "ts-node src/documentation/print-interface-wiki.ts",
34
35
  "build": "tsc --project .",
35
- "build:bundle-flowr": "npm run build && esbuild --bundle dist/src/cli/flowr.js --platform=node --bundle --minify --target=node22 --outfile=dist/src/cli/flowr.min.js",
36
+ "build:bundle-flowr": "npm run build && esbuild --bundle dist/src/cli/flowr.js --platform=node --bundle --minify --target=node22 --outfile=dist/src/cli/flowr.min.js && npm run build:copy-wasm-min",
37
+ "build:copy-wasm": "cp src/r-bridge/lang-4.x/tree-sitter/tree-sitter-r.wasm dist/src/r-bridge/lang-4.x/tree-sitter/ && cp src/r-bridge/lang-4.x/tree-sitter/tree-sitter.wasm dist/src/r-bridge/lang-4.x/tree-sitter/",
38
+ "build:copy-wasm-min": "cp src/r-bridge/lang-4.x/tree-sitter/tree-sitter-r.wasm dist/src/cli && cp src/r-bridge/lang-4.x/tree-sitter/tree-sitter.wasm dist/src/cli",
36
39
  "lint-local": "npx eslint --version && npx eslint src/ test/ --rule \"no-warning-comments: off\"",
37
40
  "lint": "npm run license-compat -- --summary && npx eslint --version && npx eslint src/ test/",
38
41
  "license-compat": "license-checker --onlyAllow 'MIT;MIT OR X11;GPLv2;LGPL;GNUGPL;ISC;Apache-2.0;FreeBSD;BSD-2-Clause;clearbsd;ModifiedBSD;BSD-3-Clause;Python-2.0;Unlicense;WTFPL;BlueOak-1.0.0;CC-BY-4.0;CC-BY-3.0;CC0-1.0;0BSD'",
@@ -40,7 +43,7 @@
40
43
  "test": "vitest --exclude \"test/system-tests/**\" --config test/vitest.config.mts",
41
44
  "test:system": "vitest --dir test/system-tests --config test/system-tests/vitest.config.mts",
42
45
  "test:coverage": "npm run test -- --coverage",
43
- "performance-test": "func() { cd test/performance/ && bash run-all-suites.sh $1 $2 $3; cd ../../; }; func",
46
+ "performance-test": "func() { cd test/performance/ && bash run-all-suites.sh $1 $2 $3 $4; cd ../../; }; func",
44
47
  "test-full": "npm run test:coverage -- --no-watch -- --make-summary --test-installation",
45
48
  "detect-circular-deps": "npx madge --extensions ts,tsx --circular src/",
46
49
  "checkup": "npm run flowr -- --execute \":version\" && npm run lint && npm run test-full -- --allowOnly=false && npm run test:system -- --no-watch && docker build -t test-flowr -f scripts/Dockerfile . && npm run doc && npm-run-all wiki:*"
@@ -163,7 +166,7 @@
163
166
  "npm run lint",
164
167
  "npm run test-full"
165
168
  ],
166
- "after:bump": "npm run build",
169
+ "after:bump": "npm run build && npm run build:copy-wasm",
167
170
  "after:git:release": "echo After git push, before github release",
168
171
  "after:release": "echo Successfully released ${name} v${version} to ${repo.repository}."
169
172
  },
@@ -220,6 +223,7 @@
220
223
  "tmp": "^0.2.3",
221
224
  "ts-essentials": "^10.0.2",
222
225
  "tslog": "^4.9.3",
226
+ "web-tree-sitter": "^0.24.7",
223
227
  "ws": "^8.18.0",
224
228
  "xpath-ts2": "^1.4.2"
225
229
  }