@eagleoutice/flowr 2.2.1 → 2.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. package/cli/flowr.js +2 -1
  2. package/cli/repl/commands/repl-cfg.js +30 -7
  3. package/cli/repl/commands/repl-dataflow.js +29 -6
  4. package/cli/repl/commands/repl-normalize.js +22 -2
  5. package/cli/repl/commands/repl-parse.js +50 -3
  6. package/cli/repl/core.js +4 -0
  7. package/cli/repl/print-version.d.ts +1 -0
  8. package/cli/repl/print-version.js +7 -2
  9. package/cli/repl/server/connection.js +11 -9
  10. package/cli/script-core/statistics-helper-core.js +1 -1
  11. package/config.js +8 -1
  12. package/core/pipeline-executor.d.ts +6 -0
  13. package/core/pipeline-executor.js +8 -0
  14. package/core/print/dataflow-printer.js +3 -0
  15. package/core/steps/all/core/01-parse-tree-sitter.d.ts +7 -0
  16. package/core/steps/pipeline/default-pipelines.d.ts +57 -47
  17. package/core/steps/pipeline/default-pipelines.js +23 -2
  18. package/core/steps/pipeline/pipeline.d.ts +1 -1
  19. package/core/steps/pipeline/pipeline.js +1 -1
  20. package/core/steps/pipeline-step.d.ts +1 -3
  21. package/dataflow/environments/resolve-by-name.d.ts +3 -2
  22. package/dataflow/environments/resolve-by-name.js +4 -4
  23. package/dataflow/extractor.d.ts +10 -0
  24. package/dataflow/extractor.js +11 -1
  25. package/dataflow/graph/dataflowgraph-builder.d.ts +11 -10
  26. package/dataflow/graph/dataflowgraph-builder.js +11 -10
  27. package/dataflow/graph/edge.d.ts +1 -1
  28. package/dataflow/graph/edge.js +2 -2
  29. package/dataflow/graph/vertex.d.ts +6 -6
  30. package/dataflow/graph/vertex.js +5 -5
  31. package/dataflow/internal/process/functions/call/built-in/built-in-access.js +9 -5
  32. package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js +1 -1
  33. package/dataflow/internal/process/functions/call/built-in/built-in-source.js +22 -6
  34. package/documentation/doc-util/doc-cfg.js +2 -2
  35. package/documentation/doc-util/doc-dfg.d.ts +5 -3
  36. package/documentation/doc-util/doc-dfg.js +10 -8
  37. package/documentation/doc-util/doc-files.d.ts +1 -1
  38. package/documentation/doc-util/doc-files.js +1 -1
  39. package/documentation/doc-util/doc-normalized-ast.d.ts +2 -1
  40. package/documentation/doc-util/doc-normalized-ast.js +4 -5
  41. package/documentation/doc-util/doc-repl.d.ts +6 -2
  42. package/documentation/doc-util/doc-repl.js +10 -6
  43. package/documentation/doc-util/doc-structure.d.ts +1 -1
  44. package/documentation/doc-util/doc-types.d.ts +7 -5
  45. package/documentation/doc-util/doc-types.js +17 -12
  46. package/documentation/index.d.ts +9 -0
  47. package/documentation/index.js +26 -0
  48. package/documentation/print-capabilities-markdown.js +105 -19
  49. package/documentation/print-core-wiki.d.ts +1 -0
  50. package/documentation/print-core-wiki.js +406 -0
  51. package/documentation/print-dataflow-graph-wiki.js +27 -27
  52. package/documentation/print-interface-wiki.js +1 -3
  53. package/documentation/print-linting-and-testing-wiki.js +26 -8
  54. package/documentation/print-normalized-ast-wiki.js +22 -17
  55. package/documentation/print-query-wiki.js +37 -7
  56. package/documentation/print-search-wiki.js +2 -1
  57. package/package.json +10 -7
  58. package/queries/catalog/call-context-query/call-context-query-executor.js +1 -1
  59. package/queries/catalog/happens-before-query/happens-before-query-executor.d.ts +1 -1
  60. package/queries/catalog/happens-before-query/happens-before-query-executor.js +2 -2
  61. package/queries/catalog/happens-before-query/happens-before-query-format.js +1 -1
  62. package/queries/catalog/resolve-value-query/resolve-value-query-executor.d.ts +4 -0
  63. package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +34 -0
  64. package/queries/catalog/resolve-value-query/resolve-value-query-format.d.ts +72 -0
  65. package/queries/catalog/resolve-value-query/resolve-value-query-format.js +49 -0
  66. package/queries/catalog/search-query/search-query-format.js +1 -1
  67. package/queries/query.d.ts +60 -1
  68. package/queries/query.js +3 -1
  69. package/r-bridge/data/data.d.ts +50 -9
  70. package/r-bridge/data/data.js +64 -10
  71. package/r-bridge/data/types.d.ts +7 -1
  72. package/r-bridge/lang-4.x/ast/model/processing/decorate.d.ts +2 -0
  73. package/r-bridge/lang-4.x/ast/model/processing/node-id.js +2 -5
  74. package/r-bridge/lang-4.x/ast/parser/json/format.d.ts +6 -0
  75. package/r-bridge/lang-4.x/ast/parser/json/format.js +6 -0
  76. package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +13 -2
  77. package/r-bridge/lang-4.x/ast/parser/json/parser.js +19 -3
  78. package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-root.d.ts +3 -0
  79. package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-root.js +3 -0
  80. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js +51 -29
  81. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-types.d.ts +4 -1
  82. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-types.js +3 -0
  83. package/r-bridge/parser.d.ts +10 -0
  84. package/r-bridge/parser.js +26 -2
  85. package/search/flowr-search-builder.d.ts +1 -2
  86. package/search/flowr-search-builder.js +1 -3
  87. package/util/cfg/cfg.d.ts +10 -1
  88. package/util/cfg/cfg.js +56 -2
  89. package/util/mermaid/dfg.d.ts +3 -0
  90. package/util/mermaid/dfg.js +24 -8
  91. package/util/range.d.ts +21 -0
  92. package/util/range.js +3 -0
  93. package/util/strings.d.ts +9 -0
  94. package/util/strings.js +14 -0
  95. package/util/version.js +1 -1
package/cli/flowr.js CHANGED
@@ -23,6 +23,7 @@ const print_version_1 = require("./repl/print-version");
  const flowr_main_options_1 = require("./flowr-main-options");
  const tree_sitter_executor_1 = require("../r-bridge/lang-4.x/tree-sitter/tree-sitter-executor");
  const fs_1 = __importDefault(require("fs"));
+ const path_1 = __importDefault(require("path"));
  exports.toolName = 'flowr';
  exports.optionHelp = [
  {
@@ -63,7 +64,7 @@ if (options['config-json']) {
  if (!usedConfig) {
  if (options['config-file']) {
  // validate it exists
- if (!fs_1.default.existsSync(options['config-file'])) {
+ if (!fs_1.default.existsSync(path_1.default.resolve(options['config-file']))) {
  log_1.log.error(`Config file '${options['config-file']}' does not exist`);
  process.exit(1);
  }
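
The change above resolves a relative `--config-file` path against the current working directory before the existence check, so the flag works regardless of how the path is rooted. A minimal sketch of the same check (TypeScript; only the option name is taken from the diff, the helper itself is illustrative):

    import fs from 'fs';
    import path from 'path';

    // resolve a possibly relative config path against process.cwd() before checking it
    function configFileExists(configFile: string): boolean {
        return fs.existsSync(path.resolve(configFile));
    }
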
package/cli/repl/commands/repl-cfg.js CHANGED
@@ -1,24 +1,41 @@
  "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.controlflowStarCommand = exports.controlflowCommand = void 0;
  const cfg_1 = require("../../../util/cfg/cfg");
  const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
  const retriever_1 = require("../../../r-bridge/retriever");
  const cfg_2 = require("../../../util/mermaid/cfg");
+ const ansi_1 = require("../../../util/ansi");
+ const clipboardy_1 = __importDefault(require("clipboardy"));
  async function controlflow(parser, remainingLine) {
- return await (0, default_pipelines_1.createNormalizePipeline)(parser, {
+ return await (0, default_pipelines_1.createDataflowPipeline)(parser, {
  request: (0, retriever_1.requestFromInput)(remainingLine.trim())
  }).allRemainingSteps();
  }
+ function handleString(code) {
+ return code.startsWith('"') ? JSON.parse(code) : code;
+ }
+ function formatInfo(out, type) {
+ return out.formatter.format(`Copied ${type} to clipboard.`, { color: 7 /* Colors.White */, effect: ansi_1.ColorEffect.Foreground, style: 3 /* FontStyles.Italic */ });
+ }
  exports.controlflowCommand = {
  description: `Get mermaid code for the control-flow graph of R code, start with '${retriever_1.fileProtocol}' to indicate a file`,
  usageExample: ':controlflow',
  aliases: ['cfg', 'cf'],
  script: false,
  fn: async (output, shell, remainingLine) => {
- const result = await controlflow(shell, remainingLine);
- const cfg = (0, cfg_1.extractCFG)(result.normalize);
- output.stdout((0, cfg_2.cfgToMermaid)(cfg, result.normalize));
+ const result = await controlflow(shell, handleString(remainingLine));
+ const cfg = (0, cfg_1.extractCFG)(result.normalize, result.dataflow.graph);
+ const mermaid = (0, cfg_2.cfgToMermaid)(cfg, result.normalize);
+ output.stdout(mermaid);
+ try {
+ clipboardy_1.default.writeSync(mermaid);
+ output.stdout(formatInfo(output, 'mermaid code'));
+ }
+ catch (e) { /* do nothing this is a service thing */ }
  }
  };
  exports.controlflowStarCommand = {
@@ -27,9 +44,15 @@ exports.controlflowStarCommand = {
  aliases: ['cfg*', 'cf*'],
  script: false,
  fn: async (output, shell, remainingLine) => {
- const result = await controlflow(shell, remainingLine);
- const cfg = (0, cfg_1.extractCFG)(result.normalize);
- output.stdout((0, cfg_2.cfgToMermaidUrl)(cfg, result.normalize));
+ const result = await controlflow(shell, handleString(remainingLine));
+ const cfg = (0, cfg_1.extractCFG)(result.normalize, result.dataflow.graph);
+ const mermaid = (0, cfg_2.cfgToMermaidUrl)(cfg, result.normalize);
+ output.stdout(mermaid);
+ try {
+ clipboardy_1.default.writeSync(mermaid);
+ output.stdout(formatInfo(output, 'mermaid url'));
+ }
+ catch (e) { /* do nothing this is a service thing */ }
  }
  };
  //# sourceMappingURL=repl-cfg.js.map
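
With this change the `:controlflow` (`:cfg`) REPL command runs the full dataflow pipeline instead of only normalization, hands the resulting dataflow graph to `extractCFG`, and copies the generated mermaid code to the clipboard on a best-effort basis. A hedged sketch of the same flow outside the REPL, using only names visible in this diff (the deep-import paths into the published package are assumptions about the compiled file layout):

    import type { KnownParser } from '@eagleoutice/flowr/r-bridge/parser';
    import { createDataflowPipeline } from '@eagleoutice/flowr/core/steps/pipeline/default-pipelines';
    import { requestFromInput } from '@eagleoutice/flowr/r-bridge/retriever';
    import { extractCFG } from '@eagleoutice/flowr/util/cfg/cfg';
    import { cfgToMermaid } from '@eagleoutice/flowr/util/mermaid/cfg';
    import clipboard from 'clipboardy';

    async function cfgAsMermaid(parser: KnownParser, code: string): Promise<string> {
        const result = await createDataflowPipeline(parser, {
            request: requestFromInput(code.trim())
        }).allRemainingSteps();
        // extractCFG now additionally receives the dataflow graph (second argument)
        const cfg = extractCFG(result.normalize, result.dataflow.graph);
        const mermaid = cfgToMermaid(cfg, result.normalize);
        try {
            clipboard.writeSync(mermaid); // clipboard access is best-effort, as in the REPL
        } catch { /* ignore: clipboard may be unavailable, e.g. in CI */ }
        return mermaid;
    }
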
package/cli/repl/commands/repl-dataflow.js CHANGED
@@ -1,25 +1,42 @@
  "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.dataflowStarCommand = exports.dataflowCommand = void 0;
  const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
  const retriever_1 = require("../../../r-bridge/retriever");
  const dfg_1 = require("../../../util/mermaid/dfg");
- async function dataflow(parser, remainingLine) {
+ const clipboardy_1 = __importDefault(require("clipboardy"));
+ const ansi_1 = require("../../../util/ansi");
+ /**
+ * Obtain the dataflow graph using a known parser (such as the {@link RShell} or {@link TreeSitterExecutor}).
+ */
+ async function replGetDataflow(parser, code) {
  return await (0, default_pipelines_1.createDataflowPipeline)(parser, {
- request: (0, retriever_1.requestFromInput)(remainingLine.trim())
+ request: (0, retriever_1.requestFromInput)(code.trim())
  }).allRemainingSteps();
  }
  function handleString(code) {
  return code.startsWith('"') ? JSON.parse(code) : code;
  }
+ function formatInfo(out, type, timing) {
+ return out.formatter.format(`Copied ${type} to clipboard (dataflow: ${timing}ms).`, { color: 7 /* Colors.White */, effect: ansi_1.ColorEffect.Foreground, style: 3 /* FontStyles.Italic */ });
+ }
  exports.dataflowCommand = {
  description: `Get mermaid code for the dataflow graph of R code, start with '${retriever_1.fileProtocol}' to indicate a file`,
  usageExample: ':dataflow',
  aliases: ['d', 'df'],
  script: false,
  fn: async (output, shell, remainingLine) => {
- const result = await dataflow(shell, handleString(remainingLine));
- output.stdout((0, dfg_1.graphToMermaid)({ graph: result.dataflow.graph, includeEnvironments: false }).string);
+ const result = await replGetDataflow(shell, handleString(remainingLine));
+ const mermaid = (0, dfg_1.graphToMermaid)({ graph: result.dataflow.graph, includeEnvironments: false }).string;
+ output.stdout(mermaid);
+ try {
+ clipboardy_1.default.writeSync(mermaid);
+ output.stdout(formatInfo(output, 'mermaid code', result.dataflow['.meta'].timing));
+ }
+ catch (e) { /* do nothing this is a service thing */ }
  }
  };
  exports.dataflowStarCommand = {
@@ -28,8 +45,14 @@ exports.dataflowStarCommand = {
  aliases: ['d*', 'df*'],
  script: false,
  fn: async (output, shell, remainingLine) => {
- const result = await dataflow(shell, handleString(remainingLine));
- output.stdout((0, dfg_1.graphToMermaidUrl)(result.dataflow.graph, false));
+ const result = await replGetDataflow(shell, handleString(remainingLine));
+ const mermaid = (0, dfg_1.graphToMermaidUrl)(result.dataflow.graph, false);
+ output.stdout(mermaid);
+ try {
+ clipboardy_1.default.writeSync(mermaid);
+ output.stdout(formatInfo(output, 'mermaid url', result.dataflow['.meta'].timing));
+ }
+ catch (e) { /* do nothing this is a service thing */ }
  }
  };
  //# sourceMappingURL=repl-dataflow.js.map
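
In addition to printing the mermaid code, `:dataflow` now reports the dataflow step's timing from the step metadata (`result.dataflow['.meta'].timing`) when it copies the graph to the clipboard. A small sketch of reading that timing from a pipeline result (same caveat about import paths as above):

    import type { KnownParser } from '@eagleoutice/flowr/r-bridge/parser';
    import { createDataflowPipeline } from '@eagleoutice/flowr/core/steps/pipeline/default-pipelines';
    import { requestFromInput } from '@eagleoutice/flowr/r-bridge/retriever';
    import { graphToMermaid } from '@eagleoutice/flowr/util/mermaid/dfg';

    async function dataflowWithTiming(parser: KnownParser, code: string) {
        const result = await createDataflowPipeline(parser, {
            request: requestFromInput(code.trim())
        }).allRemainingSteps();
        const mermaid = graphToMermaid({ graph: result.dataflow.graph, includeEnvironments: false }).string;
        // '.meta'.timing is the per-step timing (in ms) that the REPL message now includes
        return { mermaid, timingMs: result.dataflow['.meta'].timing };
    }
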
package/cli/repl/commands/repl-normalize.js CHANGED
@@ -1,9 +1,14 @@
  "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.normalizeStarCommand = exports.normalizeCommand = void 0;
  const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
  const retriever_1 = require("../../../r-bridge/retriever");
  const ast_1 = require("../../../util/mermaid/ast");
+ const clipboardy_1 = __importDefault(require("clipboardy"));
+ const ansi_1 = require("../../../util/ansi");
  async function normalize(parser, remainingLine) {
  return await (0, default_pipelines_1.createNormalizePipeline)(parser, {
  request: (0, retriever_1.requestFromInput)(remainingLine.trim())
@@ -12,6 +17,9 @@ async function normalize(parser, remainingLine) {
  function handleString(code) {
  return code.startsWith('"') ? JSON.parse(code) : code;
  }
+ function formatInfo(out, type, timing) {
+ return out.formatter.format(`Copied ${type} to clipboard (normalize: ${timing}ms).`, { color: 7 /* Colors.White */, effect: ansi_1.ColorEffect.Foreground, style: 3 /* FontStyles.Italic */ });
+ }
  exports.normalizeCommand = {
  description: `Get mermaid code for the normalized AST of R code, start with '${retriever_1.fileProtocol}' to indicate a file`,
  usageExample: ':normalize',
@@ -19,7 +27,13 @@ exports.normalizeCommand = {
  script: false,
  fn: async (output, shell, remainingLine) => {
  const result = await normalize(shell, handleString(remainingLine));
- output.stdout((0, ast_1.normalizedAstToMermaid)(result.normalize.ast));
+ const mermaid = (0, ast_1.normalizedAstToMermaid)(result.normalize.ast);
+ output.stdout(mermaid);
+ try {
+ clipboardy_1.default.writeSync(mermaid);
+ output.stdout(formatInfo(output, 'mermaid url', result.normalize['.meta'].timing));
+ }
+ catch (e) { /* do nothing this is a service thing */ }
  }
  };
  exports.normalizeStarCommand = {
@@ -29,7 +43,13 @@ exports.normalizeStarCommand = {
  script: false,
  fn: async (output, shell, remainingLine) => {
  const result = await normalize(shell, handleString(remainingLine));
- output.stdout((0, ast_1.normalizedAstToMermaidUrl)(result.normalize.ast));
+ const mermaid = (0, ast_1.normalizedAstToMermaidUrl)(result.normalize.ast);
+ output.stdout(mermaid);
+ try {
+ clipboardy_1.default.writeSync(mermaid);
+ output.stdout(formatInfo(output, 'mermaid url', result.normalize['.meta'].timing));
+ }
+ catch (e) { /* do nothing this is a service thing */ }
  }
  };
  //# sourceMappingURL=repl-normalize.js.map
package/cli/repl/commands/repl-parse.js CHANGED
@@ -17,7 +17,41 @@ function toDepthMap(entry) {
  result.push({ ...current, leaf: children.length === 0 });
  children.reverse();
  const nextDepth = current.depth + 1;
- visit.push(...children.map(c => ({ depth: nextDepth, node: c })));
+ for (const c of children) {
+ visit.push({ depth: nextDepth, node: c });
+ }
+ }
+ return result;
+ }
+ function treeSitterToJsonEntry(node) {
+ return {
+ token: node.type,
+ children: [],
+ text: node.text,
+ id: node.id,
+ parent: node.parent?.id ?? -1,
+ terminal: node.isNamed,
+ line1: node.startPosition.row + 1,
+ col1: node.startPosition.column + 1,
+ line2: node.endPosition.row + 1,
+ col2: node.endPosition.column + 1
+ };
+ }
+ function treeSitterToDepthList(node) {
+ const visit = [{ depth: 0, node }];
+ const result = [];
+ while (visit.length > 0) {
+ const current = visit.pop();
+ if (current === undefined) {
+ continue;
+ }
+ const children = current.node.children;
+ result.push({ depth: current.depth, node: treeSitterToJsonEntry(current.node), leaf: children.length === 0 });
+ children.reverse();
+ const nextDepth = current.depth + 1;
+ for (const c of children) {
+ visit.push({ depth: nextDepth, node: c });
+ }
  }
  return result;
  }
@@ -57,6 +91,9 @@ function retrieveLocationString(locationRaw) {
  if (extracted[0] === extracted[2] && extracted[1] === extracted[3]) {
  return ` (${extracted[0]}:${extracted[1]})`;
  }
+ else if (extracted[0] === extracted[2]) {
+ return ` (${extracted[0]}:${extracted[1]}─${extracted[3]})`;
+ }
  else {
  return ` (${extracted[0]}:${extracted[1]}─${extracted[2]}:${extracted[3]})`;
  }
@@ -66,6 +103,10 @@ function depthListToTextTree(list, f) {
  const deadDepths = new Set();
  let i = 0;
  for (const { depth, node, leaf } of list) {
+ if (depth > 10) {
+ result += '...';
+ break;
+ }
  const nextDepth = i + 1 < list.length ? list[i + 1].depth : 0;
  deadDepths.delete(depth);
  result += initialIndentation(i, depth, deadDepths, nextDepth, list, f);
@@ -93,8 +134,14 @@ exports.parseCommand = {
  const result = await (0, default_pipelines_1.createParsePipeline)(parser, {
  request: (0, retriever_1.requestFromInput)((0, retriever_1.removeRQuotes)(remainingLine.trim()))
  }).allRemainingSteps();
- const object = (0, format_1.convertPreparedParsedData)((0, format_1.prepareParsedData)(result.parse.parsed));
- output.stdout(depthListToTextTree(toDepthMap(object), output.formatter));
+ if (parser.name === 'r-shell') {
+ const object = (0, format_1.convertPreparedParsedData)((0, format_1.prepareParsedData)(result.parse.parsed));
+ output.stdout(depthListToTextTree(toDepthMap(object), output.formatter));
+ }
+ else {
+ // print the tree-sitter ast
+ output.stdout(depthListToTextTree(treeSitterToDepthList(result.parse.parsed.rootNode), output.formatter));
+ }
  }
  };
  //# sourceMappingURL=repl-parse.js.map
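
For tree-sitter, `:parse` now converts the parser's `rootNode` into the same depth list the R-shell path produces, using an iterative depth-first walk with an explicit stack (children are reversed before being pushed so the leftmost child is emitted first), and then reuses `depthListToTextTree`; output is additionally capped at depth 10. A reduced TypeScript sketch of that traversal (the node type is simplified; the field mapping mirrors `treeSitterToJsonEntry` above):

    // simplified stand-in for a tree-sitter SyntaxNode
    interface TsNode {
        type: string;
        children: TsNode[];
    }

    function toDepthList(root: TsNode): { depth: number, token: string, leaf: boolean }[] {
        const visit: { depth: number, node: TsNode }[] = [{ depth: 0, node: root }];
        const result: { depth: number, token: string, leaf: boolean }[] = [];
        while (visit.length > 0) {
            const current = visit.pop();
            if (current === undefined) {
                continue;
            }
            const children = current.node.children;
            result.push({ depth: current.depth, token: current.node.type, leaf: children.length === 0 });
            // push children in reverse so the leftmost child is popped (and printed) first
            for (const c of [...children].reverse()) {
                visit.push({ depth: current.depth + 1, node: c });
            }
        }
        return result;
    }
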
package/cli/repl/core.js CHANGED
@@ -48,6 +48,7 @@ const scripts_info_1 = require("../common/scripts-info");
  const retriever_1 = require("../../r-bridge/retriever");
  const repl_main_1 = require("./commands/repl-main");
  const shell_1 = require("../../r-bridge/shell");
+ const log_1 = require("../../util/log");
  let _replCompleterKeywords = undefined;
  function replCompleterKeywords() {
  if (_replCompleterKeywords === undefined) {
@@ -107,6 +108,9 @@ async function replProcessStatement(output, statement, parser, allowRSessionAcce
  }
  catch (e) {
  output.stdout(`${bold(`Failed to execute command ${command}`)}: ${e?.message}. Using the ${bold('--verbose')} flag on startup may provide additional information.\n`);
+ if (log_1.log.settings.minLevel < 6 /* LogLevel.Fatal */) {
+ console.error(e);
+ }
  }
  }
  else {
package/cli/repl/print-version.d.ts CHANGED
@@ -1,2 +1,3 @@
  import type { KnownParser } from '../../r-bridge/parser';
+ export declare function versionReplString(parser: KnownParser): Promise<string>;
  export declare function printVersionRepl(parser: KnownParser): Promise<void>;
package/cli/repl/print-version.js CHANGED
@@ -1,9 +1,14 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
+ exports.versionReplString = versionReplString;
  exports.printVersionRepl = printVersionRepl;
  const repl_version_1 = require("./commands/repl-version");
- async function printVersionRepl(parser) {
+ async function versionReplString(parser) {
  const version = await (0, repl_version_1.retrieveVersionInformation)(parser);
- console.log(`flowR repl using flowR ${version.flowr}, R ${version.r}, engine ${version.engine}`);
+ const rVersion = version.r === 'none' ? '' : version.r === 'unknown' ? ', R version unknown' : `, R v${version.r}`;
+ return `flowR repl using flowR v${version.flowr}${rVersion} (${version.engine} engine)`;
+ }
+ async function printVersionRepl(parser) {
+ console.log(await versionReplString(parser));
  }
  //# sourceMappingURL=print-version.js.map
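
The banner logic is now reusable via the exported `versionReplString`, and the R version is only mentioned when an R engine actually reports one. Given the template string above, the returned banner takes one of these shapes (version numbers are illustrative; engine names follow those used elsewhere in this diff):

    flowR repl using flowR v2.2.3, R v4.4.1 (r-shell engine)
    flowR repl using flowR v2.2.3 (tree-sitter engine)                   // version.r === 'none'
    flowR repl using flowR v2.2.3, R version unknown (r-shell engine)    // version.r === 'unknown'
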
package/cli/repl/server/connection.js CHANGED
@@ -38,9 +38,6 @@ const core_1 = require("../core");
  const cfg_1 = require("../../../util/cfg/cfg");
  const quads_1 = require("../../../util/quads");
  const print_1 = require("../../../core/print/print");
- const _00_parse_1 = require("../../../core/steps/all/core/00-parse");
- const _10_normalize_1 = require("../../../core/steps/all/core/10-normalize");
- const _20_dataflow_1 = require("../../../core/steps/all/core/20-dataflow");
  const ansi_1 = require("../../../util/ansi");
  const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
  const graph_1 = require("../../../dataflow/graph/graph");
@@ -136,7 +133,7 @@ class FlowRServerConnection {
  }
  const tempFile = tmp.fileSync({ postfix: '.R' });
  const slicer = this.createPipelineExecutorForRequest(message, tempFile.name);
- await slicer.allRemainingSteps(false).then(async (results) => await this.sendFileAnalysisResponse(results, message))
+ await slicer.allRemainingSteps(false).then(async (results) => await this.sendFileAnalysisResponse(slicer, results, message))
  .catch(e => {
  this.logger.error(`[${this.name}] Error while analyzing file ${message.filename ?? 'unknown file'}: ${String(e)}`);
  (0, send_1.sendMessage)(this.socket, {
@@ -149,13 +146,18 @@ class FlowRServerConnection {
  // this is an interestingly named function that means "I am a callback that removes a file" - so this deletes the file
  tempFile.removeCallback();
  }
- async sendFileAnalysisResponse(results, message) {
+ async sendFileAnalysisResponse(slicer, results, message) {
  let cfg = undefined;
  if (message.cfg) {
- cfg = (0, cfg_1.extractCFG)(results.normalize);
+ cfg = (0, cfg_1.extractCFG)(results.normalize, results.dataflow?.graph);
  }
  const config = () => ({ context: message.filename ?? 'unknown', getId: (0, quads_1.defaultQuadIdGenerator)() });
  const sanitizedResults = sanitizeAnalysisResults(results);
+ const pipeline = slicer.getPipeline();
+ const parseStep = pipeline.steps.get('parse');
+ const normalizedStep = pipeline.steps.get('normalize');
+ const dataflowStep = pipeline.steps.get('dataflow');
+ (0, assert_1.guard)(parseStep !== undefined && normalizedStep !== undefined && dataflowStep !== undefined, 'All steps must be present');
  if (message.format === 'n-quads') {
  (0, send_1.sendMessage)(this.socket, {
  type: 'response-file-analysis',
@@ -163,9 +165,9 @@
  id: message.id,
  cfg: cfg ? (0, cfg_1.cfg2quads)(cfg, config()) : undefined,
  results: {
- parse: await (0, print_1.printStepResult)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, sanitizedResults.parse, 5 /* StepOutputFormat.RdfQuads */, config()),
- normalize: await (0, print_1.printStepResult)(_10_normalize_1.NORMALIZE, sanitizedResults.normalize, 5 /* StepOutputFormat.RdfQuads */, config()),
- dataflow: await (0, print_1.printStepResult)(_20_dataflow_1.STATIC_DATAFLOW, sanitizedResults.dataflow, 5 /* StepOutputFormat.RdfQuads */, config())
+ parse: await (0, print_1.printStepResult)(parseStep, sanitizedResults.parse, 5 /* StepOutputFormat.RdfQuads */, config()),
+ normalize: await (0, print_1.printStepResult)(normalizedStep, sanitizedResults.normalize, 5 /* StepOutputFormat.RdfQuads */, config()),
+ dataflow: await (0, print_1.printStepResult)(dataflowStep, sanitizedResults.dataflow, 5 /* StepOutputFormat.RdfQuads */, config())
  }
  });
  }
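
The server no longer hard-codes the R-shell step definitions when serializing to N-Quads; it asks the executor for its pipeline via the new `getPipeline()` and looks the `parse`, `normalize`, and `dataflow` steps up by name, so the same code path works for the tree-sitter pipeline. A hedged sketch of that lookup pattern:

    // 'executor' is any pipeline executor exposing the new getPipeline() accessor
    function getCoreSteps(executor: { getPipeline(): { steps: Map<string, unknown> } }) {
        const pipeline = executor.getPipeline();
        const parseStep = pipeline.steps.get('parse');
        const normalizeStep = pipeline.steps.get('normalize');
        const dataflowStep = pipeline.steps.get('dataflow');
        if (parseStep === undefined || normalizeStep === undefined || dataflowStep === undefined) {
            throw new Error('All steps must be present');
        }
        return { parseStep, normalizeStep, dataflowStep };
    }
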
package/cli/script-core/statistics-helper-core.js CHANGED
@@ -59,7 +59,7 @@ async function getStatsForSingleFile(options) {
  if (stats.outputs.size === 1) {
  if (options['dump-json']) {
  const [, output] = [...stats.outputs.entries()][0];
- const cfg = (0, cfg_1.extractCFG)(output.normalize);
+ const cfg = (0, cfg_1.extractCFG)(output.normalize, output.dataflow.graph);
  statistics_file_1.statisticsFileProvider.append('output-json', 'parse', await (0, print_1.printStepResult)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, output.parse, 2 /* StepOutputFormat.Json */));
  statistics_file_1.statisticsFileProvider.append('output-json', 'normalize', await (0, print_1.printStepResult)(_10_normalize_1.NORMALIZE, output.normalize, 2 /* StepOutputFormat.Json */));
  statistics_file_1.statisticsFileProvider.append('output-json', 'dataflow', await (0, print_1.printStepResult)(_20_dataflow_1.STATIC_DATAFLOW, output.dataflow, 2 /* StepOutputFormat.Json */));
package/config.js CHANGED
@@ -31,7 +31,6 @@ const defaultEngineConfigs = {
  };
  exports.defaultConfigOptions = {
  ignoreSourceCalls: false,
- rPath: undefined,
  semantics: {
  environment: {
  overwriteBuiltIns: {
@@ -126,6 +125,14 @@ function getEngineConfig(engine) {
  }
  function loadConfigFromFile(configFile, workingDirectory) {
  if (configFile !== undefined) {
+ if (path_1.default.isAbsolute(configFile) && fs_1.default.existsSync(configFile)) {
+ log_1.log.trace(`Found config at ${configFile} (absolute)`);
+ const ret = parseConfig(fs_1.default.readFileSync(configFile, { encoding: 'utf-8' }));
+ if (ret) {
+ log_1.log.info(`Using config ${JSON.stringify(ret)}`);
+ return ret;
+ }
+ }
  let searchPath = path_1.default.resolve(workingDirectory);
  do {
  const configPath = path_1.default.join(searchPath, configFile);
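
Config loading now short-circuits for absolute paths: an absolute `configFile` that exists is parsed directly, and only relative names fall through to the directory search starting at `workingDirectory`. A hedged sketch of that lookup order (the upward search is an assumption, since the loop body is cut off in this diff):

    import fs from 'fs';
    import path from 'path';

    function findConfigFile(configFile: string, workingDirectory: string): string | undefined {
        // 1. absolute paths are used as-is when they exist
        if (path.isAbsolute(configFile) && fs.existsSync(configFile)) {
            return configFile;
        }
        // 2. otherwise search for the file name, starting from the working directory
        let searchPath = path.resolve(workingDirectory);
        for (;;) {
            const candidate = path.join(searchPath, configFile);
            if (fs.existsSync(candidate)) {
                return candidate;
            }
            const parent = path.dirname(searchPath);
            if (parent === searchPath) {
                return undefined; // reached the filesystem root without finding it
            }
            searchPath = parent;
        }
    }
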
package/core/pipeline-executor.d.ts CHANGED
@@ -95,10 +95,16 @@ export declare class PipelineExecutor<P extends Pipeline> {
  * Construct a new pipeline executor.
  * The required additional input is specified by the {@link IPipelineStep#requiredInput|required input configuration} of each step in the `pipeline`.
  *
+ * Please see {@link createDataflowPipeline} and friends for engine agnostic shortcuts to create a pipeline executor.
+ *
  * @param pipeline - The {@link Pipeline} to execute, probably created with {@link createPipeline}.
  * @param input - External {@link PipelineInput|configuration and input} required to execute the given pipeline.
  */
  constructor(pipeline: P, input: PipelineInput<P>);
+ /**
+ * Retrieve the {@link Pipeline|pipeline} that is currently being.
+ */
+ getPipeline(): P;
  /**
  * Retrieve the current {@link PipelineStepStage|stage} the pipeline executor is in.
  *
package/core/pipeline-executor.js CHANGED
@@ -99,6 +99,8 @@ class PipelineExecutor {
  * Construct a new pipeline executor.
  * The required additional input is specified by the {@link IPipelineStep#requiredInput|required input configuration} of each step in the `pipeline`.
  *
+ * Please see {@link createDataflowPipeline} and friends for engine agnostic shortcuts to create a pipeline executor.
+ *
  * @param pipeline - The {@link Pipeline} to execute, probably created with {@link createPipeline}.
  * @param input - External {@link PipelineInput|configuration and input} required to execute the given pipeline.
  */
@@ -114,6 +116,12 @@ class PipelineExecutor {
  }
  (0, built_in_config_1.registerBuiltInDefinitions)(builtIns.definitions);
  }
+ /**
+ * Retrieve the {@link Pipeline|pipeline} that is currently being.
+ */
+ getPipeline() {
+ return this.pipeline;
+ }
  /**
  * Retrieve the current {@link PipelineStepStage|stage} the pipeline executor is in.
  *
package/core/print/dataflow-printer.js CHANGED
@@ -16,6 +16,9 @@ function mayObjectJson(d) {
  }
  }
  function objectJson(df) {
+ if (df === null) {
+ return 'null';
+ }
  const elems = [];
  for (const [key, value] of Object.entries(df)) {
  switch (typeof value) {
package/core/steps/all/core/01-parse-tree-sitter.d.ts CHANGED
@@ -5,6 +5,13 @@ import type { ParseRequiredInput } from '../../../../r-bridge/parser';
  export interface ParseStepOutputTS {
  readonly parsed: Tree;
  }
+ export interface TreeSitterParseJson {
+ readonly '.meta': {
+ readonly tokenCount: number;
+ readonly tokenCountNoComments: number;
+ };
+ readonly str: string;
+ }
  export declare const PARSE_WITH_TREE_SITTER_STEP: {
  readonly name: "parse";
  readonly humanReadableName: "parse with tree-sitter";
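
The new `TreeSitterParseJson` interface describes the JSON form of a tree-sitter parse result: token counts under the reserved `.meta` key plus the serialized tree as a string. An illustrative value satisfying the interface (all concrete values are made up, and the exact serialization format of `str` is not shown in this diff):

    const example: TreeSitterParseJson = {
        '.meta': {
            tokenCount: 12,
            tokenCountNoComments: 10
        },
        str: '(program (binary_operator ...))' // assumed shape of the serialized parse tree
    };
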